Compare commits

..

No commits in common. "master" and "nilmdb-1.3.0" have entirely different histories.

215 changed files with 19858 additions and 22885 deletions

View File

@ -1,11 +1,10 @@
# -*- conf -*- # -*- conf -*-
[run] [run]
branch = True # branch = True
[report] [report]
exclude_lines = exclude_lines =
pragma: no cover pragma: no cover
if 0: if 0:
omit = nilmdb/scripts,nilmdb/_version.py,nilmdb/fsck omit = nilmdb/utils/datetime_tz*,nilmdb/scripts,nilmdb/_version.py
show_missing = True

7
.gitignore vendored
View File

@ -4,7 +4,6 @@ tests/*testdb/
db/ db/
# Compiled / cythonized files # Compiled / cythonized files
README.html
docs/*.html docs/*.html
build/ build/
*.pyc *.pyc
@ -16,8 +15,10 @@ nilmdb/server/rbtree.c
# Setup junk # Setup junk
dist/ dist/
nilmdb.egg-info/ nilmdb.egg-info/
venv/
.eggs/ # This gets generated as needed by setup.py
MANIFEST.in
MANIFEST
# Misc # Misc
timeit*out timeit*out

250
.pylintrc Normal file
View File

@ -0,0 +1,250 @@
# -*- conf -*-
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Profiled execution.
profile=no
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=datetime_tz
# Pickle collected data for later comparisons.
persistent=no
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
[MESSAGES CONTROL]
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time.
#enable=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once).
disable=C0111,R0903,R0201,R0914,R0912,W0142,W0703,W0702
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html
output-format=parseable
# Include message's id in output
include-ids=yes
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]".
files-output=no
# Tells whether to display a full report or only the messages
reports=yes
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Add a comment according to your evaluation note. This is used by the global
# evaluation report (RP0004).
comment=no
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
ignored-classes=SQLObject
# When zope mode is activated, add a predefined set of Zope acquired attributes
# to generated-members.
zope=no
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E0201 when accessed. Python regular
# expressions are accepted.
generated-members=REQUEST,acl_users,aq_parent
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=80
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the beginning of the name of dummy variables
# (i.e. not used).
dummy-variables-rgx=_|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
[BASIC]
# Required attributes for module, separated by a comma
required-attributes=
# List of builtins function names that should not be used, separated by a comma
bad-functions=apply,input
# Regular expression which should only match correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression which should only match correct module level names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__)|version)$
# Regular expression which should only match correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Regular expression which should only match correct function names
function-rgx=[a-z_][a-z0-9_]{0,30}$
# Regular expression which should only match correct method names
method-rgx=[a-z_][a-z0-9_]{0,30}$
# Regular expression which should only match correct instance attribute names
attr-rgx=[a-z_][a-z0-9_]{0,30}$
# Regular expression which should only match correct argument names
argument-rgx=[a-z_][a-z0-9_]{0,30}$
# Regular expression which should only match correct variable names
variable-rgx=[a-z_][a-z0-9_]{0,30}$
# Regular expression which should only match correct list comprehension /
# generator expression variable names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Regular expression which should only match functions or classes name which do
# not require a docstring
no-docstring-rgx=__.*__
[CLASSES]
# List of interface methods to ignore, separated by a comma. This is used for
# instance to not check methods defines in Zope's Interface base class.
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of branch for function / method body
max-branchs=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception

View File

@ -1,29 +0,0 @@
# Root
include README.txt
include setup.cfg
include setup.py
include versioneer.py
include Makefile
include .coveragerc
include .pylintrc
include requirements.txt
# Cython files -- include .pyx source, but not the generated .c files
# (Downstream systems must have cython installed in order to build)
recursive-include nilmdb/server *.pyx *.pyxdep *.pxd
exclude nilmdb/server/interval.c
exclude nilmdb/server/rbtree.c
# Version
include nilmdb/_version.py
# Tests
recursive-include tests *.py
recursive-include tests/data *
include tests/test.order
# Docs
recursive-include docs Makefile *.md
# Extras
recursive-include extras *

View File

@ -2,49 +2,45 @@
all: test all: test
version: version:
python3 setup.py version python setup.py version
build: build:
python3 setup.py build_ext --inplace python setup.py build_ext --inplace
dist: sdist dist: sdist
sdist: sdist:
python3 setup.py sdist python setup.py sdist
install: install:
python3 setup.py install python setup.py install
develop: develop:
python3 setup.py develop python setup.py develop
docs: docs:
make -C docs make -C docs
ctrl: flake
flake:
flake8 nilmdb
lint: lint:
pylint3 --rcfile=setup.cfg nilmdb pylint --rcfile=.pylintrc nilmdb
test: test:
ifneq ($(INSIDE_EMACS),) ifeq ($(INSIDE_EMACS), t)
# Use the slightly more flexible script # Use the slightly more flexible script
python3 setup.py build_ext --inplace python setup.py build_ext --inplace
python3 tests/runtests.py python tests/runtests.py
else else
# Let setup.py check dependencies, build stuff, and run the test # Let setup.py check dependencies, build stuff, and run the test
python3 setup.py nosetests python setup.py nosetests
endif endif
clean:: clean::
find . -name '*.pyc' -o -name '__pycache__' -print0 | xargs -0 rm -rf find . -name '*pyc' | xargs rm -f
rm -f .coverage rm -f .coverage
rm -rf tests/*testdb* rm -rf tests/*testdb*
rm -rf nilmdb.egg-info/ build/ nilmdb/server/*.so rm -rf nilmdb.egg-info/ build/ nilmdb/server/*.so MANIFEST.in
make -C docs clean make -C docs clean
gitclean:: gitclean::
git clean -dXf git clean -dXf
.PHONY: all version build dist sdist install docs test .PHONY: all version build dist sdist install docs lint test clean
.PHONY: ctrl lint flake clean gitclean

View File

@ -1,40 +0,0 @@
# nilmdb: Non-Intrusive Load Monitor Database
by Jim Paris <jim@jtan.com>
NilmDB requires Python 3.8 or newer.
## Prerequisites:
# Runtime and build environments
sudo apt install python3 python3-dev python3-venv python3-pip
# Create a new Python virtual environment to isolate deps.
python3 -m venv ../venv
source ../venv/bin/activate # run "deactivate" to leave
# Install all Python dependencies
pip3 install -r requirements.txt
## Test:
python3 setup.py nosetests
## Install:
Install it into the virtual environment
python3 setup.py install
If you want to instead install it system-wide, you will also need to
install the requirements system-wide:
sudo pip3 install -r requirements.txt
sudo python3 setup.py install
## Usage:
nilmdb-server --help
nilmdb-fsck --help
nilmtool --help
See docs/wsgi.md for info on setting up a WSGI application in Apache.

26
README.txt Normal file
View File

@ -0,0 +1,26 @@
nilmdb: Non-Intrusive Load Monitor Database
by Jim Paris <jim@jtan.com>
Prerequisites:
# Runtime and build environments
sudo apt-get install python2.7 python2.7-dev python-setuptools cython
# Base NilmDB dependencies
sudo apt-get install python-cherrypy3 python-decorator python-simplejson
sudo apt-get install python-requests python-dateutil python-tz python-psutil
# Tools for running tests
sudo apt-get install python-nose python-coverage
Test:
python setup.py nosetests
Install:
python setup.py install
Usage:
nilmdb-server --help
nilmtool --help

View File

@ -140,7 +140,7 @@ Speed
- Next slowdown target is nilmdb.layout.Parser.parse(). - Next slowdown target is nilmdb.layout.Parser.parse().
- Rewrote parsers using cython and sscanf - Rewrote parsers using cython and sscanf
- Stats (rev 10831), with `_add_interval` disabled - Stats (rev 10831), with _add_interval disabled
layout.pyx.Parser.parse:128 6303 sec, 262k calls layout.pyx.Parser.parse:128 6303 sec, 262k calls
layout.pyx.parse:63 13913 sec, 5.1g calls layout.pyx.parse:63 13913 sec, 5.1g calls
@ -186,19 +186,6 @@ IntervalSet speed
- rbtree and interval converted to cython: - rbtree and interval converted to cython:
8.4 μS, total 12 s, 134 MB RAM 8.4 μS, total 12 s, 134 MB RAM
- Would like to move Interval itself back to Python so other
non-cythonized code like client code can use it more easily.
Testing speed with just `test_interval` being tested, with
`range(5,22)`, using `/usr/bin/time -v python tests/runtests.py`,
times recorded for 2097152:
- 52ae397 (Interval in cython):
12.6133 μs each, ratio 0.866533, total 47 sec, 399 MB RAM
- 9759dcf (Interval in python):
21.2937 μs each, ratio 1.462870, total 83 sec, 1107 MB RAM
That's a huge difference! Instead, will keep Interval and DBInterval
cythonized inside nilmdb, and just have an additional copy in
nilmdb.utils for clients to use.
Layouts Layouts
------- -------
Current/old design has specific layouts: RawData, PrepData, RawNotchedData. Current/old design has specific layouts: RawData, PrepData, RawNotchedData.
@ -341,129 +328,3 @@ Current places where we use lines:
- Finished. Just a single insert() that takes any length string and - Finished. Just a single insert() that takes any length string and
does very little processing until it's time to send it to the does very little processing until it's time to send it to the
server. server.
Timestamps
----------
Timestamps are currently double-precision floats (64 bit). Since the
mantissa is 53-bit, this can only represent about 15-17 significant
figures, and microsecond Unix timestamps like 1222333444.000111 are
already 16 significant figures. Rounding is therefore an issue;
it's hard to sure that converting from ASCII, then back to ASCII,
will always give the same result.
Also, if the client provides a floating point value like 1.9999999999,
we need to be careful that we don't store it as 1.9999999999 but later
print it as 2.000000, because then round-trips change the data.
Possible solutions:
- When the client provides a floating point value to the server,
always round to the 6th decimal digit before verifying & storing.
Good for compatibility and simplicity. But still might have rounding
issues, and clients will also need to round when doing their own
verification. Having every piece of code need to know which digit
to round at is not ideal.
- Always store int64 timestamps on the server, representing
microseconds since epoch. int64 timestamps are used in all HTTP
parameters, in insert/extract ASCII strings, client API, commandline
raw timestamps, etc. Pretty big change.
This is what we'll go with...
- Client programs that interpret the timestamps as doubles instead
of ints will remain accurate until 2^53 microseconds, or year
2255.
- On insert, maybe it's OK to send floating point microsecond values
(1234567890123456.0), just to cope with clients that want to print
everything as a double. Server could try parsing as int64, and if
that fails, parse as double and truncate to int64. However, this
wouldn't catch imprecise inputs like "1.23456789012e+15". But
maybe that can just be ignored; it's likely to cause a
non-monotonic error at the client.
- Timestamps like 1234567890.123456 never show up anywhere, except
for interfacing to datetime_tz etc. Command line "raw timestamps"
are always printed as int64 values, and a new format
"@1234567890123456" is added to the parser for specifying them
exactly.
Binary interface
----------------
The ASCII interface is too slow for high-bandwidth processing, like
sinefits, prep, etc. A binary interface was added so that you can
extract the raw binary out of the bulkdata storage. This binary is
a little-endian format, e.g. in C a uint16_6 stream would be:
#include <endian.h>
#include <stdint.h>
struct {
int64_t timestamp_le;
uint16_t data_le[6];
} __attribute__((packed));
Remember to byteswap (with e.g. `letoh` in C)!
This interface is used by the new `nilmdb.client.numpyclient.NumpyClient`
class, which is a subclass of the normal `nilmcb.client.client.Client`
and has all of the same functions. It adds three new functions:
- `stream_extract_numpy` to extract data as a Numpy array
- `stream_insert_numpy` to insert data as a Numpy array
- `stream_insert_numpy_context` is the context manager for
incrementally inserting data
It is significantly faster! It is about 20 times faster to decimate a
stream with `nilm-decimate` when the filter code is using the new
binary/numpy interface.
WSGI interface & chunked requests
---------------------------------
mod_wsgi requires "WSGIChunkedRequest On" to handle
"Transfer-encoding: Chunked" requests. However, `/stream/insert`
doesn't handle this correctly right now, because:
- The `cherrypy.request.body.read()` call needs to be fixed for chunked requests
- We don't want to just buffer endlessly in the server, and it will
require some thought on how to handle data in chunks (what to do about
interval endpoints).
It is probably better to just keep the endpoint management on the client
side, so leave "WSGIChunkedRequest off" for now.
Unicode & character encoding
----------------------------
Stream data is passed back and forth as raw `bytes` objects in most
places, including the `nilmdb.client` and command-line interfaces.
This is done partially for performance reasons, and partially to
support the binary insert/extract options, where character-set encoding
would not apply.
For the HTTP server, the raw bytes transferred over HTTP are interpreted
as follows:
- For `/stream/insert`, the client-provided `Content-Type` is ignored,
and the data is read as if it were `application/octet-stream`.
- For `/stream/extract`, the returned data is `application/octet-stream`.
- All other endpoints communicate via JSON, which is specified to always
be encoded as UTF-8. This includes:
- `/version`
- `/dbinfo`
- `/stream/list`
- `/stream/create`
- `/stream/destroy`
- `/stream/rename`
- `/stream/get_metadata`
- `/stream/set_metadata`
- `/stream/update_metadata`
- `/stream/remove`
- `/stream/intervals`

View File

@ -1,32 +0,0 @@
WSGI Application in Apache
--------------------------
Install `apache2` and `libapache2-mod-wsgi`
We'll set up the database server at URL `http://myhost.com/nilmdb`.
The database will be stored in `/home/nilm/db`, and the process will
run as user `nilm`, group `nilm`.
First, create a WSGI script `/home/nilm/nilmdb.wsgi` containing:
import nilmdb.server
application = nilmdb.server.wsgi_application("/home/nilm/db", "/nilmdb")
The first parameter is the local filesystem path, and the second
parameter is the path part of the URL.
Then, set up Apache with a configuration like:
<VirtualHost>
WSGIScriptAlias /nilmdb /home/nilm/nilmdb.wsgi
WSGIDaemonProcess nilmdb-procgroup threads=32 user=nilm group=nilm
<Location /nilmdb>
WSGIProcessGroup nilmdb-procgroup
WSGIApplicationGroup nilmdb-appgroup
# Access control example:
Order deny,allow
Deny from all
Allow from 1.2.3.4
</Location>
</VirtualHost>

View File

@ -1,50 +0,0 @@
#!/usr/bin/env python3
import os
import sys
import pickle
import argparse
import fcntl
import re
from nilmdb.client.numpyclient import layout_to_dtype
parser = argparse.ArgumentParser(
description = """
Fix database corruption where binary writes caused too much data to be
written to the file. Truncates files to the correct length. This was
fixed by b98ff1331a515ad47fd3203615e835b529b039f9.
""")
parser.add_argument("path", action="store", help='Database root path')
parser.add_argument("-y", "--yes", action="store_true", help='Fix them')
args = parser.parse_args()
lock = os.path.join(args.path, "data.lock")
with open(lock, "w") as f:
fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
fix = {}
for (path, dirs, files) in os.walk(args.path):
if "_format" in files:
with open(os.path.join(path, "_format")) as format:
fmt = pickle.load(format)
rowsize = layout_to_dtype(fmt["layout"]).itemsize
maxsize = rowsize * fmt["rows_per_file"]
fix[path] = maxsize
if maxsize < 128000000: # sanity check
raise Exception("bad maxsize " + str(maxsize))
for fixpath in fix:
for (path, dirs, files) in os.walk(fixpath):
for fn in files:
if not re.match("^[0-9a-f]{4,}$", fn):
continue
fn = os.path.join(path, fn)
size = os.path.getsize(fn)
maxsize = fix[fixpath]
if size > maxsize:
diff = size - maxsize
print(diff, "too big:", fn)
if args.yes:
with open(fn, "a+") as dbfile:
dbfile.truncate(maxsize)

View File

@ -1,20 +0,0 @@
# To enable bash completion:
#
# 1. Ensure python-argcomplete is installed:
# pip install argcomplete
# 2. Source this file:
# . nilmtool-bash-completion.sh
_nilmtool_argcomplete() {
local IFS=$(printf "\013")
COMPREPLY=( $(IFS="$IFS" \
COMP_LINE="$COMP_LINE" \
COMP_WORDBREAKS="$COMP_WORDBREAKS" \
COMP_POINT="$COMP_POINT" \
_ARGCOMPLETE=1 \
"$1" 8>&1 9>&2 1>/dev/null 2>/dev/null) )
if [[ $? != 0 ]]; then
unset COMPREPLY
fi
}
complete -o nospace -F _nilmtool_argcomplete nilmtool

View File

@ -1,5 +1,10 @@
"""Main NilmDB import""" """Main NilmDB import"""
from ._version import get_versions # These aren't imported automatically, because loading the server
# stuff isn't always necessary.
#from nilmdb.server import NilmDB, Server
#from nilmdb.client import Client
from nilmdb._version import get_versions
__version__ = get_versions()['version'] __version__ = get_versions()['version']
del get_versions del get_versions

View File

@ -1,520 +1,197 @@
IN_LONG_VERSION_PY = True
# This file helps to compute a version number in source trees obtained from # This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag # git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build # feature). Distribution tarballs (build by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file # directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number. # that just contains the computed version number.
# This file is released into the public domain. Generated by # This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer) # versioneer-0.7+ (https://github.com/warner/python-versioneer)
# these strings will be replaced by git during git-archive
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess import subprocess
import sys import sys
def run_command(args, cwd=None, verbose=False):
def get_keywords(): try:
"""Get the keywords needed to look up the version information.""" # remember shell=False, so use git.cmd on windows, not just git
# these strings will be replaced by git during git-archive. p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd)
# setup.py/versioneer.py will grep for the variable names, so they must except EnvironmentError:
# each be defined on a line of their own. _version.py will just call e = sys.exc_info()[1]
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
git_date = "$Format:%ci$"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = "nilmdb-"
cfg.parentdir_prefix = "nilmdb-"
cfg.versionfile_source = "nilmdb/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose: if verbose:
print("unable to find command, tried %s" % (commands,)) print("unable to run %s" % args[0])
return None, None print(e)
return None
stdout = p.communicate()[0].strip() stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3: if sys.version >= '3':
stdout = stdout.decode() stdout = stdout.decode()
if p.returncode != 0: if p.returncode != 0:
if verbose: if verbose:
print("unable to run %s (error)" % dispcmd) print("unable to run %s (error)" % args[0])
print("stdout was %s" % stdout) return None
return None, p.returncode return stdout
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose): import sys
"""Try to determine the version from the parent directory name. import re
import os.path
Source tarballs conventionally unpack into a directory that includes both def get_expanded_variables(versionfile_source):
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these # the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py, # variables. When used from setup.py, we don't want to import
# so we do it with a regexp instead. This function is not used from # _version.py, so we do it with a regexp instead. This function is not
# _version.py. # used from _version.py.
keywords = {} variables = {}
try: try:
f = open(versionfile_abs, "r") for line in open(versionfile_source,"r").readlines():
for line in f.readlines():
if line.strip().startswith("git_refnames ="): if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line) mo = re.search(r'=\s*"(.*)"', line)
if mo: if mo:
keywords["refnames"] = mo.group(1) variables["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="): if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line) mo = re.search(r'=\s*"(.*)"', line)
if mo: if mo:
keywords["full"] = mo.group(1) variables["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError: except EnvironmentError:
pass pass
return keywords return variables
def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
@register_vcs_handler("git", "keywords") refnames = variables["refnames"].strip()
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"): if refnames.startswith("$Format"):
if verbose: if verbose:
print("keywords are unexpanded, not using") print("variables are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball") return {} # unexpanded, so not in an unpacked git-archive tarball
refs = set([r.strip() for r in refnames.strip("()").split(",")]) refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of for ref in list(refs):
# just "foo-1.0". If we see a "tag: " prefix, prefer those. if not re.search(r'\d', ref):
TAG = "tag: " if verbose:
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) print("discarding '%s', no digits" % ref)
if not tags: refs.discard(ref)
# Either we're using git < 1.8.3, or there really are no tags. We use # Assume all version tags have a digit. git's %d expansion
# a heuristic: assume all version tags have a digit. The old git %d # behaves like git log --decorate=short and strips out the
# expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us
# refs/heads/ and refs/tags/ prefixes that would let us distinguish # distinguish between branches and tags. By ignoring refnames
# between branches and tags. By ignoring refnames without digits, we # without digits, we filter out many common branch names like
# filter out many common branch names like "release" and # "release" and "stabilization", as well as "HEAD" and "master".
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose: if verbose:
print("likely tags: %s" % ",".join(sorted(tags))) print("remaining refs: %s" % ",".join(sorted(refs)))
for ref in sorted(tags): for ref in sorted(refs):
# sorting will prefer e.g. "2.0" over "2.0rc1" # sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix): if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):] r = ref[len(tag_prefix):]
if verbose: if verbose:
print("picking %s" % r) print("picking %s" % r)
return {"version": r, return { "version": r,
"full-revisionid": keywords["full"].strip(), "full": variables["full"].strip() }
"dirty": False, "error": None, # no suitable tags, so we use the full revision id
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose: if verbose:
print("no suitable tags, using unknown + full revision id") print("no suitable tags, using full revision id")
return {"version": "0+unknown", return { "version": variables["full"].strip(),
"full-revisionid": keywords["full"].strip(), "full": variables["full"].strip() }
"dirty": False, "error": "no suitable tags", "date": None}
def versions_from_vcs(tag_prefix, versionfile_source, verbose=False):
@register_vcs_handler("git", "pieces_from_vcs") # this runs 'git' from the root of the source tree. That either means
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # someone ran a setup.py command (and this code is in versioneer.py, so
"""Get version from 'git describe' in the root of the source tree. # IN_LONG_VERSION_PY=False, thus the containing directory is the root of
# the source tree), or someone ran a project-specific entry point (and
This only gets called if the git-archive 'subst' keywords were *not* # this code is in _version.py, so IN_LONG_VERSION_PY=True, thus the
expanded, and _version.py hasn't already been rewritten with a short # containing directory is somewhere deeper in the source tree). This only
version string, meaning we're inside a checked out source tree. # gets called if the git-archive 'subst' variables were *not* expanded,
""" # and _version.py hasn't already been rewritten with a short version
GITS = ["git"] # string, meaning we're inside a checked out source tree.
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Eexceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try: try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, here = os.path.abspath(__file__)
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError: except NameError:
return {"version": "0+unknown", "full-revisionid": None, # some py2exe/bbfreeze/non-CPython implementations don't do __file__
"dirty": None, return {} # not always correct
"error": "unable to find root of source tree",
"date": None}
try: # versionfile_source is the relative path from the top of the source tree
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) # (where the .git directory might live) to this file. Invert this to find
return render(pieces, cfg.style) # the root from __file__.
except NotThisMethod: root = here
pass if IN_LONG_VERSION_PY:
for i in range(len(versionfile_source.split("/"))):
root = os.path.dirname(root)
else:
root = os.path.dirname(here)
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %s" % root)
return {}
try: GIT = "git"
if cfg.parentdir_prefix: if sys.platform == "win32":
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) GIT = "git.cmd"
except NotThisMethod: stdout = run_command([GIT, "describe", "--tags", "--dirty", "--always"],
pass cwd=root)
if stdout is None:
return {}
if not stdout.startswith(tag_prefix):
if verbose:
print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
return {}
tag = stdout[len(tag_prefix):]
stdout = run_command([GIT, "rev-parse", "HEAD"], cwd=root)
if stdout is None:
return {}
full = stdout.strip()
if tag.endswith("-dirty"):
full += "-dirty"
return {"version": tag, "full": full}
def versions_from_parentdir(parentdir_prefix, versionfile_source, verbose=False):
if IN_LONG_VERSION_PY:
# We're running from _version.py. If it's from a source tree
# (execute-in-place), we can work upwards to find the root of the
# tree, and then check the parent directory for a version string. If
# it's in an installed application, there's no hope.
try:
here = os.path.abspath(__file__)
except NameError:
# py2exe/bbfreeze/non-CPython don't have __file__
return {} # without __file__, we have no hope
# versionfile_source is the relative path from the top of the source
# tree to _version.py. Invert this to find the root from __file__.
root = here
for i in range(len(versionfile_source.split("/"))):
root = os.path.dirname(root)
else:
# we're running from versioneer.py, which means we're running from
# the setup.py in a source tree. sys.argv[0] is setup.py in the root.
here = os.path.abspath(sys.argv[0])
root = os.path.dirname(here)
# Source tarballs conventionally unpack into a directory that includes
# both the project name and a version string.
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
(root, dirname, parentdir_prefix))
return None
return {"version": dirname[len(parentdir_prefix):], "full": ""}
tag_prefix = "nilmdb-"
parentdir_prefix = "nilmdb-"
versionfile_source = "nilmdb/_version.py"
def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
variables = { "refnames": git_refnames, "full": git_full }
ver = versions_from_expanded_variables(variables, tag_prefix, verbose)
if not ver:
ver = versions_from_vcs(tag_prefix, versionfile_source, verbose)
if not ver:
ver = versions_from_parentdir(parentdir_prefix, versionfile_source,
verbose)
if not ver:
ver = default
return ver
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}

View File

@ -2,24 +2,24 @@
"""Class for performing HTTP client requests via libcurl""" """Class for performing HTTP client requests via libcurl"""
import json
import contextlib
import nilmdb.utils import nilmdb.utils
import nilmdb.client.httpclient import nilmdb.client.httpclient
from nilmdb.client.errors import ClientError from nilmdb.client.errors import ClientError
from nilmdb.utils.time import timestamp_to_string, string_to_timestamp
import time
import simplejson as json
import contextlib
from nilmdb.utils.time import float_time_to_string
def extract_timestamp(line): def extract_timestamp(line):
"""Extract just the timestamp from a line of data text""" """Extract just the timestamp from a line of data text"""
return string_to_timestamp(line.split()[0]) return float(line.split()[0])
class Client(object):
class Client():
"""Main client interface to the Nilm database.""" """Main client interface to the Nilm database."""
def __init__(self, url, post_json=False): def __init__(self, url, post_json = False):
"""Initialize client with given URL. If post_json is true, """Initialize client with given URL. If post_json is true,
POST requests are sent with Content-Type 'application/json' POST requests are sent with Content-Type 'application/json'
instead of the default 'x-www-form-urlencoded'.""" instead of the default 'x-www-form-urlencoded'."""
@ -38,7 +38,7 @@ class Client():
if self.post_json: if self.post_json:
# If we're posting as JSON, we don't need to encode it further here # If we're posting as JSON, we don't need to encode it further here
return data return data
return json.dumps(data, separators=(',', ':')) return json.dumps(data, separators=(',',':'))
def close(self): def close(self):
"""Close the connection; safe to call multiple times""" """Close the connection; safe to call multiple times"""
@ -57,12 +57,7 @@ class Client():
as a dictionary.""" as a dictionary."""
return self.http.get("dbinfo") return self.http.get("dbinfo")
def stream_list(self, path=None, layout=None, extended=False): def stream_list(self, path = None, layout = None, extended = False):
"""Return a sorted list of [path, layout] lists. If 'path' or
'layout' are specified, only return streams that match those
exact values. If 'extended' is True, the returned lists have
extended info, e.g.: [path, layout, extent_min, extent_max,
total_rows, total_seconds."""
params = {} params = {}
if path is not None: if path is not None:
params["path"] = path params["path"] = path
@ -70,12 +65,10 @@ class Client():
params["layout"] = layout params["layout"] = layout
if extended: if extended:
params["extended"] = 1 params["extended"] = 1
streams = self.http.get("stream/list", params) return self.http.get("stream/list", params)
return nilmdb.utils.sort.sort_human(streams, key=lambda s: s[0])
def stream_get_metadata(self, path, keys=None): def stream_get_metadata(self, path, keys = None):
"""Get stream metadata""" params = { "path": path }
params = {"path": path}
if keys is not None: if keys is not None:
params["key"] = keys params["key"] = keys
return self.http.get("stream/get_metadata", params) return self.http.get("stream/get_metadata", params)
@ -86,7 +79,7 @@ class Client():
params = { params = {
"path": path, "path": path,
"data": self._json_post_param(data) "data": self._json_post_param(data)
} }
return self.http.post("stream/set_metadata", params) return self.http.post("stream/set_metadata", params)
def stream_update_metadata(self, path, data): def stream_update_metadata(self, path, data):
@ -94,162 +87,108 @@ class Client():
params = { params = {
"path": path, "path": path,
"data": self._json_post_param(data) "data": self._json_post_param(data)
} }
return self.http.post("stream/update_metadata", params) return self.http.post("stream/update_metadata", params)
def stream_create(self, path, layout): def stream_create(self, path, layout):
"""Create a new stream""" """Create a new stream"""
params = { params = { "path": path,
"path": path, "layout" : layout }
"layout": layout
}
return self.http.post("stream/create", params) return self.http.post("stream/create", params)
def stream_destroy(self, path): def stream_destroy(self, path):
"""Delete stream. Fails if any data is still present.""" """Delete stream and its contents"""
params = { params = { "path": path }
"path": path
}
return self.http.post("stream/destroy", params) return self.http.post("stream/destroy", params)
def stream_rename(self, oldpath, newpath): def stream_remove(self, path, start = None, end = None):
"""Rename a stream."""
params = {
"oldpath": oldpath,
"newpath": newpath
}
return self.http.post("stream/rename", params)
def stream_remove(self, path, start=None, end=None):
"""Remove data from the specified time range""" """Remove data from the specified time range"""
params = { params = {
"path": path "path": path
} }
if start is not None: if start is not None:
params["start"] = timestamp_to_string(start) params["start"] = float_time_to_string(start)
if end is not None: if end is not None:
params["end"] = timestamp_to_string(end) params["end"] = float_time_to_string(end)
total = 0 return self.http.post("stream/remove", params)
for count in self.http.post_gen("stream/remove", params):
total += int(count)
return total
@contextlib.contextmanager @contextlib.contextmanager
def stream_insert_context(self, path, start=None, end=None): def stream_insert_context(self, path, start = None, end = None):
"""Return a context manager that allows data to be efficiently """Return a context manager that allows data to be efficiently
inserted into a stream in a piecewise manner. Data is inserted into a stream in a piecewise manner. Data is be provided
provided as ASCII lines, and is aggregated and sent to the as single lines, and is aggregated and sent to the server in larger
server in larger or smaller chunks as necessary. Data lines chunks as necessary. Data lines must match the database layout for
must match the database layout for the given path, and end the given path, and end with a newline.
with a newline.
Example: Example:
with client.stream_insert_context('/path', start, end) as ctx: with client.stream_insert_context('/path', start, end) as ctx:
ctx.insert('1234567890000000 1 2 3 4\\n') ctx.insert('1234567890.0 1 2 3 4\\n')
ctx.insert('1234567891000000 1 2 3 4\\n') ctx.insert('1234567891.0 1 2 3 4\\n')
For more details, see help for nilmdb.client.client.StreamInserter For more details, see help for nilmdb.client.client.StreamInserter
This may make multiple requests to the server, if the data is This may make multiple requests to the server, if the data is
large enough or enough time has passed between insertions. large enough or enough time has passed between insertions.
""" """
ctx = StreamInserter(self, path, start, end) ctx = StreamInserter(self.http, path, start, end)
yield ctx yield ctx
ctx.finalize() ctx.finalize()
ctx.destroy()
def stream_insert(self, path, data, start=None, end=None): def stream_insert(self, path, data, start = None, end = None):
"""Insert rows of data into a stream. data should be a string """Insert rows of data into a stream. data should be a string
or iterable that provides ASCII data that matches the database or iterable that provides ASCII data that matches the database
layout for path. Data is passed through stream_insert_context, layout for path. See stream_insert_context for details on the
so it will be broken into reasonably-sized chunks and 'start' and 'end' parameters."""
start/end will be deduced if missing."""
with self.stream_insert_context(path, start, end) as ctx: with self.stream_insert_context(path, start, end) as ctx:
if isinstance(data, bytes): if isinstance(data, basestring):
ctx.insert(data) ctx.insert(data)
else: else:
for chunk in data: for chunk in data:
ctx.insert(chunk) ctx.insert(chunk)
return ctx.last_response return ctx.last_response
def stream_insert_block(self, path, data, start, end, binary=False): def stream_intervals(self, path, start = None, end = None):
"""Insert a single fixed block of data into the stream. It is
sent directly to the server in one block with no further
processing.
If 'binary' is True, provide raw binary data in little-endian
format matching the path layout, including an int64 timestamp.
Otherwise, provide ASCII data matching the layout."""
params = {
"path": path,
"start": timestamp_to_string(start),
"end": timestamp_to_string(end),
}
if binary:
params["binary"] = 1
return self.http.put("stream/insert", data, params)
def stream_intervals(self, path, start=None, end=None, diffpath=None):
""" """
Return a generator that yields each stream interval. Return a generator that yields each stream interval.
If 'diffpath' is not None, yields only interval ranges that are
present in 'path' but not in 'diffpath'.
""" """
params = { params = {
"path": path "path": path
} }
if diffpath is not None:
params["diffpath"] = diffpath
if start is not None: if start is not None:
params["start"] = timestamp_to_string(start) params["start"] = float_time_to_string(start)
if end is not None: if end is not None:
params["end"] = timestamp_to_string(end) params["end"] = float_time_to_string(end)
return self.http.get_gen("stream/intervals", params) return self.http.get_gen("stream/intervals", params)
def stream_extract(self, path, start=None, end=None, def stream_extract(self, path, start = None, end = None, count = False):
count=False, markup=False, binary=False):
""" """
Extract data from a stream. Returns a generator that yields Extract data from a stream. Returns a generator that yields
lines of ASCII-formatted data that matches the database lines of ASCII-formatted data that matches the database
layout for the given path. layout for the given path.
If 'count' is True, return a count of matching data points Specify count = True to return a count of matching data points
rather than the actual data. The output format is unchanged. rather than the actual data. The output format is unchanged.
If 'markup' is True, include comments in the returned data
that indicate interval starts and ends.
If 'binary' is True, return chunks of raw binary data, rather
than lines of ASCII-formatted data. Raw binary data is
little-endian and matches the database types (including an
int64 timestamp).
""" """
params = { params = {
"path": path, "path": path,
} }
if start is not None: if start is not None:
params["start"] = timestamp_to_string(start) params["start"] = float_time_to_string(start)
if end is not None: if end is not None:
params["end"] = timestamp_to_string(end) params["end"] = float_time_to_string(end)
if count: if count:
params["count"] = 1 params["count"] = 1
if markup: return self.http.get_gen("stream/extract", params)
params["markup"] = 1
if binary:
params["binary"] = 1
return self.http.get_gen("stream/extract", params, binary=binary)
def stream_count(self, path, start=None, end=None): def stream_count(self, path, start = None, end = None):
""" """
Return the number of rows of data in the stream that satisfy Return the number of rows of data in the stream that satisfy
the given timestamps. the given timestamps.
""" """
counts = list(self.stream_extract(path, start, end, count=True)) counts = list(self.stream_extract(path, start, end, count = True))
return int(counts[0]) return int(counts[0])
class StreamInserter(object):
class StreamInserter():
"""Object returned by stream_insert_context() that manages """Object returned by stream_insert_context() that manages
the insertion of rows of data into a particular path. the insertion of rows of data into a particular path.
@ -286,15 +225,17 @@ class StreamInserter():
# See design.md for a discussion of how much data to send. This # See design.md for a discussion of how much data to send. This
# is a soft limit -- we might send up to twice as much or so # is a soft limit -- we might send up to twice as much or so
_max_data = 2 * 1024 * 1024 _max_data = 2 * 1024 * 1024
_max_data_after_send = 64 * 1024
def __init__(self, client, path, start, end): # Delta to add to the final timestamp, if "end" wasn't given
"""'client' is the client object. 'path' is the database _end_epsilon = 1e-6
def __init__(self, http, path, start = None, end = None):
"""'http' is the httpclient object. 'path' is the database
path to insert to. 'start' and 'end' are used for the first path to insert to. 'start' and 'end' are used for the first
contiguous interval and may be None.""" contiguous interval."""
self.last_response = None self.last_response = None
self._client = client self._http = http
self._path = path self._path = path
# Start and end for the overall contiguous interval we're # Start and end for the overall contiguous interval we're
@ -307,15 +248,6 @@ class StreamInserter():
self._block_data = [] self._block_data = []
self._block_len = 0 self._block_len = 0
self.destroyed = False
def destroy(self):
"""Ensure this object can't be used again without raising
an error"""
def error(*args, **kwargs):
raise Exception("don't reuse this context object")
self._send_block = self.insert = self.finalize = self.send = error
def insert(self, data): def insert(self, data):
"""Insert a chunk of ASCII formatted data in string form. The """Insert a chunk of ASCII formatted data in string form. The
overall data must consist of lines terminated by '\\n'.""" overall data must consist of lines terminated by '\\n'."""
@ -337,11 +269,7 @@ class StreamInserter():
# Send the block once we have enough data # Send the block once we have enough data
if self._block_len >= maxdata: if self._block_len >= maxdata:
self._send_block(final=False) self._send_block(final = False)
if self._block_len >= self._max_data_after_send:
raise ValueError("too much data left over after trying"
" to send intermediate block; is it"
" missing newlines or malformed?")
def update_start(self, start): def update_start(self, start):
"""Update the start time for the next contiguous interval. """Update the start time for the next contiguous interval.
@ -364,12 +292,7 @@ class StreamInserter():
If more data is inserted after a finalize(), it will become If more data is inserted after a finalize(), it will become
part of a new interval and there may be a gap left in-between.""" part of a new interval and there may be a gap left in-between."""
self._send_block(final=True) self._send_block(final = True)
def send(self):
"""Send any data that we might have buffered up. Does not affect
any other treatment of timestamps or endpoints."""
self._send_block(final=False)
def _get_first_noncomment(self, block): def _get_first_noncomment(self, block):
"""Return the (start, end) indices of the first full line in """Return the (start, end) indices of the first full line in
@ -377,10 +300,10 @@ class StreamInserter():
there isn't one.""" there isn't one."""
start = 0 start = 0
while True: while True:
end = block.find(b'\n', start) end = block.find('\n', start)
if end < 0: if end < 0:
raise IndexError raise IndexError
if block[start] != b'#'[0]: if block[start] != '#':
return (start, (end + 1)) return (start, (end + 1))
start = end + 1 start = end + 1
@ -388,22 +311,22 @@ class StreamInserter():
"""Return the (start, end) indices of the last full line in """Return the (start, end) indices of the last full line in
block[:length] that isn't a comment, or raise IndexError if block[:length] that isn't a comment, or raise IndexError if
there isn't one.""" there isn't one."""
end = block.rfind(b'\n') end = block.rfind('\n')
if end <= 0: if end <= 0:
raise IndexError raise IndexError
while True: while True:
start = block.rfind(b'\n', 0, end) start = block.rfind('\n', 0, end)
if block[start + 1] != b'#'[0]: if block[start + 1] != '#':
return ((start + 1), end) return ((start + 1), end)
if start == -1: if start == -1:
raise IndexError raise IndexError
end = start end = start
def _send_block(self, final=False): def _send_block(self, final = False):
"""Send data currently in the block. The data sent will """Send data currently in the block. The data sent will
consist of full lines only, so some might be left over.""" consist of full lines only, so some might be left over."""
# Build the full string to send # Build the full string to send
block = b"".join(self._block_data) block = "".join(self._block_data)
start_ts = self._interval_start start_ts = self._interval_start
if start_ts is None: if start_ts is None:
@ -412,7 +335,7 @@ class StreamInserter():
(spos, epos) = self._get_first_noncomment(block) (spos, epos) = self._get_first_noncomment(block)
start_ts = extract_timestamp(block[spos:epos]) start_ts = extract_timestamp(block[spos:epos])
except (ValueError, IndexError): except (ValueError, IndexError):
pass # no timestamp is OK, if we have no data pass # no timestamp is OK, if we have no data
if final: if final:
# For a final block, it must end in a newline, and the # For a final block, it must end in a newline, and the
@ -420,14 +343,14 @@ class StreamInserter():
# or the timestamp of the last line plus epsilon. # or the timestamp of the last line plus epsilon.
end_ts = self._interval_end end_ts = self._interval_end
try: try:
if block[-1] != b'\n'[0]: if block[-1] != '\n':
raise ValueError("final block didn't end with a newline") raise ValueError("final block didn't end with a newline")
if end_ts is None: if end_ts is None:
(spos, epos) = self._get_last_noncomment(block) (spos, epos) = self._get_last_noncomment(block)
end_ts = extract_timestamp(block[spos:epos]) end_ts = extract_timestamp(block[spos:epos])
end_ts += nilmdb.utils.time.epsilon end_ts += self._end_epsilon
except (ValueError, IndexError): except (ValueError, IndexError):
pass # no timestamp is OK, if we have no data pass # no timestamp is OK, if we have no data
self._block_data = [] self._block_data = []
self._block_len = 0 self._block_len = 0
@ -443,7 +366,7 @@ class StreamInserter():
(spos, epos) = self._get_last_noncomment(block) (spos, epos) = self._get_last_noncomment(block)
end_ts = extract_timestamp(block[spos:epos]) end_ts = extract_timestamp(block[spos:epos])
except (ValueError, IndexError): except (ValueError, IndexError):
# If we found no timestamp, give up; we could send this # If we found no timestamp, give up; we'll send this
# block later when we have more data. # block later when we have more data.
return return
if spos == 0: if spos == 0:
@ -454,7 +377,7 @@ class StreamInserter():
# the server complain so that the error is the same # the server complain so that the error is the same
# as if we hadn't done this chunking. # as if we hadn't done this chunking.
end_ts = self._interval_end end_ts = self._interval_end
self._block_data = [block[spos:]] self._block_data = [ block[spos:] ]
self._block_len = (epos - spos) self._block_len = (epos - spos)
block = block[:spos] block = block[:spos]
@ -462,7 +385,7 @@ class StreamInserter():
self._interval_start = end_ts self._interval_start = end_ts
# Double check endpoints # Double check endpoints
if (start_ts is None or end_ts is None) or (start_ts == end_ts): if start_ts is None or end_ts is None:
# If the block has no non-comment lines, it's OK # If the block has no non-comment lines, it's OK
try: try:
self._get_first_noncomment(block) self._get_first_noncomment(block)
@ -471,7 +394,7 @@ class StreamInserter():
raise ClientError("have data to send, but no start/end times") raise ClientError("have data to send, but no start/end times")
# Send it # Send it
self.last_response = self._client.stream_insert_block( params = { "path": self._path,
self._path, block, start_ts, end_ts, binary=False) "start": float_time_to_string(start_ts),
"end": float_time_to_string(end_ts) }
return self.last_response = self._http.put("stream/insert", block, params)

View File

@ -1,41 +1,33 @@
"""HTTP client errors""" """HTTP client errors"""
from nilmdb.utils.printf import sprintf from nilmdb.utils.printf import *
class Error(Exception): class Error(Exception):
"""Base exception for both ClientError and ServerError responses""" """Base exception for both ClientError and ServerError responses"""
def __init__(self, def __init__(self,
status="Unspecified error", status = "Unspecified error",
message=None, message = None,
url=None, url = None,
traceback=None): traceback = None):
super().__init__(status) Exception.__init__(self, status)
self.status = status # e.g. "400 Bad Request" self.status = status # e.g. "400 Bad Request"
self.message = message # textual message from the server self.message = message # textual message from the server
self.url = url # URL we were requesting self.url = url # URL we were requesting
self.traceback = traceback # server traceback, if available self.traceback = traceback # server traceback, if available
def _format_error(self, show_url): def _format_error(self, show_url):
s = sprintf("[%s]", self.status) s = sprintf("[%s]", self.status)
if self.message: if self.message:
s += sprintf(" %s", self.message) s += sprintf(" %s", self.message)
if show_url and self.url: if show_url and self.url: # pragma: no cover
s += sprintf(" (%s)", self.url) s += sprintf(" (%s)", self.url)
if self.traceback: if self.traceback: # pragma: no cover
s += sprintf("\nServer traceback:\n%s", self.traceback) s += sprintf("\nServer traceback:\n%s", self.traceback)
return s return s
def __str__(self): def __str__(self):
return self._format_error(show_url=False) return self._format_error(show_url = False)
def __repr__(self): # pragma: no cover
def __repr__(self): return self._format_error(show_url = True)
return self._format_error(show_url=True)
class ClientError(Error): class ClientError(Error):
pass pass
class ServerError(Error): class ServerError(Error):
pass pass

View File

@ -1,25 +1,26 @@
"""HTTP client library""" """HTTP client library"""
import json import nilmdb.utils
import urllib.parse
import requests
from nilmdb.client.errors import ClientError, ServerError, Error from nilmdb.client.errors import ClientError, ServerError, Error
import simplejson as json
import urlparse
import requests
class HTTPClient(): class HTTPClient(object):
"""Class to manage and perform HTTP requests from the client""" """Class to manage and perform HTTP requests from the client"""
def __init__(self, baseurl="", post_json=False, verify_ssl=True): def __init__(self, baseurl = "", post_json = False):
"""If baseurl is supplied, all other functions that take """If baseurl is supplied, all other functions that take
a URL can be given a relative URL instead.""" a URL can be given a relative URL instead."""
# Verify / clean up URL # Verify / clean up URL
reparsed = urllib.parse.urlparse(baseurl).geturl() reparsed = urlparse.urlparse(baseurl).geturl()
if '://' not in reparsed: if '://' not in reparsed:
reparsed = urllib.parse.urlparse("http://" + baseurl).geturl() reparsed = urlparse.urlparse("http://" + baseurl).geturl()
self.baseurl = reparsed.rstrip('/') + '/' self.baseurl = reparsed
# Note whether we want SSL verification # Build Requests session object, enable SSL verification
self.verify_ssl = verify_ssl self.session = requests.Session()
self.session.verify = True
# Saved response, so that tests can verify a few things. # Saved response, so that tests can verify a few things.
self._last_response = {} self._last_response = {}
@ -32,64 +33,44 @@ class HTTPClient():
# Default variables for exception. We use the entire body as # Default variables for exception. We use the entire body as
# the default message, in case we can't extract it from a JSON # the default message, in case we can't extract it from a JSON
# response. # response.
args = { args = { "url" : url,
"url": url, "status" : str(code),
"status": str(code), "message" : body,
"message": body, "traceback" : None }
"traceback": None
}
try: try:
# Fill with server-provided data if we can # Fill with server-provided data if we can
jsonerror = json.loads(body) jsonerror = json.loads(body)
args["status"] = jsonerror["status"] args["status"] = jsonerror["status"]
args["message"] = jsonerror["message"] args["message"] = jsonerror["message"]
args["traceback"] = jsonerror["traceback"] args["traceback"] = jsonerror["traceback"]
except Exception: except Exception: # pragma: no cover
pass pass
if 400 <= code <= 499: if code >= 400 and code <= 499:
raise ClientError(**args) raise ClientError(**args)
else: else: # pragma: no cover
if 500 <= code <= 599: if code >= 500 and code <= 599:
if args["message"] is None: if args["message"] is None:
args["message"] = ("(no message; try disabling " args["message"] = ("(no message; try disabling " +
"response.stream option in " "response.stream option in " +
"nilmdb.server for better debugging)") "nilmdb.server for better debugging)")
raise ServerError(**args) raise ServerError(**args)
else: else:
raise Error(**args) raise Error(**args)
def close(self): def close(self):
pass self.session.close()
def _do_req(self, method, url, query_data, body_data, stream, headers): def _do_req(self, method, url, query_data, body_data, stream, headers):
url = urllib.parse.urljoin(self.baseurl, url) url = urlparse.urljoin(self.baseurl, url)
try: try:
# Create a new session, ensure we send "Connection: close", response = self.session.request(method, url,
# and explicitly close connection after the transfer. params = query_data,
# This is to avoid HTTP/1.1 persistent connections data = body_data,
# (keepalive), because they have fundamental race stream = stream,
# conditions when there are delays between requests: headers = headers)
# a new request may be sent at the same instant that the
# server decides to timeout the connection.
session = requests.Session()
if headers is None:
headers = {}
headers["Connection"] = "close"
response = session.request(method, url,
params=query_data,
data=body_data,
stream=stream,
headers=headers,
verify=self.verify_ssl)
# Close the connection. If it's a generator (stream =
# True), the requests library shouldn't actually close the
# HTTP connection until all data has been read from the
# response.
session.close()
except requests.RequestException as e: except requests.RequestException as e:
raise ServerError(status="502 Error", url=url, raise ServerError(status = "502 Error", url = url,
message=str(e)) message = str(e.message))
if response.status_code != 200: if response.status_code != 200:
self._handle_error(url, response.status_code, response.content) self._handle_error(url, response.status_code, response.content)
self._last_response = response self._last_response = response
@ -100,90 +81,53 @@ class HTTPClient():
return (response, False) return (response, False)
# Normal versions that return data directly # Normal versions that return data directly
def _req(self, method, url, query=None, body=None, headers=None): def _req(self, method, url, query = None, body = None, headers = None):
""" """
Make a request and return the body data as a string or parsed Make a request and return the body data as a string or parsed
JSON object, or raise an error if it contained an error. JSON object, or raise an error if it contained an error.
""" """
(response, isjson) = self._do_req(method, url, query, body, (response, isjson) = self._do_req(method, url, query, body,
stream=False, headers=headers) stream = False, headers = headers)
if isjson: if isjson:
return json.loads(response.content) return json.loads(response.content)
return response.text return response.content
def get(self, url, params=None): def get(self, url, params = None):
"""Simple GET (parameters in URL)""" """Simple GET (parameters in URL)"""
return self._req("GET", url, params, None) return self._req("GET", url, params, None)
def post(self, url, params=None): def post(self, url, params = None):
"""Simple POST (parameters in body)""" """Simple POST (parameters in body)"""
if self.post_json: if self.post_json:
return self._req("POST", url, None, return self._req("POST", url, None,
json.dumps(params), json.dumps(params),
{'Content-type': 'application/json'}) { 'Content-type': 'application/json' })
else: else:
return self._req("POST", url, None, params) return self._req("POST", url, None, params)
def put(self, url, data, params=None, def put(self, url, data, params = None):
content_type="application/octet-stream"):
"""Simple PUT (parameters in URL, data in body)""" """Simple PUT (parameters in URL, data in body)"""
h = {'Content-type': content_type} return self._req("PUT", url, params, data)
return self._req("PUT", url, query=params, body=data, headers=h)
# Generator versions that return data one line at a time. # Generator versions that return data one line at a time.
def _req_gen(self, method, url, query=None, body=None, def _req_gen(self, method, url, query = None, body = None, headers = None):
headers=None, binary=False):
""" """
Make a request and return a generator that gives back strings Make a request and return a generator that gives back strings
or JSON decoded lines of the body data, or raise an error if or JSON decoded lines of the body data, or raise an error if
it contained an eror. it contained an eror.
""" """
(response, isjson) = self._do_req(method, url, query, body, (response, isjson) = self._do_req(method, url, query, body,
stream=True, headers=headers) stream = True, headers = headers)
if isjson:
# Like the iter_lines function in Requests, but only splits on for line in response.iter_lines():
# the specified line ending.
def lines(source, ending):
pending = None
for chunk in source:
if pending is not None:
chunk = pending + chunk
tmp = chunk.split(ending)
lines = tmp[:-1]
if chunk.endswith(ending):
pending = None
else:
pending = tmp[-1]
for line in lines:
yield line
if pending is not None:
yield pending
# Yield the chunks or lines as requested
if binary:
for chunk in response.iter_content(chunk_size=65536):
yield chunk
elif isjson:
for line in lines(response.iter_content(chunk_size=1),
ending=b'\r\n'):
yield json.loads(line) yield json.loads(line)
else: else:
for line in lines(response.iter_content(chunk_size=65536), for line in response.iter_lines():
ending=b'\n'):
yield line yield line
def get_gen(self, url, params=None, binary=False): def get_gen(self, url, params = None):
"""Simple GET (parameters in URL) returning a generator""" """Simple GET (parameters in URL) returning a generator"""
return self._req_gen("GET", url, params, binary=binary) return self._req_gen("GET", url, params)
def post_gen(self, url, params=None):
"""Simple POST (parameters in body) returning a generator"""
if self.post_json:
return self._req_gen("POST", url, None,
json.dumps(params),
{'Content-type': 'application/json'})
else:
return self._req_gen("POST", url, None, params)
# Not much use for a POST or PUT generator, since they don't # Not much use for a POST or PUT generator, since they don't
# return much data. # return much data.

View File

@ -1,263 +0,0 @@
# -*- coding: utf-8 -*-
"""Provide a NumpyClient class that is based on normal Client, but has
additional methods for extracting and inserting data via Numpy arrays."""
import contextlib
import numpy
import nilmdb.utils
import nilmdb.client.client
import nilmdb.client.httpclient
from nilmdb.client.errors import ClientError
def layout_to_dtype(layout):
ltype = layout.split('_')[0]
lcount = int(layout.split('_')[1])
if ltype.startswith('int'):
atype = '<i' + str(int(ltype[3:]) // 8)
elif ltype.startswith('uint'):
atype = '<u' + str(int(ltype[4:]) // 8)
elif ltype.startswith('float'):
atype = '<f' + str(int(ltype[5:]) // 8)
else:
raise ValueError("bad layout")
if lcount == 1:
dtype = [('timestamp', '<i8'), ('data', atype)]
else:
dtype = [('timestamp', '<i8'), ('data', atype, lcount)]
return numpy.dtype(dtype)
class NumpyClient(nilmdb.client.client.Client):
"""Subclass of nilmdb.client.Client that adds additional methods for
extracting and inserting data via Numpy arrays."""
def _get_dtype(self, path, layout):
if layout is None:
streams = self.stream_list(path)
if len(streams) != 1:
raise ClientError("can't get layout for path: " + path)
layout = streams[0][1]
return layout_to_dtype(layout)
def stream_extract_numpy(self, path, start=None, end=None,
layout=None, maxrows=100000,
structured=False):
"""
Extract data from a stream. Returns a generator that yields
Numpy arrays of up to 'maxrows' of data each.
If 'layout' is None, it is read using stream_info.
If 'structured' is False, all data is converted to float64
and returned in a flat 2D array. Otherwise, data is returned
as a structured dtype in a 1D array.
"""
dtype = self._get_dtype(path, layout)
def to_numpy(data):
a = numpy.frombuffer(data, dtype)
if structured:
return a
return numpy.c_[a['timestamp'], a['data']]
chunks = []
total_len = 0
maxsize = dtype.itemsize * maxrows
for data in self.stream_extract(path, start, end, binary=True):
# Add this block of binary data
chunks.append(data)
total_len += len(data)
# See if we have enough to make the requested Numpy array
while total_len >= maxsize:
assembled = b"".join(chunks)
total_len -= maxsize
chunks = [assembled[maxsize:]]
block = assembled[:maxsize]
yield to_numpy(block)
if total_len:
yield to_numpy(b"".join(chunks))
@contextlib.contextmanager
def stream_insert_numpy_context(self, path, start=None, end=None,
layout=None):
"""Return a context manager that allows data to be efficiently
inserted into a stream in a piecewise manner. Data is
provided as Numpy arrays, and is aggregated and sent to the
server in larger or smaller chunks as necessary. Data format
must match the database layout for the given path.
For more details, see help for
nilmdb.client.numpyclient.StreamInserterNumpy
If 'layout' is not None, use it as the layout rather than
querying the database.
"""
dtype = self._get_dtype(path, layout)
ctx = StreamInserterNumpy(self, path, start, end, dtype)
yield ctx
ctx.finalize()
ctx.destroy()
def stream_insert_numpy(self, path, data, start=None, end=None,
layout=None):
"""Insert data into a stream. data should be a Numpy array
which will be passed through stream_insert_numpy_context to
break it into chunks etc. See the help for that function
for details."""
with self.stream_insert_numpy_context(path, start, end, layout) as ctx:
if isinstance(data, numpy.ndarray):
ctx.insert(data)
else:
for chunk in data:
ctx.insert(chunk)
return ctx.last_response
class StreamInserterNumpy(nilmdb.client.client.StreamInserter):
"""Object returned by stream_insert_numpy_context() that manages
the insertion of rows of data into a particular path.
See help for nilmdb.client.client.StreamInserter for details.
The only difference is that, instead of ASCII formatted data,
this context manager can take Numpy arrays, which are either
structured (1D with complex dtype) or flat (2D with simple dtype).
"""
# Soft limit of how many bytes to send per HTTP request.
_max_data = 2 * 1024 * 1024
def __init__(self, client, path, start, end, dtype):
"""
'client' is the client object. 'path' is the database path
to insert to. 'start' and 'end' are used for the first
contiguous interval and may be None. 'dtype' is the Numpy
dtype for this stream.
"""
super(StreamInserterNumpy, self).__init__(client, path, start, end)
self._dtype = dtype
# Max rows to send at once
self._max_rows = self._max_data // self._dtype.itemsize
# List of the current arrays we're building up to send
self._block_arrays = []
self._block_rows = 0
def insert(self, array):
"""Insert Numpy data, which must match the layout type."""
if not isinstance(array, numpy.ndarray):
array = numpy.array(array)
if array.ndim == 1:
# Already a structured array; just verify the type
if array.dtype != self._dtype:
raise ValueError("wrong dtype for 1D (structured) array")
elif array.ndim == 2:
# Convert to structured array
sarray = numpy.zeros(array.shape[0], dtype=self._dtype)
try:
sarray['timestamp'] = array[:, 0]
# Need the squeeze in case sarray['data'] is 1 dimensional
sarray['data'] = numpy.squeeze(array[:, 1:])
except (IndexError, ValueError):
raise ValueError("wrong number of fields for this data type")
array = sarray
else:
raise ValueError("wrong number of dimensions in array")
length = len(array)
maxrows = self._max_rows
if length == 0:
return
if length > maxrows:
# This is more than twice what we wanted to send, so split
# it up. This is a bit inefficient, but the user really
# shouldn't be providing this much data at once.
for cut in range(0, length, maxrows):
self.insert(array[cut:(cut + maxrows)])
return
# Add this array to our list
self._block_arrays.append(array)
self._block_rows += length
# Send if it's too long
if self._block_rows >= maxrows:
self._send_block(final=False)
def _send_block(self, final=False):
"""Send the data current stored up. One row might be left
over if we need its timestamp saved."""
# Build the full array to send
if self._block_rows == 0:
array = numpy.zeros(0, dtype=self._dtype)
else:
array = numpy.hstack(self._block_arrays)
# Get starting timestamp
start_ts = self._interval_start
if start_ts is None:
# Pull start from the first row
try:
start_ts = array['timestamp'][0]
except IndexError:
pass # no timestamp is OK, if we have no data
# Get ending timestamp
if final:
# For a final block, the timestamp is either the
# user-provided end, or the timestamp of the last line
# plus epsilon.
end_ts = self._interval_end
if end_ts is None:
try:
end_ts = array['timestamp'][-1]
end_ts += nilmdb.utils.time.epsilon
except IndexError:
pass # no timestamp is OK, if we have no data
self._block_arrays = []
self._block_rows = 0
# Next block is completely fresh
self._interval_start = None
self._interval_end = None
else:
# An intermediate block. We need to save the last row
# for the next block, and use its timestamp as the ending
# timestamp for this one.
if len(array) < 2:
# Not enough data to send an intermediate block
return
end_ts = array['timestamp'][-1]
if self._interval_end is not None and end_ts > self._interval_end:
# User gave us bad endpoints; send it anyway, and let
# the server complain so that the error is the same
# as if we hadn't done this chunking.
end_ts = self._interval_end
self._block_arrays = [array[-1:]]
self._block_rows = 1
array = array[:-1]
# Next block continues where this one ended
self._interval_start = end_ts
# If we have no endpoints, or equal endpoints, it's OK as long
# as there's no data to send
if (start_ts is None or end_ts is None) or (start_ts == end_ts):
if not array:
return
raise ClientError("have data to send, but invalid start/end times")
# Send it
data = array.tostring()
self.last_response = self._client.stream_insert_block(
self._path, data, start_ts, end_ts, binary=True)
return

View File

@ -1,127 +1,64 @@
"""Command line client functionality""" """Command line client functionality"""
import os import nilmdb.client
from nilmdb.utils.printf import *
from nilmdb.utils import datetime_tz
import nilmdb.utils.time
import sys import sys
import signal import os
import argparse import argparse
from argparse import ArgumentDefaultsHelpFormatter as def_form from argparse import ArgumentDefaultsHelpFormatter as def_form
import nilmdb.client
from nilmdb.utils.printf import fprintf, sprintf
import nilmdb.utils.time
import argcomplete
import datetime_tz
# Valid subcommands. Defined in separate files just to break # Valid subcommands. Defined in separate files just to break
# things up -- they're still called with Cmdline as self. # things up -- they're still called with Cmdline as self.
subcommands = ["help", "info", "create", "rename", "list", "intervals", subcommands = [ "help", "info", "create", "list", "metadata",
"metadata", "insert", "extract", "remove", "destroy"] "insert", "extract", "remove", "destroy" ]
# Import the subcommand modules # Import the subcommand modules
subcmd_mods = {} subcmd_mods = {}
for cmd in subcommands: for cmd in subcommands:
subcmd_mods[cmd] = __import__("nilmdb.cmdline." + cmd, fromlist=[cmd]) subcmd_mods[cmd] = __import__("nilmdb.cmdline." + cmd, fromlist = [ cmd ])
class JimArgumentParser(argparse.ArgumentParser): class JimArgumentParser(argparse.ArgumentParser):
def parse_args(self, args=None, namespace=None):
# Look for --version anywhere and change it to just "nilmtool
# --version". This makes "nilmtool cmd --version" work, which
# is needed by help2man.
if "--version" in (args or sys.argv[1:]):
args = ["--version"]
return argparse.ArgumentParser.parse_args(self, args, namespace)
def error(self, message): def error(self, message):
self.print_usage(sys.stderr) self.print_usage(sys.stderr)
self.exit(2, sprintf("error: %s\n", message)) self.exit(2, sprintf("error: %s\n", message))
class Cmdline(object):
class Complete(): def __init__(self, argv = None):
# Completion helpers, for using argcomplete (see
# extras/nilmtool-bash-completion.sh)
def escape(self, s):
quote_chars = ["\\", "\"", "'", " "]
for char in quote_chars:
s = s.replace(char, "\\" + char)
return s
def none(self, prefix, parsed_args, **kwargs):
return []
rate = none
time = none
url = none
def path(self, prefix, parsed_args, **kwargs):
client = nilmdb.client.Client(parsed_args.url)
return (self.escape(s[0])
for s in client.stream_list()
if s[0].startswith(prefix))
def layout(self, prefix, parsed_args, **kwargs):
types = ["int8", "int16", "int32", "int64",
"uint8", "uint16", "uint32", "uint64",
"float32", "float64"]
layouts = []
for i in range(1, 10):
layouts.extend([(t + "_" + str(i)) for t in types])
return (lay for lay in layouts if lay.startswith(prefix))
def meta_key(self, prefix, parsed_args, **kwargs):
return (kv.split('=')[0] for kv
in self.meta_keyval(prefix, parsed_args, **kwargs))
def meta_keyval(self, prefix, parsed_args, **kwargs):
client = nilmdb.client.Client(parsed_args.url)
path = parsed_args.path
if not path:
return []
results = []
for (k, v) in client.stream_get_metadata(path).items():
kv = self.escape(k + '=' + v)
if kv.startswith(prefix):
results.append(kv)
return results
class Cmdline():
def __init__(self, argv=None):
self.argv = argv or sys.argv[1:] self.argv = argv or sys.argv[1:]
self.client = None self.client = None
self.def_url = os.environ.get("NILMDB_URL", "http://localhost/nilmdb/") self.def_url = os.environ.get("NILMDB_URL", "http://localhost:12380")
self.subcmd = {} self.subcmd = {}
self.complete = Complete()
self.complete_output_stream = None # overridden by test suite
def arg_time(self, toparse): def arg_time(self, toparse):
"""Parse a time string argument""" """Parse a time string argument"""
try: try:
return nilmdb.utils.time.parse_time(toparse) return nilmdb.utils.time.parse_time(toparse).totimestamp()
except ValueError as e: except ValueError as e:
raise argparse.ArgumentTypeError(sprintf("%s \"%s\"", raise argparse.ArgumentTypeError(sprintf("%s \"%s\"",
str(e), toparse)) str(e), toparse))
# Set up the parser
def parser_setup(self): def parser_setup(self):
self.parser = JimArgumentParser(add_help=False, self.parser = JimArgumentParser(add_help = False,
formatter_class=def_form) formatter_class = def_form)
group = self.parser.add_argument_group("General options") group = self.parser.add_argument_group("General options")
group.add_argument("-h", "--help", action='help', group.add_argument("-h", "--help", action='help',
help='show this help message and exit') help='show this help message and exit')
group.add_argument("-v", "--version", action="version", group.add_argument("-V", "--version", action="version",
version=nilmdb.__version__) version = nilmdb.__version__)
group = self.parser.add_argument_group("Server") group = self.parser.add_argument_group("Server")
group.add_argument("-u", "--url", action="store", group.add_argument("-u", "--url", action="store",
default=self.def_url, default=self.def_url,
help="NilmDB server URL (default: %(default)s)" help="NilmDB server URL (default: %(default)s)")
).completer = self.complete.url
sub = self.parser.add_subparsers( sub = self.parser.add_subparsers(
title="Commands", dest="command", required=True, title="Commands", dest="command",
description="Use 'help command' or 'command --help' for more " description="Use 'help command' or 'command --help' for more "
"details on a particular command.") "details on a particular command.")
@ -136,18 +73,12 @@ class Cmdline():
sys.exit(-1) sys.exit(-1)
def run(self): def run(self):
# Set SIGPIPE to its default handler -- we don't need Python
# to catch it for us.
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
# Clear cached timezone, so that we can pick up timezone changes # Clear cached timezone, so that we can pick up timezone changes
# while running this from the test suite. # while running this from the test suite.
datetime_tz._localtz = None datetime_tz._localtz = None
# Run parser # Run parser
self.parser_setup() self.parser_setup()
argcomplete.autocomplete(self.parser, exit_method=sys.exit,
output_stream=self.complete_output_stream)
self.args = self.parser.parse_args(self.argv) self.args = self.parser.parse_args(self.argv)
# Run arg verify handler if there is one # Run arg verify handler if there is one
@ -160,7 +91,7 @@ class Cmdline():
# unless the particular command requests that we don't. # unless the particular command requests that we don't.
if "no_test_connect" not in self.args: if "no_test_connect" not in self.args:
try: try:
self.client.version() server_version = self.client.version()
except nilmdb.client.Error as e: except nilmdb.client.Error as e:
self.die("error connecting to server: %s", str(e)) self.die("error connecting to server: %s", str(e))

View File

@ -1,11 +1,11 @@
from argparse import RawDescriptionHelpFormatter as raw_form from nilmdb.utils.printf import *
import nilmdb.client import nilmdb.client
from argparse import RawDescriptionHelpFormatter as raw_form
def setup(self, sub): def setup(self, sub):
cmd = sub.add_parser("create", help="Create a new stream", cmd = sub.add_parser("create", help="Create a new stream",
formatter_class=raw_form, formatter_class = raw_form,
description=""" description="""
Create a new empty stream at the specified path and with the specified Create a new empty stream at the specified path and with the specified
layout type. layout type.
@ -19,17 +19,14 @@ Layout types are of the format: type_count
For example, 'float32_8' means the data for this stream has 8 columns of For example, 'float32_8' means the data for this stream has 8 columns of
32-bit floating point values. 32-bit floating point values.
""") """)
cmd.set_defaults(handler=cmd_create) cmd.set_defaults(handler = cmd_create)
group = cmd.add_argument_group("Required arguments") group = cmd.add_argument_group("Required arguments")
group.add_argument("path", group.add_argument("path",
help="Path (in database) of new stream, e.g. /foo/bar", help="Path (in database) of new stream, e.g. /foo/bar")
).completer = self.complete.path
group.add_argument("layout", group.add_argument("layout",
help="Layout type for new stream, e.g. float32_8", help="Layout type for new stream, e.g. float32_8")
).completer = self.complete.layout
return cmd return cmd
def cmd_create(self): def cmd_create(self):
"""Create new stream""" """Create new stream"""
try: try:

View File

@ -1,52 +1,25 @@
import fnmatch from nilmdb.utils.printf import *
import nilmdb.client
from argparse import ArgumentDefaultsHelpFormatter as def_form from argparse import ArgumentDefaultsHelpFormatter as def_form
from nilmdb.utils.printf import printf
import nilmdb.client
def setup(self, sub): def setup(self, sub):
cmd = sub.add_parser("destroy", help="Delete a stream and all data", cmd = sub.add_parser("destroy", help="Delete a stream and all data",
formatter_class=def_form, formatter_class = def_form,
description=""" description="""
Destroy the stream at the specified path. Destroy the stream at the specified path. All
The stream must be empty. All metadata data and metadata related to the stream is
related to the stream is permanently deleted. permanently deleted.
Wildcards and multiple paths are supported.
""") """)
cmd.set_defaults(handler=cmd_destroy) cmd.set_defaults(handler = cmd_destroy)
group = cmd.add_argument_group("Options")
group.add_argument("-R", "--remove", action="store_true",
help="Remove all data before destroying stream")
group.add_argument("-q", "--quiet", action="store_true",
help="Don't display names when destroying "
"multiple paths")
group = cmd.add_argument_group("Required arguments") group = cmd.add_argument_group("Required arguments")
group.add_argument("path", nargs='+', group.add_argument("path",
help="Path of the stream to delete, e.g. /foo/bar/*", help="Path of the stream to delete, e.g. /foo/bar")
).completer = self.complete.path
return cmd return cmd
def cmd_destroy(self): def cmd_destroy(self):
"""Destroy stream""" """Destroy stream"""
streams = [s[0] for s in self.client.stream_list()] try:
paths = [] self.client.stream_destroy(self.args.path)
for path in self.args.path: except nilmdb.client.ClientError as e:
new = fnmatch.filter(streams, path) self.die("error destroying stream: %s", str(e))
if not new:
self.die("error: no stream matched path: %s", path)
paths.extend(new)
for path in paths:
if not self.args.quiet and len(paths) > 1:
printf("Destroying %s\n", path)
try:
if self.args.remove:
self.client.stream_remove(path)
self.client.stream_destroy(path)
except nilmdb.client.ClientError as e:
self.die("error destroying stream: %s", str(e))

View File

@ -1,56 +1,41 @@
import sys from __future__ import print_function
from nilmdb.utils.printf import *
from nilmdb.utils.printf import printf
import nilmdb.client import nilmdb.client
def setup(self, sub): def setup(self, sub):
cmd = sub.add_parser("extract", help="Extract data", cmd = sub.add_parser("extract", help="Extract data",
description=""" description="""
Extract data from a stream. Extract data from a stream.
""") """)
cmd.set_defaults(verify=cmd_extract_verify, cmd.set_defaults(verify = cmd_extract_verify,
handler=cmd_extract) handler = cmd_extract)
group = cmd.add_argument_group("Data selection") group = cmd.add_argument_group("Data selection")
group.add_argument("path", group.add_argument("path",
help="Path of stream, e.g. /foo/bar", help="Path of stream, e.g. /foo/bar")
).completer = self.complete.path
group.add_argument("-s", "--start", required=True, group.add_argument("-s", "--start", required=True,
metavar="TIME", type=self.arg_time, metavar="TIME", type=self.arg_time,
help="Starting timestamp (free-form, inclusive)", help="Starting timestamp (free-form, inclusive)")
).completer = self.complete.time
group.add_argument("-e", "--end", required=True, group.add_argument("-e", "--end", required=True,
metavar="TIME", type=self.arg_time, metavar="TIME", type=self.arg_time,
help="Ending timestamp (free-form, noninclusive)", help="Ending timestamp (free-form, noninclusive)")
).completer = self.complete.time
group = cmd.add_argument_group("Output format") group = cmd.add_argument_group("Output format")
group.add_argument("-B", "--binary", action="store_true",
help="Raw binary output")
group.add_argument("-b", "--bare", action="store_true", group.add_argument("-b", "--bare", action="store_true",
help="Exclude timestamps from output lines") help="Exclude timestamps from output lines")
group.add_argument("-a", "--annotate", action="store_true", group.add_argument("-a", "--annotate", action="store_true",
help="Include comments with some information " help="Include comments with some information "
"about the stream") "about the stream")
group.add_argument("-m", "--markup", action="store_true",
help="Include comments with interval starts and ends")
group.add_argument("-T", "--timestamp-raw", action="store_true", group.add_argument("-T", "--timestamp-raw", action="store_true",
help="Show raw timestamps in annotated information") help="Show raw timestamps in annotated information")
group.add_argument("-c", "--count", action="store_true", group.add_argument("-c", "--count", action="store_true",
help="Just output a count of matched data points") help="Just output a count of matched data points")
return cmd return cmd
def cmd_extract_verify(self): def cmd_extract_verify(self):
if self.args.start > self.args.end: if self.args.start is not None and self.args.end is not None:
self.parser.error("start is after end") if self.args.start > self.args.end:
self.parser.error("start is after end")
if self.args.binary:
if (self.args.bare or self.args.annotate or self.args.markup or
self.args.timestamp_raw or self.args.count):
self.parser.error("--binary cannot be combined with other options")
def cmd_extract(self): def cmd_extract(self):
streams = self.client.stream_list(self.args.path) streams = self.client.stream_list(self.args.path)
@ -59,9 +44,9 @@ def cmd_extract(self):
layout = streams[0][1] layout = streams[0][1]
if self.args.timestamp_raw: if self.args.timestamp_raw:
time_string = nilmdb.utils.time.timestamp_to_string time_string = nilmdb.utils.time.float_time_to_string
else: else:
time_string = nilmdb.utils.time.timestamp_to_human time_string = nilmdb.utils.time.format_time
if self.args.annotate: if self.args.annotate:
printf("# path: %s\n", self.args.path) printf("# path: %s\n", self.args.path)
@ -70,23 +55,15 @@ def cmd_extract(self):
printf("# end: %s\n", time_string(self.args.end)) printf("# end: %s\n", time_string(self.args.end))
printed = False printed = False
if self.args.binary:
printer = sys.stdout.buffer.write
else:
printer = lambda x: print(x.decode('utf-8'))
bare = self.args.bare
count = self.args.count
for dataline in self.client.stream_extract(self.args.path, for dataline in self.client.stream_extract(self.args.path,
self.args.start, self.args.start,
self.args.end, self.args.end,
self.args.count, self.args.count):
self.args.markup, if self.args.bare and not self.args.count:
self.args.binary):
if bare and not count:
# Strip timestamp (first element). Doesn't make sense # Strip timestamp (first element). Doesn't make sense
# if we are only returning a count. # if we are only returning a count.
dataline = b' '.join(dataline.split(b' ')[1:]) dataline = ' '.join(dataline.split(' ')[1:])
printer(dataline) print(dataline)
printed = True printed = True
if not printed: if not printed:
if self.args.annotate: if self.args.annotate:

View File

@ -1,5 +1,7 @@
import argparse from nilmdb.utils.printf import *
import argparse
import sys
def setup(self, sub): def setup(self, sub):
cmd = sub.add_parser("help", help="Show detailed help for a command", cmd = sub.add_parser("help", help="Show detailed help for a command",
@ -7,15 +9,14 @@ def setup(self, sub):
Show help for a command. 'help command' is Show help for a command. 'help command' is
the same as 'command --help'. the same as 'command --help'.
""") """)
cmd.set_defaults(handler=cmd_help) cmd.set_defaults(handler = cmd_help)
cmd.set_defaults(no_test_connect=True) cmd.set_defaults(no_test_connect = True)
cmd.add_argument("command", nargs="?", cmd.add_argument("command", nargs="?",
help="Command to get help about") help="Command to get help about")
cmd.add_argument("rest", nargs=argparse.REMAINDER, cmd.add_argument("rest", nargs=argparse.REMAINDER,
help=argparse.SUPPRESS) help=argparse.SUPPRESS)
return cmd return cmd
def cmd_help(self): def cmd_help(self):
if self.args.command in self.subcmd: if self.args.command in self.subcmd:
self.subcmd[self.args.command].print_help() self.subcmd[self.args.command].print_help()

View File

@ -1,21 +1,19 @@
from argparse import ArgumentDefaultsHelpFormatter as def_form
import nilmdb.client import nilmdb.client
from nilmdb.utils.printf import printf from nilmdb.utils.printf import *
from nilmdb.utils import human_size from nilmdb.utils import human_size
from argparse import ArgumentDefaultsHelpFormatter as def_form
def setup(self, sub): def setup(self, sub):
cmd = sub.add_parser("info", help="Server information", cmd = sub.add_parser("info", help="Server information",
formatter_class=def_form, formatter_class = def_form,
description=""" description="""
List information about the server, like List information about the server, like
version. version.
""") """)
cmd.set_defaults(handler=cmd_info) cmd.set_defaults(handler = cmd_info)
return cmd return cmd
def cmd_info(self): def cmd_info(self):
"""Print info about the server""" """Print info about the server"""
printf("Client version: %s\n", nilmdb.__version__) printf("Client version: %s\n", nilmdb.__version__)
@ -23,8 +21,5 @@ def cmd_info(self):
printf("Server URL: %s\n", self.client.geturl()) printf("Server URL: %s\n", self.client.geturl())
dbinfo = self.client.dbinfo() dbinfo = self.client.dbinfo()
printf("Server database path: %s\n", dbinfo["path"]) printf("Server database path: %s\n", dbinfo["path"])
for (desc, field) in [("used by NilmDB", "size"), printf("Server database size: %s\n", human_size(dbinfo["size"]))
("used by other", "other"), printf("Server database free space: %s\n", human_size(dbinfo["free"]))
("reserved", "reserved"),
("free", "free")]:
printf("Server disk space %s: %s\n", desc, human_size(dbinfo[field]))

View File

@ -1,18 +1,17 @@
import sys from nilmdb.utils.printf import *
from nilmdb.utils.printf import printf
import nilmdb.client import nilmdb.client
import nilmdb.utils.timestamper as timestamper import nilmdb.utils.timestamper as timestamper
import nilmdb.utils.time import nilmdb.utils.time
import sys
def setup(self, sub): def setup(self, sub):
cmd = sub.add_parser("insert", help="Insert data", cmd = sub.add_parser("insert", help="Insert data",
description=""" description="""
Insert data into a stream. Insert data into a stream.
""") """)
cmd.set_defaults(verify=cmd_insert_verify, cmd.set_defaults(verify = cmd_insert_verify,
handler=cmd_insert) handler = cmd_insert)
cmd.add_argument("-q", "--quiet", action='store_true', cmd.add_argument("-q", "--quiet", action='store_true',
help='suppress unnecessary messages') help='suppress unnecessary messages')
@ -26,8 +25,7 @@ def setup(self, sub):
group.add_argument("-t", "--timestamp", action="store_true", group.add_argument("-t", "--timestamp", action="store_true",
help="Add timestamps to each line") help="Add timestamps to each line")
group.add_argument("-r", "--rate", type=float, group.add_argument("-r", "--rate", type=float,
help="Data rate, in Hz", help="Data rate, in Hz")
).completer = self.complete.rate
group = cmd.add_argument_group("Start time", group = cmd.add_argument_group("Start time",
description=""" description="""
@ -41,8 +39,7 @@ def setup(self, sub):
exc = group.add_mutually_exclusive_group() exc = group.add_mutually_exclusive_group()
exc.add_argument("-s", "--start", exc.add_argument("-s", "--start",
metavar="TIME", type=self.arg_time, metavar="TIME", type=self.arg_time,
help="Starting timestamp (free-form)", help="Starting timestamp (free-form)")
).completer = self.complete.time
exc.add_argument("-f", "--filename", action="store_true", exc.add_argument("-f", "--filename", action="store_true",
help="Use filename to determine start time") help="Use filename to determine start time")
@ -55,31 +52,26 @@ def setup(self, sub):
timezone.""") timezone.""")
group.add_argument("-e", "--end", group.add_argument("-e", "--end",
metavar="TIME", type=self.arg_time, metavar="TIME", type=self.arg_time,
help="Ending timestamp (free-form)", help="Ending timestamp (free-form)")
).completer = self.complete.time
group = cmd.add_argument_group("Required parameters") group = cmd.add_argument_group("Required parameters")
group.add_argument("path", group.add_argument("path",
help="Path of stream, e.g. /foo/bar", help="Path of stream, e.g. /foo/bar")
).completer = self.complete.path group.add_argument("file", nargs = '?', default='-',
group.add_argument("file", nargs='?', default='-',
help="File to insert (default: - (stdin))") help="File to insert (default: - (stdin))")
return cmd return cmd
def cmd_insert_verify(self): def cmd_insert_verify(self):
if self.args.timestamp: if self.args.timestamp:
if not self.args.rate: if not self.args.rate:
self.die("error: --rate is needed, but was not specified") self.die("error: --rate is needed, but was not specified")
if not self.args.filename and self.args.start is None: if not self.args.filename and self.args.start is None:
self.die("error: need --start or --filename " self.die("error: need --start or --filename when adding timestamps")
"when adding timestamps")
else: else:
if self.args.start is None or self.args.end is None: if self.args.start is None or self.args.end is None:
self.die("error: when not adding timestamps, --start and " self.die("error: when not adding timestamps, --start and "
"--end are required") "--end are required")
def cmd_insert(self): def cmd_insert(self):
# Find requested stream # Find requested stream
streams = self.client.stream_list(self.args.path) streams = self.client.stream_list(self.args.path)
@ -91,7 +83,7 @@ def cmd_insert(self):
try: try:
filename = arg.file filename = arg.file
if filename == '-': if filename == '-':
infile = sys.stdin.buffer infile = sys.stdin
else: else:
try: try:
infile = open(filename, "rb") infile = open(filename, "rb")
@ -100,7 +92,7 @@ def cmd_insert(self):
if arg.start is None: if arg.start is None:
try: try:
arg.start = nilmdb.utils.time.parse_time(filename) arg.start = nilmdb.utils.time.parse_time(filename).totimestamp()
except ValueError: except ValueError:
self.die("error extracting start time from filename '%s'", self.die("error extracting start time from filename '%s'",
filename) filename)
@ -108,16 +100,16 @@ def cmd_insert(self):
if arg.timestamp: if arg.timestamp:
data = timestamper.TimestamperRate(infile, arg.start, arg.rate) data = timestamper.TimestamperRate(infile, arg.start, arg.rate)
else: else:
data = iter(lambda: infile.read(1048576), b'') data = iter(lambda: infile.read(1048576), '')
# Print info # Print info
if not arg.quiet: if not arg.quiet:
printf(" Input file: %s\n", filename) printf(" Input file: %s\n", filename)
printf(" Start time: %s\n", printf(" Start time: %s\n",
nilmdb.utils.time.timestamp_to_human(arg.start)) nilmdb.utils.time.format_time(arg.start))
if arg.end: if arg.end:
printf(" End time: %s\n", printf(" End time: %s\n",
nilmdb.utils.time.timestamp_to_human(arg.end)) nilmdb.utils.time.format_time(arg.end))
if arg.timestamp: if arg.timestamp:
printf("Timestamper: %s\n", str(data)) printf("Timestamper: %s\n", str(data))

View File

@ -1,76 +0,0 @@
from argparse import ArgumentDefaultsHelpFormatter as def_form
from nilmdb.utils.printf import printf
import nilmdb.utils.time
from nilmdb.utils.interval import Interval
def setup(self, sub):
cmd = sub.add_parser("intervals", help="List intervals",
formatter_class=def_form,
description="""
List intervals in a stream, similar to
'list --detail path'.
If '--diff diffpath' is provided, only
interval ranges that are present in 'path'
and not present in 'diffpath' are printed.
""")
cmd.set_defaults(verify=cmd_intervals_verify,
handler=cmd_intervals)
group = cmd.add_argument_group("Stream selection")
group.add_argument("path", metavar="PATH",
help="List intervals for this path",
).completer = self.complete.path
group.add_argument("-d", "--diff", metavar="PATH",
help="Subtract intervals from this path",
).completer = self.complete.path
group = cmd.add_argument_group("Interval details")
group.add_argument("-s", "--start",
metavar="TIME", type=self.arg_time,
help="Starting timestamp for intervals "
"(free-form, inclusive)",
).completer = self.complete.time
group.add_argument("-e", "--end",
metavar="TIME", type=self.arg_time,
help="Ending timestamp for intervals "
"(free-form, noninclusive)",
).completer = self.complete.time
group = cmd.add_argument_group("Misc options")
group.add_argument("-T", "--timestamp-raw", action="store_true",
help="Show raw timestamps when printing times")
group.add_argument("-o", "--optimize", action="store_true",
help="Optimize (merge adjacent) intervals")
return cmd
def cmd_intervals_verify(self):
if self.args.start is not None and self.args.end is not None:
if self.args.start >= self.args.end:
self.parser.error("start must precede end")
def cmd_intervals(self):
"""List intervals in a stream"""
if self.args.timestamp_raw:
time_string = nilmdb.utils.time.timestamp_to_string
else:
time_string = nilmdb.utils.time.timestamp_to_human
try:
intervals = (Interval(start, end) for (start, end) in
self.client.stream_intervals(self.args.path,
self.args.start,
self.args.end,
self.args.diff))
if self.args.optimize:
intervals = nilmdb.utils.interval.optimize(intervals)
for i in intervals:
printf("[ %s -> %s ]\n", time_string(i.start), time_string(i.end))
except nilmdb.client.ClientError as e:
self.die("error listing intervals: %s", str(e))

View File

@ -1,25 +1,28 @@
import fnmatch from nilmdb.utils.printf import *
from argparse import ArgumentDefaultsHelpFormatter as def_form
from nilmdb.utils.printf import printf
import nilmdb.utils.time import nilmdb.utils.time
import fnmatch
import argparse
from argparse import ArgumentDefaultsHelpFormatter as def_form
def setup(self, sub): def setup(self, sub):
cmd = sub.add_parser("list", help="List streams", cmd = sub.add_parser("list", help="List streams",
formatter_class=def_form, formatter_class = def_form,
description=""" description="""
List streams available in the database, List streams available in the database,
optionally filtering by path. Wildcards optionally filtering by layout or path. Wildcards
are accepted; non-matching paths or wildcards are accepted.
are ignored.
""") """)
cmd.set_defaults(verify=cmd_list_verify, cmd.set_defaults(verify = cmd_list_verify,
handler=cmd_list) handler = cmd_list)
group = cmd.add_argument_group("Stream filtering") group = cmd.add_argument_group("Stream filtering")
group.add_argument("path", metavar="PATH", default=["*"], nargs='*', group.add_argument("-p", "--path", metavar="PATH", default="*",
).completer = self.complete.path help="Match only this path (-p can be omitted)")
group.add_argument("path_positional", default="*",
nargs="?", help=argparse.SUPPRESS)
group.add_argument("-l", "--layout", default="*",
help="Match only this stream layout")
group = cmd.add_argument_group("Interval info") group = cmd.add_argument_group("Interval info")
group.add_argument("-E", "--ext", action="store_true", group.add_argument("-E", "--ext", action="store_true",
@ -32,74 +35,68 @@ def setup(self, sub):
group.add_argument("-s", "--start", group.add_argument("-s", "--start",
metavar="TIME", type=self.arg_time, metavar="TIME", type=self.arg_time,
help="Starting timestamp for intervals " help="Starting timestamp for intervals "
"(free-form, inclusive)", "(free-form, inclusive)")
).completer = self.complete.time
group.add_argument("-e", "--end", group.add_argument("-e", "--end",
metavar="TIME", type=self.arg_time, metavar="TIME", type=self.arg_time,
help="Ending timestamp for intervals " help="Ending timestamp for intervals "
"(free-form, noninclusive)", "(free-form, noninclusive)")
).completer = self.complete.time
group = cmd.add_argument_group("Misc options") group = cmd.add_argument_group("Misc options")
group.add_argument("-T", "--timestamp-raw", action="store_true", group.add_argument("-T", "--timestamp-raw", action="store_true",
help="Show raw timestamps when printing times") help="Show raw timestamps when printing times")
group.add_argument("-l", "--layout", action="store_true",
help="Show layout type next to path name")
group.add_argument("-n", "--no-decim", action="store_true",
help="Skip paths containing \"~decim-\"")
return cmd return cmd
def cmd_list_verify(self): def cmd_list_verify(self):
# A hidden "path_positional" argument lets the user leave off the
# "-p" when specifying the path. Handle it here.
got_opt = self.args.path != "*"
got_pos = self.args.path_positional != "*"
if got_pos:
if got_opt:
self.parser.error("too many paths specified")
else:
self.args.path = self.args.path_positional
if self.args.start is not None and self.args.end is not None: if self.args.start is not None and self.args.end is not None:
if self.args.start >= self.args.end: if self.args.start >= self.args.end:
self.parser.error("start must precede end") self.parser.error("start must precede end")
if self.args.start is not None or self.args.end is not None: if self.args.start is not None or self.args.end is not None:
if not self.args.detail: if not self.args.detail:
self.parser.error("--start and --end only make sense " self.parser.error("--start and --end only make sense with --detail")
"with --detail")
def cmd_list(self): def cmd_list(self):
"""List available streams""" """List available streams"""
streams = self.client.stream_list(extended=True) streams = self.client.stream_list(extended = True)
if self.args.timestamp_raw: if self.args.timestamp_raw:
time_string = nilmdb.utils.time.timestamp_to_string time_string = nilmdb.utils.time.float_time_to_string
else: else:
time_string = nilmdb.utils.time.timestamp_to_human time_string = nilmdb.utils.time.format_time
for argpath in self.args.path: for stream in streams:
for stream in streams: (path, layout, int_min, int_max, rows, seconds) = stream[:6]
(path, layout, int_min, int_max, rows, time) = stream[:6] if not (fnmatch.fnmatch(path, self.args.path) and
if not fnmatch.fnmatch(path, argpath): fnmatch.fnmatch(layout, self.args.layout)):
continue continue
if self.args.no_decim and "~decim-" in path:
continue
if self.args.layout: printf("%s %s\n", path, layout)
printf("%s %s\n", path, layout)
if self.args.ext:
if int_min is None or int_max is None:
printf(" interval extents: (no data)\n")
else: else:
printf("%s\n", path) printf(" interval extents: %s -> %s\n",
time_string(int_min), time_string(int_max))
printf(" total data: %d rows, %.6f seconds\n",
rows or 0, seconds or 0);
if self.args.ext: if self.args.detail:
if int_min is None or int_max is None: printed = False
printf(" interval extents: (no data)\n") for (start, end) in self.client.stream_intervals(
else: path, self.args.start, self.args.end):
printf(" interval extents: %s -> %s\n", printf(" [ %s -> %s ]\n", time_string(start), time_string(end))
time_string(int_min), time_string(int_max)) printed = True
printf(" total data: %d rows, %.6f seconds\n", if not printed:
rows or 0, printf(" (no intervals)\n")
nilmdb.utils.time.timestamp_to_seconds(time or 0))
if self.args.detail:
printed = False
for (start, end) in self.client.stream_intervals(
path, self.args.start, self.args.end):
printf(" [ %s -> %s ]\n",
time_string(start), time_string(end))
printed = True
if not printed:
printf(" (no intervals)\n")

View File

@ -1,8 +1,7 @@
from nilmdb.utils.printf import printf from nilmdb.utils.printf import *
import nilmdb import nilmdb
import nilmdb.client import nilmdb.client
def setup(self, sub): def setup(self, sub):
cmd = sub.add_parser("metadata", help="Get or set stream metadata", cmd = sub.add_parser("metadata", help="Get or set stream metadata",
description=""" description="""
@ -10,34 +9,25 @@ def setup(self, sub):
a stream. a stream.
""", """,
usage="%(prog)s path [-g [key ...] | " usage="%(prog)s path [-g [key ...] | "
"-s key=value [...] | -u key=value [...]] | " "-s key=value [...] | -u key=value [...]]")
"-d [key ...]") cmd.set_defaults(handler = cmd_metadata)
cmd.set_defaults(handler=cmd_metadata)
group = cmd.add_argument_group("Required arguments") group = cmd.add_argument_group("Required arguments")
group.add_argument("path", group.add_argument("path",
help="Path of stream, e.g. /foo/bar", help="Path of stream, e.g. /foo/bar")
).completer = self.complete.path
group = cmd.add_argument_group("Actions") group = cmd.add_argument_group("Actions")
exc = group.add_mutually_exclusive_group() exc = group.add_mutually_exclusive_group()
exc.add_argument("-g", "--get", nargs="*", metavar="key", exc.add_argument("-g", "--get", nargs="*", metavar="key",
help="Get metadata for specified keys (default all)", help="Get metadata for specified keys (default all)")
).completer = self.complete.meta_key
exc.add_argument("-s", "--set", nargs="+", metavar="key=value", exc.add_argument("-s", "--set", nargs="+", metavar="key=value",
help="Replace all metadata with provided " help="Replace all metadata with provided "
"key=value pairs", "key=value pairs")
).completer = self.complete.meta_keyval
exc.add_argument("-u", "--update", nargs="+", metavar="key=value", exc.add_argument("-u", "--update", nargs="+", metavar="key=value",
help="Update metadata using provided " help="Update metadata using provided "
"key=value pairs", "key=value pairs")
).completer = self.complete.meta_keyval
exc.add_argument("-d", "--delete", nargs="*", metavar="key",
help="Delete metadata for specified keys (default all)",
).completer = self.complete.meta_key
return cmd return cmd
def cmd_metadata(self): def cmd_metadata(self):
"""Manipulate metadata""" """Manipulate metadata"""
if self.args.set is not None or self.args.update is not None: if self.args.set is not None or self.args.update is not None:
@ -62,29 +52,15 @@ def cmd_metadata(self):
handler(self.args.path, data) handler(self.args.path, data)
except nilmdb.client.ClientError as e: except nilmdb.client.ClientError as e:
self.die("error setting/updating metadata: %s", str(e)) self.die("error setting/updating metadata: %s", str(e))
elif self.args.delete is not None:
# Delete (by setting values to empty strings)
keys = None
if self.args.delete:
keys = list(self.args.delete)
try:
data = self.client.stream_get_metadata(self.args.path, keys)
for key in data:
data[key] = ""
self.client.stream_update_metadata(self.args.path, data)
except nilmdb.client.ClientError as e:
self.die("error deleting metadata: %s", str(e))
else: else:
# Get (or unspecified) # Get (or unspecified)
keys = None keys = self.args.get or None
if self.args.get:
keys = list(self.args.get)
try: try:
data = self.client.stream_get_metadata(self.args.path, keys) data = self.client.stream_get_metadata(self.args.path, keys)
except nilmdb.client.ClientError as e: except nilmdb.client.ClientError as e:
self.die("error getting metadata: %s", str(e)) self.die("error getting metadata: %s", str(e))
for key, value in sorted(data.items()): for key, value in sorted(data.items()):
# Print nonexistant keys as having empty value # Omit nonexistant keys
if value is None: if value is None:
value = "" value = ""
printf("%s=%s\n", key, value) printf("%s=%s\n", key, value)

View File

@ -1,59 +1,37 @@
import fnmatch from nilmdb.utils.printf import *
from nilmdb.utils.printf import printf
import nilmdb.client import nilmdb.client
def setup(self, sub): def setup(self, sub):
cmd = sub.add_parser("remove", help="Remove data", cmd = sub.add_parser("remove", help="Remove data",
description=""" description="""
Remove all data from a specified time range within a Remove all data from a specified time range within a
stream. If multiple streams or wildcards are stream.
provided, the same time range is removed from all
streams.
""") """)
cmd.set_defaults(handler=cmd_remove) cmd.set_defaults(handler = cmd_remove)
group = cmd.add_argument_group("Data selection") group = cmd.add_argument_group("Data selection")
group.add_argument("path", nargs='+', group.add_argument("path",
help="Path of stream, e.g. /foo/bar/*", help="Path of stream, e.g. /foo/bar")
).completer = self.complete.path
group.add_argument("-s", "--start", required=True, group.add_argument("-s", "--start", required=True,
metavar="TIME", type=self.arg_time, metavar="TIME", type=self.arg_time,
help="Starting timestamp (free-form, inclusive)", help="Starting timestamp (free-form, inclusive)")
).completer = self.complete.time
group.add_argument("-e", "--end", required=True, group.add_argument("-e", "--end", required=True,
metavar="TIME", type=self.arg_time, metavar="TIME", type=self.arg_time,
help="Ending timestamp (free-form, noninclusive)", help="Ending timestamp (free-form, noninclusive)")
).completer = self.complete.time
group = cmd.add_argument_group("Output format") group = cmd.add_argument_group("Output format")
group.add_argument("-q", "--quiet", action="store_true",
help="Don't display names when removing "
"from multiple paths")
group.add_argument("-c", "--count", action="store_true", group.add_argument("-c", "--count", action="store_true",
help="Output number of data points removed") help="Output number of data points removed")
return cmd return cmd
def cmd_remove(self): def cmd_remove(self):
streams = [s[0] for s in self.client.stream_list()]
paths = []
for path in self.args.path:
new = fnmatch.filter(streams, path)
if not new:
self.die("error: no stream matched path: %s", path)
paths.extend(new)
try: try:
for path in paths: count = self.client.stream_remove(self.args.path,
if not self.args.quiet and len(paths) > 1: self.args.start, self.args.end)
printf("Removing from %s\n", path)
count = self.client.stream_remove(path,
self.args.start, self.args.end)
if self.args.count:
printf("%d\n", count)
except nilmdb.client.ClientError as e: except nilmdb.client.ClientError as e:
self.die("error removing data: %s", str(e)) self.die("error removing data: %s", str(e))
if self.args.count:
printf("%d\n", count)
return 0 return 0

View File

@ -1,32 +0,0 @@
from argparse import ArgumentDefaultsHelpFormatter as def_form
import nilmdb.client
def setup(self, sub):
cmd = sub.add_parser("rename", help="Rename a stream",
formatter_class=def_form,
description="""
Rename a stream.
Only the stream's path is renamed; no
metadata is changed.
""")
cmd.set_defaults(handler=cmd_rename)
group = cmd.add_argument_group("Required arguments")
group.add_argument("oldpath",
help="Old path, e.g. /foo/old",
).completer = self.complete.path
group.add_argument("newpath",
help="New path, e.g. /foo/bar/new",
).completer = self.complete.path
return cmd
def cmd_rename(self):
"""Rename a stream"""
try:
self.client.stream_rename(self.args.oldpath, self.args.newpath)
except nilmdb.client.ClientError as e:
self.die("error renaming stream: %s", str(e))

View File

@ -1,3 +0,0 @@
"""nilmdb.fsck"""
from nilmdb.fsck.fsck import Fsck

View File

@ -1,610 +0,0 @@
# -*- coding: utf-8 -*-
"""Check database consistency, with some ability to fix problems.
This should be able to fix cases where a database gets corrupted due
to unexpected system shutdown, and detect other cases that may cause
NilmDB to return errors when trying to manipulate the database."""
import nilmdb.utils
import nilmdb.server
import nilmdb.client.numpyclient
from nilmdb.utils.interval import IntervalError
from nilmdb.server.interval import Interval, IntervalSet
from nilmdb.utils.printf import printf, fprintf, sprintf
from collections import defaultdict
import sqlite3
import os
import sys
import progressbar
import re
import shutil
import pickle
import numpy
class FsckError(Exception):
def __init__(self, msg="", *args):
if args:
msg = sprintf(msg, *args)
Exception.__init__(self, msg)
class FixableFsckError(FsckError):
def __init__(self, msg=""):
FsckError.__init__(self, f'{msg}\nThis may be fixable with "--fix".')
class RetryFsck(FsckError):
pass
class FsckFormatError(FsckError):
pass
def log(format, *args):
printf(format, *args)
def err(format, *args):
fprintf(sys.stderr, format, *args)
# Decorator that retries a function if it returns a specific value
def retry_if_raised(exc, message=None, max_retries=1000):
def f1(func):
def f2(*args, **kwargs):
for n in range(max_retries):
try:
return func(*args, **kwargs)
except exc:
if message:
log(f"{message} ({n+1})\n\n")
raise Exception("Max number of retries (%d) exceeded; giving up" %
max_retries)
return f2
return f1
class Progress(object):
def __init__(self, maxval):
if maxval == 0:
maxval = 1
self.bar = progressbar.ProgressBar(
maxval=maxval,
widgets=[progressbar.Percentage(), ' ',
progressbar.Bar(), ' ',
progressbar.ETA()])
self.bar.term_width = self.bar.term_width or 75
def __enter__(self):
self.bar.start()
self.last_update = 0
return self
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.bar.finish()
else:
printf("\n")
def update(self, val):
self.bar.update(val)
class Fsck(object):
def __init__(self, path, fix=False):
self.basepath = path
self.sqlpath = os.path.join(path, "data.sql")
self.bulkpath = os.path.join(path, "data")
self.bulklock = os.path.join(path, "data.lock")
self.fix = fix
### Main checks
@retry_if_raised(RetryFsck, "Something was fixed: restarting fsck")
def check(self, skip_data=False):
self.bulk = None
self.sql = None
try:
self.check_paths()
self.check_sql()
self.check_streams()
self.check_intervals()
if skip_data:
log("skipped data check\n")
else:
self.check_data()
finally:
if self.bulk:
self.bulk.close()
if self.sql: # pragma: no cover
# (coverage doesn't handle finally clauses correctly;
# both branches here are tested)
self.sql.commit()
self.sql.close()
log("ok\n")
### Check basic path structure
def check_paths(self):
log("checking paths\n")
if self.bulk:
self.bulk.close()
if not os.path.isfile(self.sqlpath):
raise FsckError("SQL database missing (%s)", self.sqlpath)
if not os.path.isdir(self.bulkpath):
raise FsckError("Bulk data directory missing (%s)", self.bulkpath)
with open(self.bulklock, "w") as lockfile:
if not nilmdb.utils.lock.exclusive_lock(lockfile):
raise FsckError('Database already locked by another process\n'
'Make sure all other processes that might be '
'using the database are stopped.\n'
'Restarting apache will cause it to unlock '
'the db until a request is received.')
# unlocked immediately
self.bulk = nilmdb.server.bulkdata.BulkData(self.basepath)
### Check SQL database health
def check_sql(self):
log("checking sqlite database\n")
self.sql = sqlite3.connect(self.sqlpath)
with self.sql:
cur = self.sql.cursor()
ver = cur.execute("PRAGMA user_version").fetchone()[0]
good = max(nilmdb.server.nilmdb._sql_schema_updates.keys())
if ver != good:
raise FsckError("database version %d too old, should be %d",
ver, good)
self.stream_path = {}
self.stream_layout = {}
log(" loading paths\n")
result = cur.execute("SELECT id, path, layout FROM streams")
for r in result:
if r[0] in self.stream_path:
raise FsckError("duplicated ID %d in stream IDs", r[0])
self.stream_path[r[0]] = r[1]
self.stream_layout[r[0]] = r[2]
log(" loading intervals\n")
self.stream_interval = defaultdict(list)
result = cur.execute("SELECT stream_id, start_time, end_time, "
"start_pos, end_pos FROM ranges "
"ORDER BY start_time")
for r in result:
if r[0] not in self.stream_path:
raise FsckError("interval ID %d not in streams", r[0])
self.stream_interval[r[0]].append((r[1], r[2], r[3], r[4]))
log(" loading metadata\n")
self.stream_meta = defaultdict(dict)
result = cur.execute("SELECT stream_id, key, value FROM metadata")
for r in result:
if r[0] not in self.stream_path:
raise FsckError("metadata ID %d not in streams", r[0])
if r[1] in self.stream_meta[r[0]]:
raise FsckError(
"duplicate metadata key '%s' for stream %d",
r[1], r[0])
self.stream_meta[r[0]][r[1]] = r[2]
### Check streams and basic interval overlap
def check_streams(self):
ids = list(self.stream_path.keys())
log("checking %s streams\n", "{:,d}".format(len(ids)))
with Progress(len(ids)) as pbar:
for i, sid in enumerate(ids):
pbar.update(i)
path = self.stream_path[sid]
# unique path, valid layout
if list(self.stream_path.values()).count(path) != 1:
raise FsckError("duplicated path %s", path)
layout = self.stream_layout[sid].split('_')[0]
if layout not in ('int8', 'int16', 'int32', 'int64',
'uint8', 'uint16', 'uint32', 'uint64',
'float32', 'float64'):
raise FsckError("bad layout %s for %s", layout, path)
count = int(self.stream_layout[sid].split('_')[1])
if count < 1 or count > 1024:
raise FsckError("bad count %d for %s", count, path)
# must exist in bulkdata
bulk = self.bulkpath + path
bulk = bulk.encode('utf-8')
if not os.path.isdir(bulk):
raise FsckError("%s: missing bulkdata dir", path)
if not nilmdb.server.bulkdata.Table.exists(bulk):
raise FsckError("%s: bad bulkdata table", path)
# intervals don't overlap. Abuse IntervalSet to check
# for intervals in file positions, too.
timeiset = IntervalSet()
posiset = IntervalSet()
for (stime, etime, spos, epos) in self.stream_interval[sid]:
new = Interval(stime, etime)
try:
timeiset += new
except IntervalError:
raise FsckError("%s: overlap in intervals:\n"
"set: %s\nnew: %s",
path, str(timeiset), str(new))
if spos != epos:
new = Interval(spos, epos)
try:
posiset += new
except IntervalError:
self.fix_row_overlap(sid, path, posiset, new)
try:
# Check bulkdata
self.check_bulkdata(sid, path, bulk)
# Check that we can open bulkdata
tab = nilmdb.server.bulkdata.Table(bulk)
except FsckFormatError:
# If there are no files except _format, try deleting
# the entire stream; this may remove metadata, but
# it's probably unimportant.
files = list(os.listdir(bulk))
if len(files) > 1:
raise FsckFormatError(f"{path}: can't load _format, "
f"but data is also present")
# Since the stream was empty, just remove it
self.fix_remove_stream(sid, path, bulk,
"empty, with corrupted format file")
except FsckError as e:
raise e
except Exception as e: # pragma: no cover
# No coverage because this is an unknown/unexpected error
raise FsckError("%s: can't open bulkdata: %s",
path, str(e))
tab.close()
    def fix_row_overlap(self, sid, path, existing, new):
        """Attempt to repair two intervals whose file-offset ranges
        (spos, epos) overlap in the SQL interval table.

        If the overlapping file-position ranges look like this:
            A --------- C
               B -------- D
        the first interval can safely be truncated to cover A..B.
        Raises FixableFsckError if fixing is disabled, FsckError when
        the overlap is not of this fixable shape, and RetryFsck after
        a successful repair so the whole fsck pass restarts.
        """
        msg = (f"{path}: overlap in file offsets:\n"
               f"existing ranges: {existing}\n"
               f"overlapping interval: {new}")
        if not self.fix:
            raise FixableFsckError(msg)
        err(f"\n{msg}\nSeeing if we can truncate one of them...\n")
        # See if there's exactly one interval that overlaps the
        # conflicting one in the right way: it must start before the
        # new interval's start position and extend past it.
        match = None
        for intv in self.stream_interval[sid]:
            (stime, etime, spos, epos) = intv
            if spos < new.start and epos > new.start:
                if match:
                    # Two candidates would make the repair ambiguous.
                    err(f"no, more than one interval matched:\n"
                        f"{intv}\n{match}\n")
                    raise FsckError(f"{path}: unfixable overlap")
                match = intv
        if match is None:
            err("no intervals overlapped in the right way\n")
            raise FsckError(f"{path}: unfixable overlap")
        # Truncate the matched interval's end_pos down to the start of
        # the conflicting range (the WHERE clause pins the exact row).
        err(f"truncating {match}\n")
        with self.sql:
            cur = self.sql.cursor()
            cur.execute("UPDATE ranges SET end_pos=? "
                        "WHERE stream_id=? AND start_time=? AND "
                        "end_time=? AND start_pos=? AND end_pos=?",
                        (new.start, sid, *match))
            if cur.rowcount != 1:  # pragma: no cover (shouldn't fail)
                raise FsckError("failed to fix SQL database")
        raise RetryFsck
    ### Check that bulkdata is good enough to be opened
    @retry_if_raised(RetryFsck)
    def check_bulkdata(self, sid, path, bulk):
        """Sanity-check the on-disk bulkdata store for stream 'sid'.

        'path' is the stream's NilmDB path (for messages); 'bulk' is
        the on-disk directory (bytes).  Validates the pickled "_format"
        file (version, rows_per_file, files_per_dir, layout agreeing
        with the SQL layout), then verifies that every data file's size
        is a whole multiple of the binary row size.  Unfixable problems
        raise FsckFormatError; fixable ones go to fix_empty_subdir /
        fix_bad_filesize, whose RetryFsck is handled by the decorator
        (which presumably re-runs this check -- decorator defined
        elsewhere in this file).
        """
        try:
            with open(os.path.join(bulk, b"_format"), "rb") as f:
                fmt = pickle.load(f)
        except Exception as e:
            raise FsckFormatError(f"{path}: can't load _format file ({e})")
        # Only bulkdata format version 3 is supported.
        if fmt["version"] != 3:
            raise FsckFormatError("%s: bad or unsupported bulkdata version %d",
                                  path, fmt["version"])
        rows_per_file = int(fmt["rows_per_file"])
        if rows_per_file < 1:
            raise FsckFormatError(f"{path}: bad rows_per_file {rows_per_file}")
        files_per_dir = int(fmt["files_per_dir"])
        if files_per_dir < 1:
            raise FsckFormatError(f"{path}: bad files_per_dir {files_per_dir}")
        layout = fmt["layout"]
        if layout != self.stream_layout[sid]:
            raise FsckFormatError("%s: layout mismatch %s != %s", path,
                                  layout, self.stream_layout[sid])
        # Every file should have a size that's a multiple of the row
        # size.  A Rocket object is created just to learn that size.
        rkt = nilmdb.server.rocket.Rocket(layout, None)
        row_size = rkt.binary_size
        rkt.close()
        # Find all hex-named data directories
        regex = re.compile(b"^[0-9a-f]{4,}$")
        subdirs = sorted(filter(regex.search, os.listdir(bulk)),
                         key=lambda x: int(x, 16), reverse=True)
        for subdir in subdirs:
            # Find all hex-named files in that dir
            subpath = os.path.join(bulk, subdir)
            files = list(filter(regex.search, os.listdir(subpath)))
            if not files:
                # Dir with no data files: fixable (may raise RetryFsck)
                self.fix_empty_subdir(subpath)
            # Verify that their size is a multiple of the row size
            for filename in files:
                filepath = os.path.join(subpath, filename)
                offset = os.path.getsize(filepath)
                if offset % row_size:
                    self.fix_bad_filesize(path, filepath, offset, row_size)
def fix_empty_subdir(self, subpath):
msg = sprintf("bulkdata path %s is missing data files", subpath)
if not self.fix:
raise FixableFsckError(msg)
# Try to fix it by just deleting whatever is present,
# as long as it's only ".removed" files.
err("\n%s\n", msg)
for fn in os.listdir(subpath):
if not fn.endswith(b".removed"):
raise FsckError("can't fix automatically: please manually "
"remove the file '%s' and try again",
os.path.join(subpath, fn).decode(
'utf-8', errors='backslashreplace'))
# Remove the whole thing
err("Removing empty subpath\n")
shutil.rmtree(subpath)
raise RetryFsck
def fix_bad_filesize(self, path, filepath, offset, row_size):
extra = offset % row_size
msg = sprintf("%s: size of file %s (%d) is not a multiple" +
" of row size (%d): %d extra bytes present",
path, filepath, offset, row_size, extra)
if not self.fix:
raise FixableFsckError(msg)
# Try to fix it by just truncating the file
err("\n%s\n", msg)
newsize = offset - extra
err("Truncating file to %d bytes and retrying\n", newsize)
with open(filepath, "r+b") as f:
f.truncate(newsize)
raise RetryFsck
def fix_remove_stream(self, sid, path, bulk, reason):
msg = f"stream {path} is corrupted: {reason}"
if not self.fix:
raise FixableFsckError(msg)
# Remove the stream from disk and the database
err(f"\n{msg}\n")
err(f"Removing stream {path} from disk and database\n")
shutil.rmtree(bulk)
with self.sql:
cur = self.sql.cursor()
cur.execute("DELETE FROM streams WHERE id=?",
(sid,))
if cur.rowcount != 1: # pragma: no cover (shouldn't fail)
raise FsckError("failed to remove stream")
cur.execute("DELETE FROM ranges WHERE stream_id=?", (sid,))
cur.execute("DELETE FROM metadata WHERE stream_id=?", (sid,))
raise RetryFsck
### Check interval endpoints
def check_intervals(self):
total_ints = sum(len(x) for x in list(self.stream_interval.values()))
log("checking %s intervals\n", "{:,d}".format(total_ints))
done = 0
with Progress(total_ints) as pbar:
for sid in self.stream_interval:
try:
bulk = self.bulkpath + self.stream_path[sid]
bulk = bulk.encode('utf-8')
tab = nilmdb.server.bulkdata.Table(bulk)
def update(x):
pbar.update(done + x)
ints = self.stream_interval[sid]
done += self.check_table_intervals(sid, ints, tab, update)
finally:
tab.close()
def check_table_intervals(self, sid, ints, tab, update):
# look in the table to make sure we can pick out the interval's
# endpoints
path = self.stream_path[sid] # noqa: F841 unused
tab.file_open.cache_remove_all()
for (i, intv) in enumerate(ints):
update(i)
(stime, etime, spos, epos) = intv
if spos == epos and spos >= 0 and spos <= tab.nrows:
continue
try:
srow = tab[spos] # noqa: F841 unused
erow = tab[epos-1] # noqa: F841 unused
except Exception as e:
self.fix_bad_interval(sid, intv, tab, str(e))
return len(ints)
    def fix_bad_interval(self, sid, intv, tab, msg):
        """Repair an interval whose endpoint rows could not be read.

        If only the end position runs past the table's row count, the
        interval is truncated in SQL (both end_pos and end_time).
        Otherwise the interval is deleted from SQL entirely, which may
        leave stale rows on disk (instructions are printed for cleaning
        that up).  Raises FixableFsckError when fixing is disabled,
        RetryFsck after a successful repair.
        """
        path = self.stream_path[sid]
        msg = sprintf("%s: interval %s error accessing rows: %s",
                      path, str(intv), str(msg))
        if not self.fix:
            raise FixableFsckError(msg)
        err("\n%s\n", msg)
        (stime, etime, spos, epos) = intv
        # If it's just that the end pos is more than the number of rows
        # in the table, lower end pos and truncate interval time too.
        if spos < tab.nrows and epos >= tab.nrows:
            err("end position is past endrows, but it can be truncated\n")
            err("old end: time %d, pos %d\n", etime, epos)
            new_epos = tab.nrows
            # tab[row] yields the timestamp at that row (it is compared
            # against stime below); new end time is just past the last
            # remaining row's timestamp.
            new_etime = tab[new_epos-1] + 1
            err("new end: time %d, pos %d\n", new_etime, new_epos)
            if stime < new_etime:
                # Change it in SQL; the WHERE clause pins the exact
                # original row so exactly one row must be updated.
                with self.sql:
                    cur = self.sql.cursor()
                    cur.execute("UPDATE ranges SET end_time=?, end_pos=? "
                                "WHERE stream_id=? AND start_time=? AND "
                                "end_time=? AND start_pos=? AND end_pos=?",
                                (new_etime, new_epos, sid, stime, etime,
                                 spos, epos))
                    if cur.rowcount != 1:  # pragma: no cover (shouldn't fail)
                        raise FsckError("failed to fix SQL database")
                raise RetryFsck
            err("actually it can't be truncated; times are bad too\n")
        # Otherwise, the only hope is to delete the interval entirely.
        err("*** Deleting the entire interval from SQL.\n")
        err("This may leave stale data on disk. To fix that, copy all "
            "data from this stream to a new stream using nilm-copy, then\n")
        err("remove all data from and destroy %s.\n", path)
        with self.sql:
            cur = self.sql.cursor()
            cur.execute("DELETE FROM ranges WHERE "
                        "stream_id=? AND start_time=? AND "
                        "end_time=? AND start_pos=? AND end_pos=?",
                        (sid, stime, etime, spos, epos))
            if cur.rowcount != 1:  # pragma: no cover (shouldn't fail)
                raise FsckError("failed to remove interval")
        raise RetryFsck
### Check data in each interval
def check_data(self):
total_rows = sum(sum((y[3] - y[2]) for y in x)
for x in list(self.stream_interval.values()))
log("checking %s rows of data\n", "{:,d}".format(total_rows))
done = 0
with Progress(total_rows) as pbar:
for sid in self.stream_interval:
try:
bulk = self.bulkpath + self.stream_path[sid]
bulk = bulk.encode('utf-8')
tab = nilmdb.server.bulkdata.Table(bulk)
def update(x):
pbar.update(done + x)
ints = self.stream_interval[sid]
done += self.check_table_data(sid, ints, tab, update)
finally:
tab.close()
    def check_table_data(self, sid, ints, tab, update):
        """Verify the stored data of every interval for one stream.

        Reads each interval's rows from the bulkdata table in chunks
        and checks that all timestamps lie in [stime, etime) and are
        strictly increasing.  A violating timestamp that is zero is
        assumed to be a zero-filled (truncated/corrupt) file tail and
        is handed to fix_table_by_truncating; any other violation
        raises FsckError.  Calls update() with the cumulative row count
        for progress reporting; returns the total rows checked.
        """
        # Chunk size; a maxrows_override attribute, if present,
        # overrides it (presumably set externally for testing).
        maxrows = getattr(self, 'maxrows_override', 100000)
        path = self.stream_path[sid]
        layout = self.stream_layout[sid]
        dtype = nilmdb.client.numpyclient.layout_to_dtype(layout)
        # Drop cached file descriptors so reads hit the real files.
        tab.file_open.cache_remove_all()
        done = 0
        for intv in ints:
            # Last timestamp of the previous chunk within this interval
            last_ts = None
            (stime, etime, spos, epos) = intv
            # Break interval into maxrows-sized chunks
            next_start = spos
            while next_start < epos:
                start = next_start
                stop = min(start + maxrows, epos)
                count = stop - start
                next_start = stop
                # Get raw data, convert to NumPy array
                try:
                    raw = tab.get_data(start, stop, binary=True)
                    data = numpy.frombuffer(raw, dtype)
                except Exception as e:  # pragma: no cover
                    # No coverage because it's hard to trigger this -- earlier
                    # checks check the ranges, so this would probably be a real
                    # disk error, malloc failure, etc.
                    raise FsckError(
                        "%s: failed to grab rows %d through %d: %s",
                        path, start, stop, repr(e))
                ts = data['timestamp']
                # Verify that all timestamps are in range; argmax finds
                # the first out-of-range row.
                match = (ts < stime) | (ts >= etime)
                if match.any():
                    row = numpy.argmax(match)
                    if ts[row] != 0:
                        raise FsckError("%s: data timestamp %d at row %d "
                                        "outside interval range [%d,%d)",
                                        path, ts[row], row + start,
                                        stime, etime)
                    # Timestamp is zero and out of the expected range;
                    # assume file ends with zeroed data and just truncate it.
                    self.fix_table_by_truncating(
                        path, tab, row + start,
                        "data timestamp is out of range, and zero")
                # Verify that timestamps are monotonic within the chunk
                match = numpy.diff(ts) <= 0
                if match.any():
                    row = numpy.argmax(match)
                    if ts[row+1] != 0:
                        raise FsckError(
                            "%s: non-monotonic timestamp (%d -> %d) "
                            "at row %d", path, ts[row], ts[row+1],
                            row + start)
                    # Timestamp is zero and non-monotonic;
                    # assume file ends with zeroed data and just truncate it.
                    self.fix_table_by_truncating(
                        path, tab, row + start + 1,
                        "data timestamp is non-monotonic, and zero")
                # Monotonicity must also hold across chunk boundaries.
                first_ts = ts[0]
                if last_ts is not None and first_ts <= last_ts:
                    raise FsckError("%s: first interval timestamp %d is not "
                                    "greater than the previous last interval "
                                    "timestamp %d, at row %d",
                                    path, first_ts, last_ts, start)
                last_ts = ts[-1]
                # The previous errors are fixable, by removing the
                # offending intervals, or changing the data
                # timestamps.  But these are probably unlikely errors,
                # so it's not worth implementing that yet.
                # Done with this chunk
                done += count
                update(done)
        return done
def fix_table_by_truncating(self, path, tab, row, reason):
# Simple fix for bad data: truncate the table at the given row.
# On retry, fix_bad_interval will correct the database and timestamps
# to account for this truncation.
msg = f"{path}: bad data in table, starting at row {row}: {reason}"
if not self.fix:
raise FixableFsckError(msg)
err(f"\n{msg}\nWill try truncating table\n")
(subdir, fname, offs, count) = tab._offset_from_row(row)
tab._remove_or_truncate_file(subdir, fname, offs)
raise RetryFsck

View File

@ -1,27 +0,0 @@
#!/usr/bin/env python3
import nilmdb.fsck
import argparse
def main():
"""Main entry point for the 'nilmdb-fsck' command line script"""
parser = argparse.ArgumentParser(
description='Check database consistency',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-v", "--version", action="version",
version=nilmdb.__version__)
parser.add_argument("-f", "--fix", action="store_true",
default=False, help='Fix errors when possible '
'(which may involve removing data)')
parser.add_argument("-n", "--no-data", action="store_true",
default=False, help='Skip the slow full-data check')
parser.add_argument('database', help='Database directory')
args = parser.parse_args()
nilmdb.fsck.Fsck(args.database, args.fix).check(skip_data=args.no_data)
if __name__ == "__main__":
main()

View File

@ -1,43 +1,38 @@
#!/usr/bin/env python3 #!/usr/bin/python
import os
import sys
import socket
import argparse
import cherrypy
import nilmdb.server import nilmdb.server
import argparse
import os
import socket
def main(): def main():
"""Main entry point for the 'nilmdb-server' command line script""" """Main entry point for the 'nilmdb-server' command line script"""
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
description='Run the NilmDB server', description = 'Run the NilmDB server',
formatter_class=argparse.ArgumentDefaultsHelpFormatter) formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-v", "--version", action="version", parser.add_argument("-V", "--version", action="version",
version=nilmdb.__version__) version = nilmdb.__version__)
group = parser.add_argument_group("Standard options") group = parser.add_argument_group("Standard options")
group.add_argument('-a', '--address', group.add_argument('-a', '--address',
help='Only listen on the given address', help = 'Only listen on the given address',
default='0.0.0.0') default = '0.0.0.0')
group.add_argument('-p', '--port', help='Listen on the given port', group.add_argument('-p', '--port', help = 'Listen on the given port',
type=int, default=12380) type = int, default = 12380)
group.add_argument('-d', '--database', help='Database directory', group.add_argument('-d', '--database', help = 'Database directory',
default="./db") default = os.path.join(os.getcwd(), "db"))
group.add_argument('-q', '--quiet', help='Silence output', group.add_argument('-q', '--quiet', help = 'Silence output',
action='store_true') action = 'store_true')
group.add_argument('-t', '--traceback', group.add_argument('-t', '--traceback',
help='Provide tracebacks in client errors', help = 'Provide tracebacks in client errors',
action='store_true', default=False) action = 'store_true', default = False)
group = parser.add_argument_group("Debug options") group = parser.add_argument_group("Debug options")
group.add_argument('-y', '--yappi', help='Run under yappi profiler and ' group.add_argument('-y', '--yappi', help = 'Run under yappi profiler and '
'invoke interactive shell afterwards', 'invoke interactive shell afterwards',
action='store_true') action = 'store_true')
args = parser.parse_args() args = parser.parse_args()
@ -46,54 +41,47 @@ def main():
db = nilmdb.utils.serializer_proxy(nilmdb.server.NilmDB)(args.database) db = nilmdb.utils.serializer_proxy(nilmdb.server.NilmDB)(args.database)
# Configure the server # Configure the server
if not args.quiet: if args.quiet:
cherrypy._cpconfig.environments['embedded']['log.screen'] = True embedded = True
else:
embedded = False
server = nilmdb.server.Server(db, server = nilmdb.server.Server(db,
host=args.address, host = args.address,
port=args.port, port = args.port,
force_traceback=args.traceback) embedded = embedded,
force_traceback = args.traceback)
# Print info # Print info
if not args.quiet: if not args.quiet:
print("Version: %s" % nilmdb.__version__) print "Version: %s" % nilmdb.__version__
print("Database: %s" % (os.path.realpath(args.database))) print "Database: %s" % (os.path.realpath(args.database))
if args.address == '0.0.0.0' or args.address == '::': if args.address == '0.0.0.0' or args.address == '::':
host = socket.getfqdn() host = socket.getfqdn()
else: else:
host = args.address host = args.address
print("Server URL: http://%s:%d/" % (host, args.port)) print "Server URL: http://%s:%d/" % ( host, args.port)
print("----") print "----"
# Run it # Run it
try: if args.yappi:
if args.yappi: print "Running in yappi"
print("Running in yappi") try:
try: import yappi
import yappi yappi.start()
yappi.start() server.start(blocking = True)
server.start(blocking=True) finally:
finally: yappi.stop()
yappi.stop() yappi.print_stats(sort_type = yappi.SORTTYPE_TTOT, limit = 50)
stats = yappi.get_func_stats() from IPython import embed
stats.sort("ttot") embed(header = "Use the yappi object to explore further, "
stats.print_all() "quit to exit")
try: else:
from IPython import embed server.start(blocking = True)
embed(header="Use the `yappi` or `stats` object to "
"explore further, `quit` to exit")
except ModuleNotFoundError:
print("\nInstall ipython to explore further")
else:
server.start(blocking=True)
except nilmdb.server.serverutil.CherryPyExit:
print("Exiting due to CherryPy error", file=sys.stderr)
raise
finally:
if not args.quiet:
print("Closing database")
db.close()
# Clean up
if not args.quiet:
print "Closing database"
db.close()
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@ -1,12 +1,10 @@
#!/usr/bin/env python3 #!/usr/bin/python
import nilmdb.cmdline import nilmdb.cmdline
def main(): def main():
"""Main entry point for the 'nilmtool' command line script""" """Main entry point for the 'nilmtool' command line script"""
nilmdb.cmdline.Cmdline().run() nilmdb.cmdline.Cmdline().run()
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@ -1,9 +1,21 @@
"""nilmdb.server""" """nilmdb.server"""
# Set up pyximport to automatically rebuild Cython modules if needed. from __future__ import absolute_import
import pyximport
pyximport.install(inplace=True, build_in_temp=False) # Try to set up pyximport to automatically rebuild Cython modules. If
# this doesn't work, it's OK, as long as the modules were built externally.
# (e.g. python setup.py build_ext --inplace)
try: # pragma: no cover
import Cython
import distutils.version
if (distutils.version.LooseVersion(Cython.__version__) <
distutils.version.LooseVersion("0.17")): # pragma: no cover
raise ImportError("Cython version too old")
import pyximport
pyximport.install(inplace = True, build_in_temp = False)
except (ImportError, TypeError): # pragma: no cover
pass
from nilmdb.server.nilmdb import NilmDB from nilmdb.server.nilmdb import NilmDB
from nilmdb.server.server import Server, wsgi_application from nilmdb.server.server import Server
from nilmdb.server.errors import NilmDBError, StreamError, OverlapError from nilmdb.server.errors import NilmDBError, StreamError, OverlapError

View File

@ -1,138 +1,60 @@
# Fixed record size bulk data storage # Fixed record size bulk data storage
import os # Need absolute_import so that "import nilmdb" won't pull in
import re # nilmdb.py, but will pull the parent nilmdb module instead.
import sys from __future__ import absolute_import
import pickle from __future__ import division
import tempfile from nilmdb.utils.printf import *
from nilmdb.utils.time import float_time_to_string as ftts
from nilmdb.utils.printf import sprintf
from nilmdb.utils.time import timestamp_to_string
import nilmdb.utils import nilmdb.utils
import nilmdb.utils.lock import os
import cPickle as pickle
import re
import sys
#from . import pyrocket as rocket
from . import rocket from . import rocket
# Up to 256 open file descriptors at any given time. # Up to 256 open file descriptors at any given time.
# These variables are global so they can be used in the decorator arguments. # These variables are global so they can be used in the decorator arguments.
table_cache_size = 32 table_cache_size = 16
fd_cache_size = 8 fd_cache_size = 16
@nilmdb.utils.must_close(wrap_verify = False)
@nilmdb.utils.must_close(wrap_verify=False) class BulkData(object):
class BulkData():
def __init__(self, basepath, **kwargs): def __init__(self, basepath, **kwargs):
if isinstance(basepath, str): self.basepath = basepath
self.basepath = self._encode_filename(basepath) self.root = os.path.join(self.basepath, "data")
else:
self.basepath = basepath
self.root = os.path.join(self.basepath, b"data")
self.lock = self.root + b".lock"
self.lockfile = None
# Tuneables # Tuneables
if "file_size" in kwargs and kwargs["file_size"] is not None: if "file_size" in kwargs:
self.file_size = kwargs["file_size"] self.file_size = kwargs["file_size"]
else: else:
# Default to approximately 128 MiB per file # Default to approximately 128 MiB per file
self.file_size = 128 * 1024 * 1024 self.file_size = 128 * 1024 * 1024
if "files_per_dir" in kwargs and kwargs["files_per_dir"] is not None: if "files_per_dir" in kwargs:
self.files_per_dir = kwargs["files_per_dir"] self.files_per_dir = kwargs["files_per_dir"]
else: else:
# 32768 files per dir should work even on FAT32 # 32768 files per dir should work even on FAT32
self.files_per_dir = 32768 self.files_per_dir = 32768
if "initial_nrows" in kwargs and kwargs["initial_nrows"] is not None:
self.initial_nrows = kwargs["initial_nrows"]
else:
# First row is 0
self.initial_nrows = 0
# Make root path # Make root path
if not os.path.isdir(self.root): if not os.path.isdir(self.root):
os.mkdir(self.root) os.mkdir(self.root)
# Create the lock
self.lockfile = open(self.lock, "w")
if not nilmdb.utils.lock.exclusive_lock(self.lockfile):
raise IOError('database at "' +
self._decode_filename(self.basepath) +
'" is already locked by another process')
def close(self): def close(self):
self.getnode.cache_remove_all() self.getnode.cache_remove_all()
if self.lockfile:
nilmdb.utils.lock.exclusive_unlock(self.lockfile)
self.lockfile.close()
try:
os.unlink(self.lock)
except OSError:
pass
self.lockfile = None
def _encode_filename(self, path): def _encode_filename(self, path):
# Translate unicode strings to raw bytes, if needed. We # Encode all paths to UTF-8, regardless of sys.getfilesystemencoding(),
# always manipulate paths internally as bytes. # because we want to be able to represent all code points and the user
return path.encode('utf-8') # will never be directly exposed to filenames. We can then do path
# manipulations on the UTF-8 directly.
def _decode_filename(self, path): if isinstance(path, unicode):
# Translate raw bytes to unicode strings, escaping if needed return path.encode('utf-8')
return path.decode('utf-8', errors='backslashreplace') return path
def _create_check_ospath(self, ospath):
if ospath[-1:] == b'/':
raise ValueError("invalid path; should not end with a /")
if Table.exists(ospath):
raise ValueError("stream already exists at this path")
if os.path.isdir(ospath):
# Look for any files in subdirectories. Fully empty subdirectories
# are OK; they might be there during a rename
for (root, dirs, files) in os.walk(ospath):
if files:
raise ValueError(
"non-empty subdirs of this path already exist")
def _create_parents(self, unicodepath):
"""Verify the path name, and create parent directories if they
don't exist. Returns a list of elements that got created."""
path = self._encode_filename(unicodepath)
if path[0:1] != b'/':
raise ValueError("paths must start with / ")
[group, node] = path.rsplit(b"/", 1)
if group == b'':
raise ValueError("invalid path; path must contain at least one "
"folder")
if node == b'':
raise ValueError("invalid path; should not end with a /")
if not Table.valid_path(path):
raise ValueError("path name is invalid or contains reserved words")
# Create the table's base dir. Note that we make a
# distinction here between NilmDB paths (always Unix style,
# split apart manually) and OS paths (built up with
# os.path.join)
# Make directories leading up to this one
elements = path.lstrip(b'/').split(b'/')
made_dirs = []
try:
# Make parent elements
for i in range(len(elements)):
ospath = os.path.join(self.root, *elements[0:i])
if Table.exists(ospath):
raise ValueError("path is subdir of existing node")
if not os.path.isdir(ospath):
os.mkdir(ospath)
made_dirs.append(ospath)
except Exception:
# Remove paths that we created
for ospath in reversed(made_dirs):
os.rmdir(ospath)
raise
return elements
def create(self, unicodepath, layout_name): def create(self, unicodepath, layout_name):
""" """
@ -145,11 +67,32 @@ class BulkData():
layout_name: string for nilmdb.layout.get_named(), e.g. 'float32_8' layout_name: string for nilmdb.layout.get_named(), e.g. 'float32_8'
""" """
elements = self._create_parents(unicodepath) path = self._encode_filename(unicodepath)
if path[0] != '/':
raise ValueError("paths must start with /")
[ group, node ] = path.rsplit("/", 1)
if group == '':
raise ValueError("invalid path; path must contain at least one "
"folder")
# Create the table. Note that we make a distinction here
# between NilmDB paths (always Unix style, split apart
# manually) and OS paths (built up with os.path.join)
# Make directories leading up to this one
elements = path.lstrip('/').split('/')
for i in range(len(elements)):
ospath = os.path.join(self.root, *elements[0:i])
if Table.exists(ospath):
raise ValueError("path is subdir of existing node")
if not os.path.isdir(ospath):
os.mkdir(ospath)
# Make the final dir # Make the final dir
ospath = os.path.join(self.root, *elements) ospath = os.path.join(self.root, *elements)
self._create_check_ospath(ospath) if os.path.isdir(ospath):
raise ValueError("subdirs of this path already exist")
os.mkdir(ospath) os.mkdir(ospath)
try: try:
@ -159,78 +102,24 @@ class BulkData():
# Open and cache it # Open and cache it
self.getnode(unicodepath) self.getnode(unicodepath)
except Exception: except:
exc_info = sys.exc_info() exc_info = sys.exc_info()
try: try:
os.rmdir(ospath) os.rmdir(ospath)
except OSError: except OSError:
pass pass
raise exc_info[1].with_traceback(exc_info[2]) raise exc_info[1], None, exc_info[2]
# Success # Success
return return
def _remove_leaves(self, unicodepath):
"""Remove empty directories starting at the leaves of unicodepath"""
path = self._encode_filename(unicodepath)
elements = path.lstrip(b'/').split(b'/')
for i in reversed(list(range(len(elements)))):
ospath = os.path.join(self.root, *elements[0:i+1])
try:
os.rmdir(ospath)
except OSError:
pass
def rename(self, oldunicodepath, newunicodepath):
"""Move entire tree from 'oldunicodepath' to
'newunicodepath'"""
oldpath = self._encode_filename(oldunicodepath)
newpath = self._encode_filename(newunicodepath)
# Get OS paths
oldelements = oldpath.lstrip(b'/').split(b'/')
oldospath = os.path.join(self.root, *oldelements)
newelements = newpath.lstrip(b'/').split(b'/')
newospath = os.path.join(self.root, *newelements)
# Basic checks
if oldospath == newospath:
raise ValueError("old and new paths are the same")
# Remove Table object at old path from cache
self.getnode.cache_remove(self, oldunicodepath)
# Move the table to a temporary location
tmpdir = tempfile.mkdtemp(prefix=b"rename-", dir=self.root)
tmppath = os.path.join(tmpdir, b"table")
os.rename(oldospath, tmppath)
try:
# Check destination path
self._create_check_ospath(newospath)
# Create parent dirs for new location
self._create_parents(newunicodepath)
# Move table into new location
os.rename(tmppath, newospath)
except Exception:
# On failure, move the table back to original path
os.rename(tmppath, oldospath)
os.rmdir(tmpdir)
raise
# Prune old dirs
self._remove_leaves(oldunicodepath)
os.rmdir(tmpdir)
def destroy(self, unicodepath): def destroy(self, unicodepath):
"""Fully remove all data at a particular path. No way to undo """Fully remove all data at a particular path. No way to undo
it! The group/path structure is removed, too.""" it! The group/path structure is removed, too."""
path = self._encode_filename(unicodepath) path = self._encode_filename(unicodepath)
# Get OS path # Get OS path
elements = path.lstrip(b'/').split(b'/') elements = path.lstrip('/').split('/')
ospath = os.path.join(self.root, *elements) ospath = os.path.join(self.root, *elements)
# Remove Table object from cache # Remove Table object from cache
@ -239,42 +128,41 @@ class BulkData():
# Remove the contents of the target directory # Remove the contents of the target directory
if not Table.exists(ospath): if not Table.exists(ospath):
raise ValueError("nothing at that path") raise ValueError("nothing at that path")
for (root, dirs, files) in os.walk(ospath, topdown=False): for (root, dirs, files) in os.walk(ospath, topdown = False):
for name in files: for name in files:
os.remove(os.path.join(root, name)) os.remove(os.path.join(root, name))
for name in dirs: for name in dirs:
os.rmdir(os.path.join(root, name)) os.rmdir(os.path.join(root, name))
# Remove leftover empty directories # Remove empty parent directories
self._remove_leaves(unicodepath) for i in reversed(range(len(elements))):
ospath = os.path.join(self.root, *elements[0:i+1])
try:
os.rmdir(ospath)
except OSError:
break
# Cache open tables # Cache open tables
@nilmdb.utils.lru_cache(size=table_cache_size, @nilmdb.utils.lru_cache(size = table_cache_size,
onremove=lambda x: x.close()) onremove = lambda x: x.close())
def getnode(self, unicodepath): def getnode(self, unicodepath):
"""Return a Table object corresponding to the given database """Return a Table object corresponding to the given database
path, which must exist.""" path, which must exist."""
path = self._encode_filename(unicodepath) path = self._encode_filename(unicodepath)
elements = path.lstrip(b'/').split(b'/') elements = path.lstrip('/').split('/')
ospath = os.path.join(self.root, *elements) ospath = os.path.join(self.root, *elements)
return Table(ospath, self.initial_nrows) return Table(ospath)
@nilmdb.utils.must_close(wrap_verify = False)
@nilmdb.utils.must_close(wrap_verify=False) class Table(object):
class Table():
"""Tools to help access a single table (data at a specific OS path).""" """Tools to help access a single table (data at a specific OS path)."""
# See design.md for design details # See design.md for design details
# Class methods, to help keep format details in this class. # Class methods, to help keep format details in this class.
@classmethod
def valid_path(cls, root):
"""Return True if a root path is a valid name"""
return b"_format" not in root.split(b"/")
@classmethod @classmethod
def exists(cls, root): def exists(cls, root):
"""Return True if a table appears to exist at this OS path""" """Return True if a table appears to exist at this OS path"""
return os.path.isfile(os.path.join(root, b"_format")) return os.path.isfile(os.path.join(root, "_format"))
@classmethod @classmethod
def create(cls, root, layout, file_size, files_per_dir): def create(cls, root, layout, file_size, files_per_dir):
@ -287,30 +175,37 @@ class Table():
rows_per_file = max(file_size // rkt.binary_size, 1) rows_per_file = max(file_size // rkt.binary_size, 1)
rkt.close() rkt.close()
fmt = { fmt = { "rows_per_file": rows_per_file,
"rows_per_file": rows_per_file, "files_per_dir": files_per_dir,
"files_per_dir": files_per_dir, "layout": layout,
"layout": layout, "version": 2 }
"version": 3 with open(os.path.join(root, "_format"), "wb") as f:
} pickle.dump(fmt, f, 2)
nilmdb.utils.atomic.replace_file(
os.path.join(root, b"_format"), pickle.dumps(fmt, 2))
# Normal methods # Normal methods
def __init__(self, root, initial_nrows=0): def __init__(self, root):
"""'root' is the full OS path to the directory of this table""" """'root' is the full OS path to the directory of this table"""
self.root = root self.root = root
self.initial_nrows = initial_nrows
# Load the format # Load the format
with open(os.path.join(self.root, b"_format"), "rb") as f: with open(os.path.join(self.root, "_format"), "rb") as f:
fmt = pickle.load(f) fmt = pickle.load(f)
if fmt["version"] != 3: if fmt["version"] == 1: # pragma: no cover
# Old versions used floating point timestamps, which aren't # We can handle this old version by converting from
# valid anymore. # struct_fmt back to layout name.
raise NotImplementedError("old version " + str(fmt["version"]) + compat = { "<dHHHHHH": "uint16_6",
" bulk data store is not supported") "<dHHHHHHHHH": "uint16_9",
"<dffffffff": "float32_8" }
if fmt["struct_fmt"] in compat:
fmt["version"] = 2
fmt["layout"] = compat[fmt["struct_fmt"]]
else:
raise NotImplementedError("old version 1 data with format "
+ fmt["struct_fmt"] + " is no good")
elif fmt["version"] != 2: # pragma: no cover (just future proofing)
raise NotImplementedError("version " + str(fmt["version"]) +
" bulk data store not supported")
self.rows_per_file = fmt["rows_per_file"] self.rows_per_file = fmt["rows_per_file"]
self.files_per_dir = fmt["files_per_dir"] self.files_per_dir = fmt["files_per_dir"]
@ -336,38 +231,31 @@ class Table():
# greater than the row number of any piece of data that # greater than the row number of any piece of data that
# currently exists, not necessarily all data that _ever_ # currently exists, not necessarily all data that _ever_
# existed. # existed.
regex = re.compile(b"^[0-9a-f]{4,}$") regex = re.compile("^[0-9a-f]{4,}$")
# Find the last directory. We sort and loop through all of them, # Find the last directory. We sort and loop through all of them,
# starting with the numerically greatest, because the dirs could be # starting with the numerically greatest, because the dirs could be
# empty if something was deleted but the directory was unexpectedly # empty if something was deleted.
# not deleted.
subdirs = sorted(filter(regex.search, os.listdir(self.root)), subdirs = sorted(filter(regex.search, os.listdir(self.root)),
key=lambda x: int(x, 16), reverse=True) key = lambda x: int(x, 16), reverse = True)
for subdir in subdirs: for subdir in subdirs:
# Now find the last file in that dir # Now find the last file in that dir
path = os.path.join(self.root, subdir) path = os.path.join(self.root, subdir)
files = list(filter(regex.search, os.listdir(path))) files = filter(regex.search, os.listdir(path))
if not files: if not files: # pragma: no cover (shouldn't occur)
# Empty dir: try the next one # Empty dir: try the next one
continue continue
# Find the numerical max # Find the numerical max
filename = max(files, key=lambda x: int(x, 16)) filename = max(files, key = lambda x: int(x, 16))
offset = os.path.getsize(os.path.join(self.root, subdir, filename)) offset = os.path.getsize(os.path.join(self.root, subdir, filename))
# Convert to row number # Convert to row number
return self._row_from_offset(subdir, filename, offset) return self._row_from_offset(subdir, filename, offset)
# No files, so no data. We typically start at row 0 in this # No files, so no data
# case, although initial_nrows is specified during some tests return 0
# to exercise other parts of the code better. Since we have
# no files yet, round initial_nrows up so it points to a row
# that would begin a new file.
nrows = ((self.initial_nrows + (self.rows_per_file - 1)) //
self.rows_per_file) * self.rows_per_file
return nrows
def _offset_from_row(self, row): def _offset_from_row(self, row):
"""Return a (subdir, filename, offset, count) tuple: """Return a (subdir, filename, offset, count) tuple:
@ -380,8 +268,8 @@ class Table():
filenum = row // self.rows_per_file filenum = row // self.rows_per_file
# It's OK if these format specifiers are too short; the filenames # It's OK if these format specifiers are too short; the filenames
# will just get longer but will still sort correctly. # will just get longer but will still sort correctly.
dirname = sprintf(b"%04x", filenum // self.files_per_dir) dirname = sprintf("%04x", filenum // self.files_per_dir)
filename = sprintf(b"%04x", filenum % self.files_per_dir) filename = sprintf("%04x", filenum % self.files_per_dir)
offset = (row % self.rows_per_file) * self.row_size offset = (row % self.rows_per_file) * self.row_size
count = self.rows_per_file - (row % self.rows_per_file) count = self.rows_per_file - (row % self.rows_per_file)
return (dirname, filename, offset, count) return (dirname, filename, offset, count)
@ -389,14 +277,14 @@ class Table():
def _row_from_offset(self, subdir, filename, offset): def _row_from_offset(self, subdir, filename, offset):
"""Return the row number that corresponds to the given """Return the row number that corresponds to the given
'subdir/filename' and byte-offset within that file.""" 'subdir/filename' and byte-offset within that file."""
if (offset % self.row_size) != 0: if (offset % self.row_size) != 0: # pragma: no cover
# this shouldn't occur, unless there is some corruption somewhere # this shouldn't occur, unless there is some corruption somewhere
raise ValueError("file offset is not a multiple of data size") raise ValueError("file offset is not a multiple of data size")
filenum = int(subdir, 16) * self.files_per_dir + int(filename, 16) filenum = int(subdir, 16) * self.files_per_dir + int(filename, 16)
row = (filenum * self.rows_per_file) + (offset // self.row_size) row = (filenum * self.rows_per_file) + (offset // self.row_size)
return row return row
def _remove_or_truncate_file(self, subdir, filename, offset=0): def _remove_or_truncate_file(self, subdir, filename, offset = 0):
"""Remove the given file, and remove the subdirectory too """Remove the given file, and remove the subdirectory too
if it's empty. If offset is nonzero, truncate the file if it's empty. If offset is nonzero, truncate the file
to that size instead.""" to that size instead."""
@ -412,12 +300,12 @@ class Table():
# Try deleting subdir, too # Try deleting subdir, too
try: try:
os.rmdir(os.path.join(self.root, subdir)) os.rmdir(os.path.join(self.root, subdir))
except Exception: except:
pass pass
# Cache open files # Cache open files
@nilmdb.utils.lru_cache(size=fd_cache_size, @nilmdb.utils.lru_cache(size = fd_cache_size,
onremove=lambda f: f.close()) onremove = lambda f: f.close())
def file_open(self, subdir, filename): def file_open(self, subdir, filename):
"""Open and map a given 'subdir/filename' (relative to self.root). """Open and map a given 'subdir/filename' (relative to self.root).
Will be automatically closed when evicted from the cache.""" Will be automatically closed when evicted from the cache."""
@ -430,23 +318,38 @@ class Table():
return rocket.Rocket(self.layout, return rocket.Rocket(self.layout,
os.path.join(self.root, subdir, filename)) os.path.join(self.root, subdir, filename))
def append_data(self, data, start, end, binary=False): def append(self, data):
"""Append the data and flush it to disk.
data is a nested Python list [[row],[row],[...]]"""
remaining = len(data)
dataiter = iter(data)
while remaining:
# See how many rows we can fit into the current file, and open it
(subdir, fname, offset, count) = self._offset_from_row(self.nrows)
if count > remaining:
count = remaining
f = self.file_open(subdir, fname)
# Write the data
written = f.append_iter(count, dataiter)
if written != count: # pragma: no cover
raise Exception("Didn't write the expected number of rows: "
+ str(written) + " != " + str(count))
remaining -= count
self.nrows += count
def append_string(self, data, start, end):
"""Parse the formatted string in 'data', according to the """Parse the formatted string in 'data', according to the
current layout, and append it to the table. If any timestamps current layout, and append it to the table. If any timestamps
are non-monotonic, or don't fall between 'start' and 'end', are non-monotonic, or don't fall between 'start' and 'end',
a ValueError is raised. a ValueError is raised.
Note that data is always of 'bytes' type.
If 'binary' is True, the data should be in raw binary format
instead: little-endian, matching the current table's layout,
including the int64 timestamp.
If this function succeeds, it returns normally. Otherwise, If this function succeeds, it returns normally. Otherwise,
the table is reverted back to its original state by truncating the table is reverted back to its original state by truncating
or deleting files as necessary.""" or deleting files as necessary."""
data_offset = 0 data_offset = 0
last_timestamp = nilmdb.utils.time.min_timestamp last_timestamp = -1e12
tot_rows = self.nrows tot_rows = self.nrows
count = 0 count = 0
linenum = 0 linenum = 0
@ -454,57 +357,40 @@ class Table():
while data_offset < len(data): while data_offset < len(data):
# See how many rows we can fit into the current file, # See how many rows we can fit into the current file,
# and open it # and open it
(subdir, fname, offs, count) = self._offset_from_row(tot_rows) (subdir, fname, offset, count) = self._offset_from_row(tot_rows)
f = self.file_open(subdir, fname) f = self.file_open(subdir, fname)
# Ask the rocket object to parse and append up to "count" # Ask the rocket object to parse and append up to "count"
# rows of data, verifying things along the way. # rows of data, verifying things along the way.
try: try:
if binary:
appender = f.append_binary
else:
appender = f.append_string
(added_rows, data_offset, last_timestamp, linenum (added_rows, data_offset, last_timestamp, linenum
) = appender(count, data, data_offset, linenum, ) = f.append_string(count, data, data_offset, linenum,
start, end, last_timestamp) start, end, last_timestamp)
except rocket.ParseError as e: except rocket.ParseError as e:
(linenum, colnum, errtype, obj) = e.args (linenum, errtype, obj) = e.args
if binary:
where = "byte %d: " % (linenum)
else:
where = "line %d, column %d: " % (linenum, colnum)
# Extract out the error line, add column marker
try:
if binary:
raise IndexError
bad = data.splitlines()[linenum-1]
bad += b'\n' + b' ' * (colnum - 1) + b'^'
except IndexError:
bad = b""
if errtype == rocket.ERR_NON_MONOTONIC: if errtype == rocket.ERR_NON_MONOTONIC:
err = "timestamp is not monotonically increasing" err = sprintf("line %d: timestamp is not monotonically "
"increasing", linenum)
elif errtype == rocket.ERR_OUT_OF_INTERVAL: elif errtype == rocket.ERR_OUT_OF_INTERVAL:
if obj < start: if obj < start:
err = sprintf("Data timestamp %s < start time %s", err = sprintf("line %d: Data timestamp %s < "
timestamp_to_string(obj), "start time %s", linenum,
timestamp_to_string(start)) ftts(obj), ftts(start))
else: else:
err = sprintf("Data timestamp %s >= end time %s", err = sprintf("line %d: Data timestamp %s >= "
timestamp_to_string(obj), "end time %s", linenum,
timestamp_to_string(end)) ftts(obj), ftts(end))
else: else:
err = str(obj) err = sprintf("line %d: %s", linenum, str(obj))
bad_str = bad.decode('utf-8', errors='backslashreplace') raise ValueError("error parsing input data: " + err)
raise ValueError("error parsing input data: " +
where + err + "\n" + bad_str)
tot_rows += added_rows tot_rows += added_rows
except Exception: except Exception:
# Some failure, so try to roll things back by truncating or # Some failure, so try to roll things back by truncating or
# deleting files that we may have appended data to. # deleting files that we may have appended data to.
cleanpos = self.nrows cleanpos = self.nrows
while cleanpos <= tot_rows: while cleanpos <= tot_rows:
(subdir, fname, offs, count) = self._offset_from_row(cleanpos) (subdir, fname, offset, count) = self._offset_from_row(cleanpos)
self._remove_or_truncate_file(subdir, fname, offs) self._remove_or_truncate_file(subdir, fname, offset)
cleanpos += count cleanpos += count
# Re-raise original exception # Re-raise original exception
raise raise
@ -512,11 +398,15 @@ class Table():
# Success, so update self.nrows accordingly # Success, so update self.nrows accordingly
self.nrows = tot_rows self.nrows = tot_rows
def get_data(self, start, stop, binary=False): def _get_data(self, start, stop, as_string):
"""Extract data corresponding to Python range [n:m], """Extract data corresponding to Python range [n:m],
and returns a formatted string""" and returns a numeric list or formatted string,
if (start is None or stop is None or depending on as_string."""
start > stop or start < 0 or stop > self.nrows): if (start is None or
stop is None or
start > stop or
start < 0 or
stop > self.nrows):
raise IndexError("Index out of range") raise IndexError("Index out of range")
ret = [] ret = []
@ -527,21 +417,42 @@ class Table():
if count > remaining: if count > remaining:
count = remaining count = remaining
f = self.file_open(subdir, filename) f = self.file_open(subdir, filename)
if binary: if as_string:
ret.append(f.extract_binary(offset, count))
else:
ret.append(f.extract_string(offset, count)) ret.append(f.extract_string(offset, count))
else:
ret.extend(f.extract_list(offset, count))
remaining -= count remaining -= count
row += count row += count
return b"".join(ret) if as_string:
return "".join(ret)
return ret
def __getitem__(self, row): def get_as_text(self, start, stop):
"""Extract timestamps from a row, with table[n] notation.""" """Extract data corresponding to Python range [n:m],
if row < 0 or row >= self.nrows: and returns a formatted string"""
return self._get_data(start, stop, True)
def __getitem__(self, key):
"""Extract data and return it. Supports simple indexing
(table[n]) and range slices (table[n:m]). Returns a nested
Python list [[row],[row],[...]]"""
# Handle simple slices
if isinstance(key, slice):
# Fall back to brute force if the slice isn't simple
try:
if (key.step is not None and key.step != 1):
raise IndexError
return self._get_data(key.start, key.stop, False)
except IndexError:
return [ self[x] for x in xrange(*key.indices(self.nrows)) ]
# Handle single points (inefficiently!)
if key < 0 or key >= self.nrows:
raise IndexError("Index out of range") raise IndexError("Index out of range")
(subdir, filename, offset, count) = self._offset_from_row(row) (subdir, filename, offset, count) = self._offset_from_row(key)
f = self.file_open(subdir, filename) f = self.file_open(subdir, filename)
return f.extract_timestamp(offset) return f.extract_list(offset, 1)[0]
def _remove_rows(self, subdir, filename, start, stop): def _remove_rows(self, subdir, filename, start, stop):
"""Helper to mark specific rows as being removed from a """Helper to mark specific rows as being removed from a
@ -556,12 +467,12 @@ class Table():
# file. Only when the list covers the entire extent of the # file. Only when the list covers the entire extent of the
# file will that file be removed. # file will that file be removed.
datafile = os.path.join(self.root, subdir, filename) datafile = os.path.join(self.root, subdir, filename)
cachefile = datafile + b".removed" cachefile = datafile + ".removed"
try: try:
with open(cachefile, "rb") as f: with open(cachefile, "rb") as f:
ranges = pickle.load(f) ranges = pickle.load(f)
cachefile_present = True cachefile_present = True
except Exception: except:
ranges = [] ranges = []
cachefile_present = False cachefile_present = False
@ -583,9 +494,8 @@ class Table():
# Not connected; append previous and start again # Not connected; append previous and start again
merged.append(prev) merged.append(prev)
prev = new prev = new
# Last range we were looking at goes into the file. We know if prev is not None:
# there was at least one (the one we just removed). merged.append(prev)
merged.append(prev)
# If the range covered the whole file, we can delete it now. # If the range covered the whole file, we can delete it now.
# Note that the last file in a table may be only partially # Note that the last file in a table may be only partially
@ -594,7 +504,7 @@ class Table():
# remainder will be filled on a subsequent append(), and things # remainder will be filled on a subsequent append(), and things
# are generally easier if we don't have to special-case that. # are generally easier if we don't have to special-case that.
if (len(merged) == 1 and if (len(merged) == 1 and
merged[0][0] == 0 and merged[0][1] == self.rows_per_file): merged[0][0] == 0 and merged[0][1] == self.rows_per_file):
# Delete files # Delete files
if cachefile_present: if cachefile_present:
os.remove(cachefile) os.remove(cachefile)
@ -633,3 +543,11 @@ class Table():
self._remove_rows(subdir, filename, row_offset, row_offset + count) self._remove_rows(subdir, filename, row_offset, row_offset + count)
remaining -= count remaining -= count
row += count row += count
class TimestampOnlyTable(object):
"""Helper that lets us pass a Tables object into bisect, by
returning only the timestamp when a particular row is requested."""
def __init__(self, table):
self.table = table
def __getitem__(self, index):
return self.table[index][0]

View File

@ -1,15 +1,12 @@
"""Exceptions""" """Exceptions"""
class NilmDBError(Exception): class NilmDBError(Exception):
"""Base exception for NilmDB errors""" """Base exception for NilmDB errors"""
def __init__(self, msg="Unspecified error"): def __init__(self, message = "Unspecified error"):
super().__init__(msg) Exception.__init__(self, message)
class StreamError(NilmDBError): class StreamError(NilmDBError):
pass pass
class OverlapError(NilmDBError): class OverlapError(NilmDBError):
pass pass

View File

@ -1,11 +1,5 @@
# cython: language_level=2
"""Interval, IntervalSet """Interval, IntervalSet
The Interval implemented here is just like
nilmdb.utils.interval.Interval, except implemented in Cython for
speed.
Represents an interval of time, and a set of such intervals. Represents an interval of time, and a set of such intervals.
Intervals are half-open, ie. they include data points with timestamps Intervals are half-open, ie. they include data points with timestamps
@ -25,54 +19,51 @@ Intervals are half-open, ie. they include data points with timestamps
# Fourth version is an optimized rb-tree that stores interval starts # Fourth version is an optimized rb-tree that stores interval starts
# and ends directly in the tree, like bxinterval did. # and ends directly in the tree, like bxinterval did.
from ..utils.time import min_timestamp as nilmdb_min_timestamp from ..utils.time import float_time_to_string as ftts
from ..utils.time import max_timestamp as nilmdb_max_timestamp
from ..utils.time import timestamp_to_string
from ..utils.iterator import imerge
from ..utils.interval import IntervalError
import itertools
cimport rbtree cimport rbtree
from libc.stdint cimport uint64_t, int64_t cdef extern from "stdint.h":
ctypedef unsigned long long uint64_t
ctypedef int64_t timestamp_t class IntervalError(Exception):
"""Error due to interval overlap, etc"""
pass
cdef class Interval: cdef class Interval:
"""Represents an interval of time.""" """Represents an interval of time."""
cdef public timestamp_t start, end cdef public double start, end
def __init__(self, timestamp_t start, timestamp_t end): def __init__(self, double start, double end):
""" """
'start' and 'end' are arbitrary numbers that represent time 'start' and 'end' are arbitrary floats that represent time
""" """
if start >= end: if start >= end:
# Explicitly disallow zero-width intervals (since they're half-open) # Explicitly disallow zero-width intervals (since they're half-open)
raise IntervalError("start %s must precede end %s" % (start, end)) raise IntervalError("start %s must precede end %s" % (start, end))
self.start = start self.start = float(start)
self.end = end self.end = float(end)
def __repr__(self): def __repr__(self):
s = repr(self.start) + ", " + repr(self.end) s = repr(self.start) + ", " + repr(self.end)
return self.__class__.__name__ + "(" + s + ")" return self.__class__.__name__ + "(" + s + ")"
def __str__(self): def __str__(self):
return ("[" + timestamp_to_string(self.start) + return "[" + ftts(self.start) + " -> " + ftts(self.end) + ")"
" -> " + timestamp_to_string(self.end) + ")")
# Compare two intervals. If non-equal, order by start then end def __cmp__(self, Interval other):
def __lt__(self, Interval other): """Compare two intervals. If non-equal, order by start then end"""
return (self.start, self.end) < (other.start, other.end) if not isinstance(other, Interval):
def __gt__(self, Interval other): raise TypeError("bad type")
return (self.start, self.end) > (other.start, other.end) if self.start == other.start:
def __le__(self, Interval other): if self.end < other.end:
return (self.start, self.end) <= (other.start, other.end) return -1
def __ge__(self, Interval other): if self.end > other.end:
return (self.start, self.end) >= (other.start, other.end) return 1
def __eq__(self, Interval other): return 0
return (self.start, self.end) == (other.start, other.end) if self.start < other.start:
def __ne__(self, Interval other): return -1
return (self.start, self.end) != (other.start, other.end) return 1
cpdef intersects(self, Interval other): cpdef intersects(self, Interval other):
"""Return True if two Interval objects intersect""" """Return True if two Interval objects intersect"""
@ -80,7 +71,7 @@ cdef class Interval:
return False return False
return True return True
cpdef subset(self, timestamp_t start, timestamp_t end): cpdef subset(self, double start, double end):
"""Return a new Interval that is a subset of this one""" """Return a new Interval that is a subset of this one"""
# A subclass that tracks additional data might override this. # A subclass that tracks additional data might override this.
if start < self.start or end > self.end: if start < self.start or end > self.end:
@ -102,14 +93,14 @@ cdef class DBInterval(Interval):
db_end = 200, db_endpos = 20000 db_end = 200, db_endpos = 20000
""" """
cpdef public timestamp_t db_start, db_end cpdef public double db_start, db_end
cpdef public uint64_t db_startpos, db_endpos cpdef public uint64_t db_startpos, db_endpos
def __init__(self, start, end, def __init__(self, start, end,
db_start, db_end, db_start, db_end,
db_startpos, db_endpos): db_startpos, db_endpos):
""" """
'db_start' and 'db_end' are arbitrary numbers that represent 'db_start' and 'db_end' are arbitrary floats that represent
time. They must be a strict superset of the time interval time. They must be a strict superset of the time interval
covered by 'start' and 'end'. The 'db_startpos' and covered by 'start' and 'end'. The 'db_startpos' and
'db_endpos' are arbitrary database position indicators that 'db_endpos' are arbitrary database position indicators that
@ -129,7 +120,7 @@ cdef class DBInterval(Interval):
s += ", " + repr(self.db_startpos) + ", " + repr(self.db_endpos) s += ", " + repr(self.db_startpos) + ", " + repr(self.db_endpos)
return self.__class__.__name__ + "(" + s + ")" return self.__class__.__name__ + "(" + s + ")"
cpdef subset(self, timestamp_t start, timestamp_t end): cpdef subset(self, double start, double end):
""" """
Return a new DBInterval that is a subset of this one Return a new DBInterval that is a subset of this one
""" """
@ -273,15 +264,21 @@ cdef class IntervalSet:
def __and__(self, other not None): def __and__(self, other not None):
""" """
Compute a new IntervalSet from the intersection of this Compute a new IntervalSet from the intersection of two others
IntervalSet with one other interval.
Output intervals are built as subsets of the intervals in the Output intervals are built as subsets of the intervals in the
first argument (self). first argument (self).
""" """
out = IntervalSet() out = IntervalSet()
for i in self.intersection(other):
out.tree.insert(rbtree.RBNode(i.start, i.end, i)) if not isinstance(other, IntervalSet):
for i in self.intersection(other):
out.tree.insert(rbtree.RBNode(i.start, i.end, i))
else:
for x in other:
for i in self.intersection(x):
out.tree.insert(rbtree.RBNode(i.start, i.end, i))
return out return out
def intersection(self, Interval interval not None, orig = False): def intersection(self, Interval interval not None, orig = False):
@ -298,18 +295,23 @@ cdef class IntervalSet:
(potentially) subsetted to make the one that is being (potentially) subsetted to make the one that is being
returned. returned.
""" """
if orig: if not isinstance(interval, Interval):
for n in self.tree.intersect(interval.start, interval.end): raise TypeError("bad type")
i = n.obj for n in self.tree.intersect(interval.start, interval.end):
subset = i.subset(max(i.start, interval.start), i = n.obj
min(i.end, interval.end)) if i:
yield (subset, i) if i.start >= interval.start and i.end <= interval.end:
else: if orig:
for n in self.tree.intersect(interval.start, interval.end): yield (i, i)
i = n.obj else:
subset = i.subset(max(i.start, interval.start), yield i
min(i.end, interval.end)) else:
yield subset subset = i.subset(max(i.start, interval.start),
min(i.end, interval.end))
if orig:
yield (subset, i)
else:
yield subset
cpdef intersects(self, Interval other): cpdef intersects(self, Interval other):
"""Return True if this IntervalSet intersects another interval""" """Return True if this IntervalSet intersects another interval"""
@ -318,7 +320,7 @@ cdef class IntervalSet:
return True return True
return False return False
def find_end(self, timestamp_t t): def find_end(self, double t):
""" """
Return an Interval from this tree that ends at time t, or Return an Interval from this tree that ends at time t, or
None if it doesn't exist. None if it doesn't exist.

201
nilmdb/server/layout.pyx Normal file
View File

@ -0,0 +1,201 @@
# cython: profile=False
import time
import sys
import inspect
import cStringIO
cdef enum:
max_value_count = 64
cimport cython
cimport libc.stdlib
cimport libc.stdio
cimport libc.string
class ParserError(Exception):
def __init__(self, line, message):
self.message = "line " + str(line) + ": " + message
Exception.__init__(self, self.message)
class FormatterError(Exception):
pass
class Layout:
"""Represents a NILM database layout"""
def __init__(self, typestring):
"""Initialize this Layout object to handle the specified
type string"""
try:
[ datatype, count ] = typestring.split("_")
except:
raise KeyError("invalid layout string")
try:
self.count = int(count)
except ValueError:
raise KeyError("invalid count")
if self.count < 1 or self.count > max_value_count:
raise KeyError("invalid count")
if datatype == 'uint16':
self.parse = self.parse_uint16
self.format_str = "%.6f" + " %d" * self.count
self.format = self.format_generic
elif datatype == 'float32':
self.parse = self.parse_float64
self.format_str = "%.6f" + " %.6e" * self.count
self.format = self.format_generic
elif datatype == 'float64':
self.parse = self.parse_float64
self.format_str = "%.6f" + " %.16e" * self.count
self.format = self.format_generic
else:
raise KeyError("invalid type")
self.datatype = datatype
# Parsers
def parse_float64(self, char *text):
cdef int n
cdef double ts
# Return doubles even in float32 case, since they're going into
# a Python array which would upconvert to double anyway.
result = [0] * (self.count + 1)
cdef char *end
ts = libc.stdlib.strtod(text, &end)
if end == text:
raise ValueError("bad timestamp")
result[0] = ts
for n in range(self.count):
text = end
result[n+1] = libc.stdlib.strtod(text, &end)
if end == text:
raise ValueError("wrong number of values")
n = 0
while end[n] == ' ':
n += 1
if end[n] != '\n' and end[n] != '#' and end[n] != '\0':
raise ValueError("extra data on line")
return (ts, result)
def parse_uint16(self, char *text):
cdef int n
cdef double ts
cdef int v
cdef char *end
result = [0] * (self.count + 1)
ts = libc.stdlib.strtod(text, &end)
if end == text:
raise ValueError("bad timestamp")
result[0] = ts
for n in range(self.count):
text = end
v = libc.stdlib.strtol(text, &end, 10)
if v < 0 or v > 65535:
raise ValueError("value out of range")
result[n+1] = v
if end == text:
raise ValueError("wrong number of values")
n = 0
while end[n] == ' ':
n += 1
if end[n] != '\n' and end[n] != '#' and end[n] != '\0':
raise ValueError("extra data on line")
return (ts, result)
# Formatters
def format_generic(self, d):
n = len(d) - 1
if n != self.count:
raise ValueError("wrong number of values for layout type: "
"got %d, wanted %d" % (n, self.count))
return (self.format_str % tuple(d)) + "\n"
# Get a layout by name
def get_named(typestring):
try:
return Layout(typestring)
except KeyError:
compat = { "PrepData": "float32_8",
"RawData": "uint16_6",
"RawNotchedData": "uint16_9" }
return Layout(compat[typestring])
class Parser(object):
"""Object that parses and stores ASCII data for inclusion into the
database"""
def __init__(self, layout):
if issubclass(layout.__class__, Layout):
self.layout = layout
else:
try:
self.layout = get_named(layout)
except KeyError:
raise TypeError("unknown layout")
self.data = []
self.min_timestamp = None
self.max_timestamp = None
def parse(self, textdata):
"""
Parse the data, provided as lines of text, using the current
layout, into an internal data structure suitable for a
pytables 'table.append(parser.data)'.
"""
cdef double last_ts = -1e12, ts
cdef int n = 0, i
cdef char *line
indata = cStringIO.StringIO(textdata)
# Assume any parsing error is a real error.
# In the future we might want to skip completely empty lines,
# or partial lines right before EOF?
try:
self.data = []
for pyline in indata:
line = pyline
n += 1
if line[0] == '\#':
continue
(ts, row) = self.layout.parse(line)
if ts <= last_ts:
raise ValueError("timestamp is not "
"monotonically increasing")
last_ts = ts
self.data.append(row)
except (ValueError, IndexError, TypeError) as e:
raise ParserError(n, "error: " + e.message)
# Mark timestamp ranges
if len(self.data):
self.min_timestamp = self.data[0][0]
self.max_timestamp = self.data[-1][0]
class Formatter(object):
"""Object that formats database data into ASCII"""
def __init__(self, layout):
if issubclass(layout.__class__, Layout):
self.layout = layout
else:
try:
self.layout = get_named(layout)
except KeyError:
raise TypeError("unknown layout")
def format(self, data):
"""
Format raw data from the database, using the current layout,
as lines of ACSII text.
"""
text = cStringIO.StringIO()
try:
for row in data:
text.write(self.layout.format(row))
except (ValueError, IndexError, TypeError) as e:
raise FormatterError("formatting error: " + e.message)
return text.getvalue()

View File

@ -7,20 +7,21 @@ Object that represents a NILM database file.
Manages both the SQL database and the table storage backend. Manages both the SQL database and the table storage backend.
""" """
import os # Need absolute_import so that "import nilmdb" won't pull in
import errno # nilmdb.py, but will pull the parent nilmdb module instead.
import sqlite3 from __future__ import absolute_import
import nilmdb.utils import nilmdb.utils
from nilmdb.utils.printf import printf from nilmdb.utils.printf import *
from nilmdb.utils.time import timestamp_to_bytes from nilmdb.server.interval import (Interval, DBInterval,
IntervalSet, IntervalError)
from nilmdb.utils.interval import IntervalError
from nilmdb.server.interval import Interval, DBInterval, IntervalSet
from nilmdb.server import bulkdata from nilmdb.server import bulkdata
from nilmdb.server.errors import NilmDBError, StreamError, OverlapError from nilmdb.server.errors import NilmDBError, StreamError, OverlapError
import sqlite3
import os
import errno
import bisect
# Note about performance and transactions: # Note about performance and transactions:
# #
# Committing a transaction in the default sync mode (PRAGMA synchronous=FULL) # Committing a transaction in the default sync mode (PRAGMA synchronous=FULL)
@ -34,10 +35,10 @@ from nilmdb.server.errors import NilmDBError, StreamError, OverlapError
# seems that 'PRAGMA synchronous=NORMAL' and 'PRAGMA journal_mode=WAL' # seems that 'PRAGMA synchronous=NORMAL' and 'PRAGMA journal_mode=WAL'
# give an equivalent speedup more safely. That is what is used here. # give an equivalent speedup more safely. That is what is used here.
_sql_schema_updates = { _sql_schema_updates = {
0: {"next": 1, "sql": """ 0: """
-- All streams -- All streams
CREATE TABLE streams( CREATE TABLE streams(
id INTEGER PRIMARY KEY, -- stream ID id INTEGER PRIMARY KEY, -- stream ID
path TEXT UNIQUE NOT NULL, -- path, e.g. '/newton/prep' path TEXT UNIQUE NOT NULL, -- path, e.g. '/newton/prep'
layout TEXT NOT NULL -- layout name, e.g. float32_8 layout TEXT NOT NULL -- layout name, e.g. float32_8
); );
@ -58,47 +59,24 @@ _sql_schema_updates = {
end_pos INTEGER NOT NULL end_pos INTEGER NOT NULL
); );
CREATE INDEX _ranges_index ON ranges (stream_id, start_time, end_time); CREATE INDEX _ranges_index ON ranges (stream_id, start_time, end_time);
"""}, """,
1: {"next": 3, "sql": """ 1: """
-- Generic dictionary-type metadata that can be associated with a stream -- Generic dictionary-type metadata that can be associated with a stream
CREATE TABLE metadata( CREATE TABLE metadata(
stream_id INTEGER NOT NULL, stream_id INTEGER NOT NULL,
key TEXT NOT NULL, key TEXT NOT NULL,
value TEXT value TEXT
); );
"""}, """,
2: {"error": "old format with floating-point timestamps requires "
"nilmdb 1.3.1 or older"},
3: {"next": None},
} }
@nilmdb.utils.must_close() @nilmdb.utils.must_close()
class NilmDB(): class NilmDB(object):
verbose = 0 verbose = 0
def __init__(self, basepath, def __init__(self, basepath, max_results=None,
max_results=None,
max_removals=None,
max_int_removals=None,
bulkdata_args=None): bulkdata_args=None):
"""Initialize NilmDB at the given basepath.
Other arguments are for debugging / testing:
'max_results' is the max rows to send in a single
stream_intervals or stream_extract response.
'max_removals' is the max rows to delete at once
in stream_remove.
'max_int_removals' is the max intervals to delete
at once in stream_remove.
'bulkdata_args' is kwargs for the bulkdata module.
"""
if bulkdata_args is None: if bulkdata_args is None:
bulkdata_args = {} bulkdata_args = {}
@ -117,26 +95,19 @@ class NilmDB():
# SQLite database too # SQLite database too
sqlfilename = os.path.join(self.basepath, "data.sql") sqlfilename = os.path.join(self.basepath, "data.sql")
self.con = sqlite3.connect(sqlfilename, check_same_thread=True) self.con = sqlite3.connect(sqlfilename, check_same_thread = True)
try: self._sql_schema_update()
self._sql_schema_update()
except Exception:
self.data.close()
raise
# See big comment at top about the performance implications of this # See big comment at top about the performance implications of this
self.con.execute("PRAGMA synchronous=NORMAL") self.con.execute("PRAGMA synchronous=NORMAL")
self.con.execute("PRAGMA journal_mode=WAL") self.con.execute("PRAGMA journal_mode=WAL")
# Approximate largest number of elements that we want to send # Approximate largest number of elements that we want to send
# in a single reply (for stream_intervals, stream_extract). # in a single reply (for stream_intervals, stream_extract)
self.max_results = max_results or 16384 if max_results:
self.max_results = max_results
# Remove up to this many rows per call to stream_remove. else:
self.max_removals = max_removals or 1048576 self.max_results = 16384
# Remove up to this many intervals per call to stream_remove.
self.max_int_removals = max_int_removals or 4096
def get_basepath(self): def get_basepath(self):
return self.basepath return self.basepath
@ -145,7 +116,6 @@ class NilmDB():
if self.con: if self.con:
self.con.commit() self.con.commit()
self.con.close() self.con.close()
self.con = None
self.data.close() self.data.close()
def _sql_schema_update(self): def _sql_schema_update(self):
@ -153,20 +123,11 @@ class NilmDB():
version = cur.execute("PRAGMA user_version").fetchone()[0] version = cur.execute("PRAGMA user_version").fetchone()[0]
oldversion = version oldversion = version
while True: while version in _sql_schema_updates:
if version not in _sql_schema_updates: cur.executescript(_sql_schema_updates[version])
raise Exception(self.basepath + ": unknown database version " version = version + 1
+ str(version)) if self.verbose: # pragma: no cover
update = _sql_schema_updates[version] printf("Schema updated to %d\n", version)
if "error" in update:
raise Exception(self.basepath + ": can't use database version "
+ str(version) + ": " + update["error"])
if update["next"] is None:
break
cur.executescript(update["sql"])
version = update["next"]
if self.verbose:
printf("Database schema updated to %d\n", version)
if version != oldversion: if version != oldversion:
with self.con: with self.con:
@ -174,14 +135,14 @@ class NilmDB():
def _check_user_times(self, start, end): def _check_user_times(self, start, end):
if start is None: if start is None:
start = nilmdb.utils.time.min_timestamp start = -1e12
if end is None: if end is None:
end = nilmdb.utils.time.max_timestamp end = 1e12
if start >= end: if start >= end:
raise NilmDBError("start must precede end") raise NilmDBError("start must precede end")
return (start, end) return (start, end)
@nilmdb.utils.lru_cache(size=64) @nilmdb.utils.lru_cache(size = 16)
def _get_intervals(self, stream_id): def _get_intervals(self, stream_id):
""" """
Return a mutable IntervalSet corresponding to the given stream ID. Return a mutable IntervalSet corresponding to the given stream ID.
@ -196,7 +157,7 @@ class NilmDB():
iset += DBInterval(start_time, end_time, iset += DBInterval(start_time, end_time,
start_time, end_time, start_time, end_time,
start_pos, end_pos) start_pos, end_pos)
except IntervalError: except IntervalError: # pragma: no cover
raise NilmDBError("unexpected overlap in ranges table!") raise NilmDBError("unexpected overlap in ranges table!")
return iset return iset
@ -223,17 +184,21 @@ class NilmDB():
# Load this stream's intervals # Load this stream's intervals
iset = self._get_intervals(stream_id) iset = self._get_intervals(stream_id)
# Check for overlap
if iset.intersects(interval): # pragma: no cover (gets caught earlier)
raise NilmDBError("new interval overlaps existing data")
# Check for adjacency. If there's a stream in the database # Check for adjacency. If there's a stream in the database
# that ends exactly when this one starts, and the database # that ends exactly when this one starts, and the database
# rows match up, we can make one interval that covers the # rows match up, we can make one interval that covers the
# time range [adjacent.start -> interval.end) # time range [adjacent.start -> interval.end)
# and database rows [ adjacent.start_pos -> end_pos ]. # and database rows [ adjacent.start_pos -> end_pos ].
# Only do this if the resulting interval isn't too large. # Only do this if the resulting interval isn't too large.
max_merged_rows = 8000 * 60 * 60 * 1.05 # 1.05 hours at 8 KHz max_merged_rows = 8000 * 60 * 60 * 1.05 # 1.05 hours at 8 KHz
adjacent = iset.find_end(interval.start) adjacent = iset.find_end(interval.start)
if (adjacent is not None and if (adjacent is not None and
start_pos == adjacent.db_endpos and start_pos == adjacent.db_endpos and
(end_pos - adjacent.db_startpos) < max_merged_rows): (end_pos - adjacent.db_startpos) < max_merged_rows):
# First delete the old one, both from our iset and the # First delete the old one, both from our iset and the
# database # database
iset -= adjacent iset -= adjacent
@ -265,6 +230,10 @@ class NilmDB():
original: original DBInterval; must be already present in DB original: original DBInterval; must be already present in DB
to_remove: DBInterval to remove; must be subset of 'original' to_remove: DBInterval to remove; must be subset of 'original'
""" """
# Just return if we have nothing to remove
if remove.start == remove.end: # pragma: no cover
return
# Load this stream's intervals # Load this stream's intervals
iset = self._get_intervals(stream_id) iset = self._get_intervals(stream_id)
@ -279,8 +248,7 @@ class NilmDB():
# the removed piece was in the middle. # the removed piece was in the middle.
def add(iset, start, end, start_pos, end_pos): def add(iset, start, end, start_pos, end_pos):
iset += DBInterval(start, end, start, end, start_pos, end_pos) iset += DBInterval(start, end, start, end, start_pos, end_pos)
self._sql_interval_insert(stream_id, start, end, self._sql_interval_insert(stream_id, start, end, start_pos, end_pos)
start_pos, end_pos)
if original.start != remove.start: if original.start != remove.start:
# Interval before the removed region # Interval before the removed region
@ -297,7 +265,7 @@ class NilmDB():
return return
def stream_list(self, path=None, layout=None, extended=False): def stream_list(self, path = None, layout = None, extended = False):
"""Return list of lists of all streams in the database. """Return list of lists of all streams in the database.
If path is specified, include only streams with a path that If path is specified, include only streams with a path that
@ -306,24 +274,24 @@ class NilmDB():
If layout is specified, include only streams with a layout If layout is specified, include only streams with a layout
that matches the given string. that matches the given string.
If extended=False, returns a list of lists containing If extended = False, returns a list of lists containing
the path and layout: [ path, layout ] the path and layout: [ path, layout ]
If extended=True, returns a list of lists containing If extended = True, returns a list of lists containing
more information: more information:
path path
layout layout
interval_min (earliest interval start) interval_min (earliest interval start)
interval_max (latest interval end) interval_max (latest interval end)
rows (total number of rows of data) rows (total number of rows of data)
time (total time covered by this stream, in timestamp units) seconds (total time covered by this stream)
""" """
params = () params = ()
query = "SELECT streams.path, streams.layout" query = "SELECT streams.path, streams.layout"
if extended: if extended:
query += ", min(ranges.start_time), max(ranges.end_time) " query += ", min(ranges.start_time), max(ranges.end_time) "
query += ", coalesce(sum(ranges.end_pos - ranges.start_pos), 0) " query += ", sum(ranges.end_pos - ranges.start_pos) "
query += ", coalesce(sum(ranges.end_time - ranges.start_time), 0) " query += ", sum(ranges.end_time - ranges.start_time) "
query += " FROM streams" query += " FROM streams"
if extended: if extended:
query += " LEFT JOIN ranges ON streams.id = ranges.stream_id" query += " LEFT JOIN ranges ON streams.id = ranges.stream_id"
@ -336,47 +304,33 @@ class NilmDB():
params += (path,) params += (path,)
query += " GROUP BY streams.id ORDER BY streams.path" query += " GROUP BY streams.id ORDER BY streams.path"
result = self.con.execute(query, params).fetchall() result = self.con.execute(query, params).fetchall()
return [list(x) for x in result] return [ list(x) for x in result ]
def stream_intervals(self, path, start=None, end=None, diffpath=None): def stream_intervals(self, path, start = None, end = None):
""" """
List all intervals in 'path' between 'start' and 'end'. If
'diffpath' is not none, list instead the set-difference
between the intervals in the two streams; i.e. all interval
ranges that are present in 'path' but not 'diffpath'.
Returns (intervals, restart) tuple. Returns (intervals, restart) tuple.
'intervals' is a list of [start,end] timestamps of all intervals intervals is a list of [start,end] timestamps of all intervals
that exist for path, between start and end. that exist for path, between start and end.
'restart', if not None, means that there were too many results restart, if nonzero, means that there were too many results to
to return in a single request. The data is complete from the return in a single request. The data is complete from the
starting timestamp to the point at which it was truncated, and starting timestamp to the point at which it was truncated,
a new request with a start time of 'restart' will fetch the and a new request with a start time of 'restart' will fetch
next block of data. the next block of data.
""" """
stream_id = self._stream_id(path) stream_id = self._stream_id(path)
intervals = self._get_intervals(stream_id) intervals = self._get_intervals(stream_id)
if diffpath:
diffstream_id = self._stream_id(diffpath)
diffintervals = self._get_intervals(diffstream_id)
(start, end) = self._check_user_times(start, end) (start, end) = self._check_user_times(start, end)
requested = Interval(start, end) requested = Interval(start, end)
result = [] result = []
if diffpath: for n, i in enumerate(intervals.intersection(requested)):
getter = nilmdb.utils.interval.set_difference(
intervals.intersection(requested),
diffintervals.intersection(requested))
else:
getter = intervals.intersection(requested)
for n, i in enumerate(getter):
if n >= self.max_results: if n >= self.max_results:
restart = i.start restart = i.start
break break
result.append([i.start, i.end]) result.append([i.start, i.end])
else: else:
restart = None restart = 0
return (result, restart) return (result, restart)
def stream_create(self, path, layout_name): def stream_create(self, path, layout_name):
@ -410,8 +364,8 @@ class NilmDB():
def stream_set_metadata(self, path, data): def stream_set_metadata(self, path, data):
"""Set stream metadata from a dictionary, e.g. """Set stream metadata from a dictionary, e.g.
{ description: 'Downstairs lighting', { description = 'Downstairs lighting',
v_scaling: 123.45 } v_scaling = 123.45 }
This replaces all existing metadata. This replaces all existing metadata.
""" """
stream_id = self._stream_id(path) stream_id = self._stream_id(path)
@ -439,50 +393,29 @@ class NilmDB():
data.update(newdata) data.update(newdata)
self.stream_set_metadata(path, data) self.stream_set_metadata(path, data)
def stream_rename(self, oldpath, newpath):
"""Rename a stream."""
stream_id = self._stream_id(oldpath)
# Rename the data
self.data.rename(oldpath, newpath)
# Rename the stream in the database
with self.con as con:
con.execute("UPDATE streams SET path=? WHERE id=?",
(newpath, stream_id))
def stream_destroy(self, path): def stream_destroy(self, path):
"""Fully remove a table from the database. Fails if there are """Fully remove a table and all of its data from the database.
any intervals data present; remove them first. Metadata is No way to undo it! Metadata is removed."""
also removed."""
stream_id = self._stream_id(path) stream_id = self._stream_id(path)
# Verify that no intervals are present, and clear the cache # Delete the cached interval data (if it was cached)
iset = self._get_intervals(stream_id)
if iset:
raise NilmDBError("all intervals must be removed before "
"destroying a stream")
self._get_intervals.cache_remove(self, stream_id) self._get_intervals.cache_remove(self, stream_id)
# Delete the bulkdata storage # Delete the data
self.data.destroy(path) self.data.destroy(path)
# Delete metadata, stream, intervals (should be none) # Delete metadata, stream, intervals
with self.con as con: with self.con as con:
con.execute("DELETE FROM metadata WHERE stream_id=?", (stream_id,)) con.execute("DELETE FROM metadata WHERE stream_id=?", (stream_id,))
con.execute("DELETE FROM ranges WHERE stream_id=?", (stream_id,)) con.execute("DELETE FROM ranges WHERE stream_id=?", (stream_id,))
con.execute("DELETE FROM streams WHERE id=?", (stream_id,)) con.execute("DELETE FROM streams WHERE id=?", (stream_id,))
def stream_insert(self, path, start, end, data, binary=False): def stream_insert(self, path, start, end, data):
"""Insert new data into the database. """Insert new data into the database.
path: Path at which to add the data path: Path at which to add the data
start: Starting timestamp start: Starting timestamp
end: Ending timestamp end: Ending timestamp
data: Textual data, formatted according to the layout of path data: Textual data, formatted according to the layout of path
'binary', if True, means that 'data' is raw binary:
little-endian, matching the current table's layout,
including the int64 timestamp.
""" """
# First check for basic overlap using timestamp info given. # First check for basic overlap using timestamp info given.
stream_id = self._stream_id(path) stream_id = self._stream_id(path)
@ -496,7 +429,7 @@ class NilmDB():
# there are any parse errors. # there are any parse errors.
table = self.data.getnode(path) table = self.data.getnode(path)
row_start = table.nrows row_start = table.nrows
table.append_data(data, start, end, binary) table.append_string(data, start, end)
row_end = table.nrows row_end = table.nrows
# Insert the record into the sql database. # Insert the record into the sql database.
@ -505,17 +438,6 @@ class NilmDB():
# And that's all # And that's all
return return
def _bisect_left(self, a, x, lo, hi):
# Like bisect.bisect_left, but doesn't choke on large indices on
# 32-bit systems, like bisect's fast C implementation does.
while lo < hi:
mid = (lo + hi) // 2
if a[mid] < x:
lo = mid + 1
else:
hi = mid
return lo
def _find_start(self, table, dbinterval): def _find_start(self, table, dbinterval):
""" """
Given a DBInterval, find the row in the database that Given a DBInterval, find the row in the database that
@ -526,10 +448,10 @@ class NilmDB():
# Optimization for the common case where an interval wasn't truncated # Optimization for the common case where an interval wasn't truncated
if dbinterval.start == dbinterval.db_start: if dbinterval.start == dbinterval.db_start:
return dbinterval.db_startpos return dbinterval.db_startpos
return self._bisect_left(table, return bisect.bisect_left(bulkdata.TimestampOnlyTable(table),
dbinterval.start, dbinterval.start,
dbinterval.db_startpos, dbinterval.db_startpos,
dbinterval.db_endpos) dbinterval.db_endpos)
def _find_end(self, table, dbinterval): def _find_end(self, table, dbinterval):
""" """
@ -545,36 +467,28 @@ class NilmDB():
# want to include the given timestamp in the results. This is # want to include the given timestamp in the results. This is
# so a queries like 1:00 -> 2:00 and 2:00 -> 3:00 return # so a queries like 1:00 -> 2:00 and 2:00 -> 3:00 return
# non-overlapping data. # non-overlapping data.
return self._bisect_left(table, return bisect.bisect_left(bulkdata.TimestampOnlyTable(table),
dbinterval.end, dbinterval.end,
dbinterval.db_startpos, dbinterval.db_startpos,
dbinterval.db_endpos) dbinterval.db_endpos)
def stream_extract(self, path, start=None, end=None, def stream_extract(self, path, start = None, end = None, count = False):
count=False, markup=False, binary=False):
""" """
Returns (data, restart) tuple. Returns (data, restart) tuple.
'data' is ASCII-formatted data from the database, formatted data is ASCII-formatted data from the database, formatted
according to the layout of the stream. according to the layout of the stream.
'restart', if not None, means that there were too many results to restart, if nonzero, means that there were too many results to
return in a single request. The data is complete from the return in a single request. The data is complete from the
starting timestamp to the point at which it was truncated, starting timestamp to the point at which it was truncated,
and a new request with a start time of 'restart' will fetch and a new request with a start time of 'restart' will fetch
the next block of data. the next block of data.
'count', if true, means to not return raw data, but just the count count, if true, means to not return raw data, but just the count
of rows that would have been returned. This is much faster of rows that would have been returned. This is much faster
than actually fetching the data. It is not limited by than actually fetching the data. It is not limited by
max_results. max_results.
'markup', if true, indicates that returned data should be
marked with a comment denoting when a particular interval
starts, and another comment when an interval ends.
'binary', if true, means to return raw binary rather than
ASCII-formatted data.
""" """
stream_id = self._stream_id(path) stream_id = self._stream_id(path)
table = self.data.getnode(path) table = self.data.getnode(path)
@ -584,9 +498,7 @@ class NilmDB():
result = [] result = []
matched = 0 matched = 0
remaining = self.max_results remaining = self.max_results
restart = None restart = 0
if binary and (markup or count):
raise NilmDBError("binary mode can't be used with markup or count")
for interval in intervals.intersection(requested): for interval in intervals.intersection(requested):
# Reading single rows from the table is too slow, so # Reading single rows from the table is too slow, so
# we use two bisections to find both the starting and # we use two bisections to find both the starting and
@ -603,48 +515,27 @@ class NilmDB():
row_max = row_start + remaining row_max = row_start + remaining
if row_max < row_end: if row_max < row_end:
row_end = row_max row_end = row_max
restart = table[row_max] restart = table[row_max][0]
# Add markup
if markup:
result.append(b"# interval-start " +
timestamp_to_bytes(interval.start) + b"\n")
# Gather these results up # Gather these results up
result.append(table.get_data(row_start, row_end, binary)) result.append(table.get_as_text(row_start, row_end))
# Count them # Count them
remaining -= row_end - row_start remaining -= row_end - row_start
# Add markup, and exit if restart is set. if restart:
if restart is not None:
if markup:
result.append(b"# interval-end " +
timestamp_to_bytes(restart) + b"\n")
break break
if markup:
result.append(b"# interval-end " +
timestamp_to_bytes(interval.end) + b"\n")
if count: if count:
return matched return matched
full_result = b"".join(result) return ("".join(result), restart)
return (full_result, restart)
def stream_remove(self, path, start=None, end=None): def stream_remove(self, path, start = None, end = None):
""" """
Remove data from the specified time interval within a stream. Remove data from the specified time interval within a stream.
Removes all data in the interval [start, end), and intervals
Removes data in the interval [start, end), and intervals are are truncated or split appropriately. Returns the number of
truncated or split appropriately. data points removed.
Returns a (removed, restart) tuple.
'removed' is the number of data points that were removed.
'restart', if not None, means there were too many rows to
remove in a single request. This function should be called
again with a start time of 'restart' to complete the removal.
""" """
stream_id = self._stream_id(path) stream_id = self._stream_id(path)
table = self.data.getnode(path) table = self.data.getnode(path)
@ -652,34 +543,16 @@ class NilmDB():
(start, end) = self._check_user_times(start, end) (start, end) = self._check_user_times(start, end)
to_remove = Interval(start, end) to_remove = Interval(start, end)
removed = 0 removed = 0
remaining = self.max_removals
int_remaining = self.max_int_removals
restart = None
# Can't remove intervals from within the iterator, so we need to # Can't remove intervals from within the iterator, so we need to
# remember what's currently in the intersection now. # remember what's currently in the intersection now.
all_candidates = list(intervals.intersection(to_remove, orig=True)) all_candidates = list(intervals.intersection(to_remove, orig = True))
remove_start = None
remove_end = None
for (dbint, orig) in all_candidates: for (dbint, orig) in all_candidates:
# Stop if we've hit the max number of interval removals
if int_remaining <= 0:
restart = dbint.start
break
# Find row start and end # Find row start and end
row_start = self._find_start(table, dbint) row_start = self._find_start(table, dbint)
row_end = self._find_end(table, dbint) row_end = self._find_end(table, dbint)
# Shorten it if we'll hit the maximum number of removals
row_max = row_start + remaining
if row_max < row_end:
row_end = row_max
dbint.end = table[row_max]
restart = dbint.end
# Adjust the DBInterval to match the newly found ends # Adjust the DBInterval to match the newly found ends
dbint.db_start = dbint.start dbint.db_start = dbint.start
dbint.db_end = dbint.end dbint.db_end = dbint.end
@ -689,29 +562,10 @@ class NilmDB():
# Remove interval from the database # Remove interval from the database
self._remove_interval(stream_id, orig, dbint) self._remove_interval(stream_id, orig, dbint)
# Remove data from the underlying table storage, # Remove data from the underlying table storage
# coalescing adjacent removals to reduce the number of calls table.remove(row_start, row_end)
# to table.remove.
if remove_end == row_start:
# Extend our coalesced region
remove_end = row_end
else:
# Perform previous removal, then save this one
if remove_end is not None:
table.remove(remove_start, remove_end)
remove_start = row_start
remove_end = row_end
# Count how many were removed # Count how many were removed
removed += row_end - row_start removed += row_end - row_start
remaining -= row_end - row_start
int_remaining -= 1
if restart is not None: return removed
break
# Perform any final coalesced removal
if remove_end is not None:
table.remove(remove_start, remove_end)
return (removed, restart)

143
nilmdb/server/pyrocket.py Normal file
View File

@ -0,0 +1,143 @@
# Python implementation of the "rocket" data parsing interface.
# This interface translates between the binary format on disk
# and the ASCII format used when communicating with clients.
# This is slow! Use the C version instead.
from __future__ import absolute_import
import struct
import cStringIO
import itertools
from . import layout as _layout
import nilmdb.utils
from nilmdb.utils.time import float_time_to_string as ftts
# Error codes passed as the second argument to ParseError; they mirror
# the constants exposed by the C "rocket" extension module.
ERR_UNKNOWN = 0          # generic parse failure (e.g. malformed line)
ERR_NON_MONOTONIC = 1    # timestamp not strictly increasing
ERR_OUT_OF_INTERVAL = 2  # timestamp outside the [start, end) interval
class ParseError(Exception):
    """Raised by Rocket.append_string() when input text cannot be
    parsed.  Arguments are (linenum, error_code, detail), where
    error_code is one of the module-level ERR_* constants."""
    pass
@nilmdb.utils.must_close(wrap_verify = False)
class Rocket(object):
    """Pure-Python fallback codec for NilmDB bulk data files.

    Translates rows between the little-endian binary format stored on
    disk and the ASCII text format used when communicating with
    clients.  This implementation is slow; the C "rocket" extension
    should be used instead when available.
    """

    def __init__(self, layout, filename):
        """Create a codec for the given layout (e.g. "float32_8").

        filename: binary data file to open in append mode, or a false
        value to operate without a backing file.

        Raises ValueError if the layout string is not of the form
        '<type>_<count>' with a known data type and a count >= 1.
        """
        self.layout = layout
        if filename:
            self.file = open(filename, "a+b")
        else:
            self.file = None

        # Parse the layout string into an element type and count, used
        # for packing/unpacking rows in the binary file.  (This will
        # change in the C version.)
        try:
            (self.ltype, lcount) = layout.split('_', 2)
            self.lcount = int(lcount)
        except (ValueError, AttributeError):
            # Narrowed from a bare "except:" so that SystemExit,
            # KeyboardInterrupt, etc. are not swallowed here.
            raise ValueError("no such layout: badly formatted string")
        if self.lcount < 1:
            raise ValueError("no such layout: bad count")
        try:
            struct_fmt = '<d'    # Little endian, double timestamp
            struct_mapping = {
                "int8":    'b',
                "uint8":   'B',
                "int16":   'h',
                "uint16":  'H',
                "int32":   'i',
                "uint32":  'I',
                "int64":   'q',
                "uint64":  'Q',
                "float32": 'f',
                "float64": 'd',
                }
            struct_fmt += struct_mapping[self.ltype] * self.lcount
        except KeyError:
            raise ValueError("no such layout: bad data type")
        self.packer = struct.Struct(struct_fmt)

        # For packing/unpacking rows to/from their ASCII representation.
        self.layoutparser = _layout.Layout(self.layout)
        self.formatter = _layout.Formatter(self.layout)

    def close(self):
        """Close the backing file, if one was opened."""
        if self.file:
            self.file.close()

    @property
    def binary_size(self):
        """Return size of one row of data in the binary file, in bytes"""
        return self.packer.size

    def append_iter(self, maxrows, data):
        """Append up to 'maxrows' rows from the iterable 'data' to the
        file, and return the number of rows actually written.

        Each row is a sequence matching the layout (timestamp followed
        by values).  The file is opened in append mode, so all writes
        go to the end.
        """
        written = 0
        for row in itertools.islice(data, maxrows):
            self.file.write(self.packer.pack(*row))
            written += 1
        self.file.flush()
        return written

    def append_string(self, count, data, data_offset, linenum,
                      start, end, last_timestamp):
        """Parse string and append data.

        count:          maximum number of rows to add
        data:           string data
        data_offset:    byte offset into data to start parsing
        linenum:        current line number of data
        start:          starting timestamp for interval
        end:            end timestamp for interval
        last_timestamp: last timestamp that was previously parsed

        Raises ParseError if timestamps are non-monotonic, outside the
        start/end interval, or otherwise unparseable.

        On success, return a tuple with four values:
          added_rows:     how many rows were added from the data
          data_offset:    current offset into the data string
          last_timestamp: last timestamp we parsed
          linenum:        updated line number
        """
        # Parse the input data line by line, starting at data_offset.
        indata = cStringIO.StringIO(data)
        indata.seek(data_offset)
        written = 0
        while written < count:
            line = indata.readline()
            linenum += 1
            if line == "":
                break
            # Strip trailing '#' comments and surrounding whitespace;
            # skip lines that are then empty.
            comment = line.find('#')
            if comment >= 0:
                line = line.split('#', 1)[0]
            line = line.strip()
            if line == "":
                continue
            try:
                (ts, row) = self.layoutparser.parse(line)
            except ValueError as e:
                raise ParseError(linenum, ERR_UNKNOWN, e)
            if ts <= last_timestamp:
                raise ParseError(linenum, ERR_NON_MONOTONIC, ts)
            last_timestamp = ts
            if ts < start or ts >= end:
                raise ParseError(linenum, ERR_OUT_OF_INTERVAL, ts)
            # NOTE(review): assumes 'row' already includes the
            # timestamp as its first element, as required by
            # self.packer -- confirm against _layout.Layout.parse.
            self.append_iter(1, [row])
            written += 1
        return (written, indata.tell(), last_timestamp, linenum)

    def extract_list(self, offset, count):
        """Extract count rows of data from the file at offset offset.
        Return a list of lists [[row],[row],...]"""
        ret = []
        self.file.seek(offset)
        for i in xrange(count):
            data = self.file.read(self.binary_size)
            ret.append(list(self.packer.unpack(data)))
        return ret

    def extract_string(self, offset, count):
        """Extract count rows of data from the file at offset offset.
        Return an ascii formatted string according to the layout"""
        return self.formatter.format(self.extract_list(offset, count))

View File

@ -1,5 +1,3 @@
# cython: language_level=2
cdef class RBNode: cdef class RBNode:
cdef public object obj cdef public object obj
cdef public double start, end cdef public double start, end

View File

@ -1,6 +1,5 @@
# cython: profile=False # cython: profile=False
# cython: cdivision=True # cython: cdivision=True
# cython: language_level=2
""" """
Jim Paris <jim@jtan.com> Jim Paris <jim@jtan.com>

View File

@ -2,12 +2,8 @@
#include <structmember.h> #include <structmember.h>
#include <endian.h> #include <endian.h>
#include <ctype.h>
#include <stdint.h> #include <stdint.h>
#define __STDC_FORMAT_MACROS
#include <inttypes.h>
/* Values missing from stdint.h */ /* Values missing from stdint.h */
#define UINT8_MIN 0 #define UINT8_MIN 0
#define UINT16_MIN 0 #define UINT16_MIN 0
@ -20,11 +16,9 @@
#define FLOAT64_MIN 0 #define FLOAT64_MIN 0
#define FLOAT64_MAX 0 #define FLOAT64_MAX 0
typedef int64_t timestamp_t;
/* Somewhat arbitrary, just so we can use fixed sizes for strings /* Somewhat arbitrary, just so we can use fixed sizes for strings
etc. */ etc. */
static const int MAX_LAYOUT_COUNT = 1024; static const int MAX_LAYOUT_COUNT = 64;
/* Error object and constants */ /* Error object and constants */
static PyObject *ParseError; static PyObject *ParseError;
@ -41,20 +35,20 @@ static void add_parseerror_codes(PyObject *module)
} }
/* Helpers to raise ParseErrors. Use "return raise_str(...)" etc. */ /* Helpers to raise ParseErrors. Use "return raise_str(...)" etc. */
static PyObject *raise_str(int line, int col, int code, const char *string) static PyObject *raise_str(int linenum, int code, const char *string)
{ {
PyObject *o; PyObject *o;
o = Py_BuildValue("(iiis)", line, col, code, string); o = Py_BuildValue("(iis)", linenum, code, string);
if (o != NULL) { if (o != NULL) {
PyErr_SetObject(ParseError, o); PyErr_SetObject(ParseError, o);
Py_DECREF(o); Py_DECREF(o);
} }
return NULL; return NULL;
} }
static PyObject *raise_int(int line, int col, int code, int64_t num) static PyObject *raise_num(int linenum, int code, double num)
{ {
PyObject *o; PyObject *o;
o = Py_BuildValue("(iiiL)", line, col, code, (long long)num); o = Py_BuildValue("(iid)", linenum, code, num);
if (o != NULL) { if (o != NULL) {
PyErr_SetObject(ParseError, o); PyErr_SetObject(ParseError, o);
Py_DECREF(o); Py_DECREF(o);
@ -138,7 +132,7 @@ static void Rocket_dealloc(Rocket *self)
fclose(self->file); fclose(self->file);
self->file = NULL; self->file = NULL;
} }
Py_TYPE(self)->tp_free((PyObject *)self); self->ob_type->tp_free((PyObject *)self);
} }
static PyObject *Rocket_new(PyTypeObject *type, PyObject *args, PyObject *kwds) static PyObject *Rocket_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
@ -160,19 +154,13 @@ static PyObject *Rocket_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
static int Rocket_init(Rocket *self, PyObject *args, PyObject *kwds) static int Rocket_init(Rocket *self, PyObject *args, PyObject *kwds)
{ {
const char *layout, *path; const char *layout, *path;
int pathlen;
static char *kwlist[] = { "layout", "file", NULL }; static char *kwlist[] = { "layout", "file", NULL };
if (!PyArg_ParseTupleAndKeywords(args, kwds, "sz#", kwlist, if (!PyArg_ParseTupleAndKeywords(args, kwds, "sz", kwlist,
&layout, &path, &pathlen)) &layout, &path))
return -1; return -1;
if (!layout) if (!layout)
return -1; return -1;
if (path) { if (path) {
if (strlen(path) != (size_t)pathlen) {
PyErr_SetString(PyExc_ValueError, "path must not "
"contain NUL characters");
return -1;
}
if ((self->file = fopen(path, "a+b")) == NULL) { if ((self->file = fopen(path, "a+b")) == NULL) {
PyErr_SetFromErrno(PyExc_OSError); PyErr_SetFromErrno(PyExc_OSError);
return -1; return -1;
@ -245,17 +233,117 @@ static PyObject *Rocket_get_file_size(Rocket *self)
return NULL; return NULL;
} }
} }
return PyLong_FromLong(self->file_size); return PyInt_FromLong(self->file_size);
}
/****
* Append from iterator
*/
/* Helper for writing Python objects to the file.  Converts 'val' to
 * the little-endian on-disk representation for layout 'type' and
 * appends it to 'out'.  On failure, returns with a Python exception
 * set: TypeError for an unknown layout type, OSError for a failed
 * write, or whatever the numeric conversion function raised. */
static inline void append_pyobject(FILE *out, PyObject *val, layout_type_t type)
{
	union8_t t8;
	union16_t t16;
	union32_t t32;
	union64_t t64;
	int ret = 0;

	switch (type) {
	/* For each layout type: convert the Python object with
	 * 'pyconvert' (returning early if that set an exception),
	 * byte-swap to little-endian with 'htole' (empty for
	 * single-byte types, so the value passes through unchanged),
	 * and write 'bytes' bytes to the file.
	 * NOTE(review): uses the Python 2 C API (PyInt_AsLong). */
#define CASE(type, pyconvert, pytype, disktype, htole, bytes)	\
	case LAYOUT_TYPE_##type:				\
		pytype = pyconvert(val);			\
		if (PyErr_Occurred())				\
			return;					\
		disktype = htole(disktype);			\
		ret = fwrite(&disktype, bytes, 1, out);		\
		break
		CASE(INT8,    PyInt_AsLong,     t8.i,  t8.u,  , 1);
		CASE(UINT8,   PyInt_AsLong,     t8.u,  t8.u,  , 1);
		CASE(INT16,   PyInt_AsLong,     t16.i, t16.u, htole16, 2);
		CASE(UINT16,  PyInt_AsLong,     t16.u, t16.u, htole16, 2);
		CASE(INT32,   PyInt_AsLong,     t32.i, t32.u, htole32, 4);
		CASE(UINT32,  PyInt_AsLong,     t32.u, t32.u, htole32, 4);
		CASE(INT64,   PyInt_AsLong,     t64.i, t64.u, htole64, 8);
		CASE(UINT64,  PyInt_AsLong,     t64.u, t64.u, htole64, 8);
		CASE(FLOAT32, PyFloat_AsDouble, t32.f, t32.u, htole32, 4);
		CASE(FLOAT64, PyFloat_AsDouble, t64.d, t64.u, htole64, 8);
#undef CASE
	default:
		PyErr_SetString(PyExc_TypeError, "unknown type");
		return;
	}
	/* fwrite returns the number of items written (1 on success). */
	if (ret <= 0) {
		PyErr_SetFromErrno(PyExc_OSError);
	}
}
/* .append_iter(maxrows, dataiter) */
static PyObject *Rocket_append_iter(Rocket *self, PyObject *args)
{
int maxrows;
PyObject *iter;
PyObject *rowlist;
if (!PyArg_ParseTuple(args, "iO:append_iter", &maxrows, &iter))
return NULL;
if (!PyIter_Check(iter)) {
PyErr_SetString(PyExc_TypeError, "need an iterable");
return NULL;
}
if (!self->file) {
PyErr_SetString(PyExc_Exception, "no file");
return NULL;
}
/* Mark file size so that it will get updated next time it's read */
self->file_size = -1;
int row;
for (row = 0; row < maxrows; row++) {
rowlist = PyIter_Next(iter);
if (!rowlist)
break;
if (!PyList_Check(rowlist)) {
PyErr_SetString(PyExc_TypeError, "rows must be lists");
goto row_err;
}
if (PyList_Size(rowlist) != self->layout_count + 1) {
PyErr_SetString(PyExc_TypeError, "short row");
goto row_err;
}
/* Extract and write timestamp */
append_pyobject(self->file, PyList_GetItem(rowlist, 0),
LAYOUT_TYPE_FLOAT64);
if (PyErr_Occurred())
goto row_err;
/* Extract and write values */
int i;
for (i = 0; i < self->layout_count; i++) {
append_pyobject(self->file,
PyList_GetItem(rowlist, i+1),
self->layout_type);
if (PyErr_Occurred())
goto row_err;
}
}
fflush(self->file);
/* All done */
return PyLong_FromLong(row);
row_err:
fflush(self->file);
Py_DECREF(rowlist);
return NULL;
} }
/**** /****
* Append from string * Append from string
*/ */
static inline long int strtoll10(const char *nptr, char **endptr) { static inline long int strtol10(const char *nptr, char **endptr) {
return strtoll(nptr, endptr, 10); return strtol(nptr, endptr, 10);
} }
static inline long int strtoull10(const char *nptr, char **endptr) { static inline long int strtoul10(const char *nptr, char **endptr) {
return strtoull(nptr, endptr, 10); return strtoul(nptr, endptr, 10);
} }
/* .append_string(count, data, offset, linenum, start, end, last_timestamp) */ /* .append_string(count, data, offset, linenum, start, end, last_timestamp) */
@ -264,12 +352,10 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
int count; int count;
const char *data; const char *data;
int offset; int offset;
const char *linestart;
int linenum; int linenum;
long long ll1, ll2, ll3; double start;
timestamp_t start; double end;
timestamp_t end; double last_timestamp;
timestamp_t last_timestamp;
int written = 0; int written = 0;
char *endptr; char *endptr;
@ -279,32 +365,23 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
union64_t t64; union64_t t64;
int i; int i;
/* Input data is bytes. Using 'y#' instead of 'y' might be /* It would be nice to use 't#' instead of 's' for data,
preferable, but strto* requires the null terminator. */ but we need the null termination for strto*. If we had
if (!PyArg_ParseTuple(args, "iyiiLLL:append_string", &count, strnto* that took a length, we could use t# and not require
a copy. */
if (!PyArg_ParseTuple(args, "isiiddd:append_string", &count,
&data, &offset, &linenum, &data, &offset, &linenum,
&ll1, &ll2, &ll3)) &start, &end, &last_timestamp))
return NULL; return NULL;
start = ll1;
end = ll2;
last_timestamp = ll3;
/* Skip spaces, but don't skip over a newline. */
#define SKIP_BLANK(buf) do { \
while (isspace(*buf)) { \
if (*buf == '\n') \
break; \
buf++; \
} } while(0)
const char *buf = &data[offset]; const char *buf = &data[offset];
while (written < count && *buf) while (written < count && *buf)
{ {
linestart = buf;
linenum++; linenum++;
/* Skip leading whitespace and commented lines */ /* Skip leading whitespace and commented lines */
SKIP_BLANK(buf); while (*buf == ' ' || *buf == '\t')
buf++;
if (*buf == '#') { if (*buf == '#') {
while (*buf && *buf != '\n') while (*buf && *buf != '\n')
buf++; buf++;
@ -314,23 +391,14 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
} }
/* Extract timestamp */ /* Extract timestamp */
t64.i = strtoll(buf, &endptr, 10); t64.d = strtod(buf, &endptr);
if (endptr == buf || !isspace(*endptr)) { if (endptr == buf)
/* Try parsing as a double instead */ return raise_str(linenum, ERR_OTHER, "bad timestamp");
t64.d = strtod(buf, &endptr); if (t64.d <= last_timestamp)
if (endptr == buf) return raise_num(linenum, ERR_NON_MONOTONIC, t64.d);
goto bad_timestamp; last_timestamp = t64.d;
if (!isspace(*endptr)) if (t64.d < start || t64.d >= end)
goto cant_parse_value; return raise_num(linenum, ERR_OUT_OF_INTERVAL, t64.d);
t64.i = round(t64.d);
}
if (t64.i <= last_timestamp)
return raise_int(linenum, buf - linestart + 1,
ERR_NON_MONOTONIC, t64.i);
last_timestamp = t64.i;
if (t64.i < start || t64.i >= end)
return raise_int(linenum, buf - linestart + 1,
ERR_OUT_OF_INTERVAL, t64.i);
t64.u = le64toh(t64.u); t64.u = le64toh(t64.u);
if (fwrite(&t64.u, 8, 1, self->file) != 1) if (fwrite(&t64.u, 8, 1, self->file) != 1)
goto err; goto err;
@ -342,31 +410,23 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
case LAYOUT_TYPE_##type: \ case LAYOUT_TYPE_##type: \
/* parse and write in a loop */ \ /* parse and write in a loop */ \
for (i = 0; i < self->layout_count; i++) { \ for (i = 0; i < self->layout_count; i++) { \
/* skip non-newlines */ \
SKIP_BLANK(buf); \
if (*buf == '\n') \
goto wrong_number_of_values; \
/* parse number */ \
parsetype = parsefunc(buf, &endptr); \ parsetype = parsefunc(buf, &endptr); \
if (*endptr && !isspace(*endptr)) \ if (endptr == buf) \
goto cant_parse_value; \ goto wrong_number_of_values; \
/* check limits */ \
if (type##_MIN != type##_MAX && \ if (type##_MIN != type##_MAX && \
(parsetype < type##_MIN || \ (parsetype < type##_MIN || \
parsetype > type##_MAX)) \ parsetype > type##_MAX)) \
goto value_out_of_range; \ goto value_out_of_range; \
/* convert to disk representation */ \
realtype = parsetype; \ realtype = parsetype; \
disktype = letoh(disktype); \ disktype = letoh(disktype); \
/* write it */ \
if (fwrite(&disktype, bytes, \ if (fwrite(&disktype, bytes, \
1, self->file) != 1) \ 1, self->file) != 1) \
goto err; \ goto err; \
/* advance buf */ \
buf = endptr; \ buf = endptr; \
} \ } \
/* Skip trailing whitespace and comments */ \ /* Skip trailing whitespace and comments */ \
SKIP_BLANK(buf); \ while (*buf == ' ' || *buf == '\t') \
buf++; \
if (*buf == '#') \ if (*buf == '#') \
while (*buf && *buf != '\n') \ while (*buf && *buf != '\n') \
buf++; \ buf++; \
@ -376,14 +436,14 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
goto extra_data_on_line; \ goto extra_data_on_line; \
break break
CS(INT8, strtoll10, t64.i, t8.i, t8.u, , 1); CS(INT8, strtol10, t64.i, t8.i, t8.u, , 1);
CS(UINT8, strtoull10, t64.u, t8.u, t8.u, , 1); CS(UINT8, strtoul10, t64.u, t8.u, t8.u, , 1);
CS(INT16, strtoll10, t64.i, t16.i, t16.u, le16toh, 2); CS(INT16, strtol10, t64.i, t16.i, t16.u, le16toh, 2);
CS(UINT16, strtoull10, t64.u, t16.u, t16.u, le16toh, 2); CS(UINT16, strtoul10, t64.u, t16.u, t16.u, le16toh, 2);
CS(INT32, strtoll10, t64.i, t32.i, t32.u, le32toh, 4); CS(INT32, strtol10, t64.i, t32.i, t32.u, le32toh, 4);
CS(UINT32, strtoull10, t64.u, t32.u, t32.u, le32toh, 4); CS(UINT32, strtoul10, t64.u, t32.u, t32.u, le32toh, 4);
CS(INT64, strtoll10, t64.i, t64.i, t64.u, le64toh, 8); CS(INT64, strtol10, t64.i, t64.i, t64.u, le64toh, 8);
CS(UINT64, strtoull10, t64.u, t64.u, t64.u, le64toh, 8); CS(UINT64, strtoul10, t64.u, t64.u, t64.u, le64toh, 8);
CS(FLOAT32, strtod, t64.d, t32.f, t32.u, le32toh, 4); CS(FLOAT32, strtod, t64.d, t32.f, t32.u, le32toh, 4);
CS(FLOAT64, strtod, t64.d, t64.d, t64.u, le64toh, 8); CS(FLOAT64, strtod, t64.d, t64.d, t64.u, le64toh, 8);
#undef CS #undef CS
@ -398,118 +458,141 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
fflush(self->file); fflush(self->file);
/* Build return value and return */ /* Build return value and return*/
offset = buf - data; offset = buf - data;
PyObject *o; PyObject *o;
o = Py_BuildValue("(iiLi)", written, offset, o = Py_BuildValue("(iidi)", written, offset, last_timestamp, linenum);
(long long)last_timestamp, linenum);
return o; return o;
err: err:
PyErr_SetFromErrno(PyExc_OSError); PyErr_SetFromErrno(PyExc_OSError);
return NULL; return NULL;
bad_timestamp:
return raise_str(linenum, buf - linestart + 1,
ERR_OTHER, "bad timestamp");
cant_parse_value:
return raise_str(linenum, buf - linestart + 1,
ERR_OTHER, "can't parse value");
wrong_number_of_values: wrong_number_of_values:
return raise_str(linenum, buf - linestart + 1, return raise_str(linenum, ERR_OTHER, "wrong number of values");
ERR_OTHER, "wrong number of values");
value_out_of_range: value_out_of_range:
return raise_str(linenum, buf - linestart + 1, return raise_str(linenum, ERR_OTHER, "value out of range");
ERR_OTHER, "value out of range");
extra_data_on_line: extra_data_on_line:
return raise_str(linenum, buf - linestart + 1, return raise_str(linenum, ERR_OTHER, "extra data on line");
ERR_OTHER, "extra data on line");
} }
/**** /****
* Append from binary data * Extract to Python list
*/ */
/* .append_binary(count, data, offset, linenum, start, end, last_timestamp) */ static int _extract_handle_params(Rocket *self, PyObject *args, long *count)
static PyObject *Rocket_append_binary(Rocket *self, PyObject *args)
{ {
int count; long offset;
const uint8_t *data; if (!PyArg_ParseTuple(args, "ll", &offset, count))
int data_len; return -1;
int linenum; if (!self->file) {
int offset; PyErr_SetString(PyExc_Exception, "no file");
long long ll1, ll2, ll3; return -1;
timestamp_t start; }
timestamp_t end; /* Seek to target location */
timestamp_t last_timestamp; if (fseek(self->file, offset, SEEK_SET) < 0) {
PyErr_SetFromErrno(PyExc_OSError);
return -1;
}
return 0;
}
if (!PyArg_ParseTuple(args, "iy#iiLLL:append_binary", /* Helper for extracting data from a file as a Python object */
&count, &data, &data_len, &offset, static inline void *extract_pyobject(FILE *in, layout_type_t type)
&linenum, &ll1, &ll2, &ll3)) {
union8_t t8;
union16_t t16;
union32_t t32;
union64_t t64;
switch (type) {
#define CASE(type, pyconvert, pytype, disktype, letoh, bytes) \
case LAYOUT_TYPE_##type: \
if (fread(&disktype, bytes, 1, in) <= 0) \
break; \
disktype = letoh(disktype); \
return pyconvert(pytype); \
break
CASE(INT8, PyInt_FromLong, t8.i, t8.u, , 1);
CASE(UINT8, PyInt_FromLong, t8.u, t8.u, , 1);
CASE(INT16, PyInt_FromLong, t16.i, t16.u, le16toh, 2);
CASE(UINT16, PyInt_FromLong, t16.u, t16.u, le16toh, 2);
CASE(INT32, PyInt_FromLong, t32.i, t32.u, le32toh, 4);
CASE(UINT32, PyInt_FromLong, t32.u, t32.u, le32toh, 4);
CASE(INT64, PyInt_FromLong, t64.i, t64.u, le64toh, 8);
CASE(UINT64, PyInt_FromLong, t64.u, t64.u, le64toh, 8);
CASE(FLOAT32, PyFloat_FromDouble, t32.f, t32.u, le32toh, 4);
CASE(FLOAT64, PyFloat_FromDouble, t64.d, t64.u, le64toh, 8);
#undef CASE
default:
PyErr_SetString(PyExc_TypeError, "unknown type");
return NULL; return NULL;
start = ll1; }
end = ll2; PyErr_SetString(PyExc_OSError, "failed to read from file");
last_timestamp = ll3; return NULL;
}
/* Advance to offset */ static PyObject *Rocket_extract_list(Rocket *self, PyObject *args)
if (offset > data_len) {
return raise_str(0, 0, ERR_OTHER, "bad offset"); long count;
data += offset; if (_extract_handle_params(self, args, &count) < 0)
data_len -= offset; return NULL;
/* Figure out max number of rows to insert */ /* Make a list to return */
int rows = data_len / self->binary_size; PyObject *retlist = PyList_New(0);
if (rows > count) if (!retlist)
rows = count; return NULL;
/* Check timestamps */ /* Read data into new Python lists */
timestamp_t ts; int row;
int i; for (row = 0; row < count; row++)
for (i = 0; i < rows; i++) { {
/* Read raw timestamp, byteswap if needed */ PyObject *rowlist = PyList_New(self->layout_count + 1);
memcpy(&ts, &data[i * self->binary_size], 8); if (!rowlist) {
ts = le64toh(ts); Py_DECREF(retlist);
return NULL;
}
/* Check limits */ /* Timestamp */
if (ts <= last_timestamp) PyObject *entry = extract_pyobject(self->file,
return raise_int(i, 0, ERR_NON_MONOTONIC, ts); LAYOUT_TYPE_FLOAT64);
last_timestamp = ts; if (!entry || (PyList_SetItem(rowlist, 0, entry) < 0)) {
if (ts < start || ts >= end) Py_DECREF(rowlist);
return raise_int(i, 0, ERR_OUT_OF_INTERVAL, ts); Py_DECREF(retlist);
} return NULL;
}
/* Write binary data */ /* Data */
if (fwrite(data, self->binary_size, rows, self->file) != (size_t)rows) { int i;
PyErr_SetFromErrno(PyExc_OSError); for (i = 0; i < self->layout_count; i++) {
return NULL; PyObject *ent = extract_pyobject(self->file,
} self->layout_type);
fflush(self->file); if (!ent || (PyList_SetItem(rowlist, i+1, ent) < 0)) {
Py_DECREF(rowlist);
Py_DECREF(retlist);
return NULL;
}
}
/* Build return value and return */ /* Add row to return value */
PyObject *o; if (PyList_Append(retlist, rowlist) < 0) {
o = Py_BuildValue("(iiLi)", rows, offset + rows * self->binary_size, Py_DECREF(rowlist);
(long long)last_timestamp, linenum); Py_DECREF(retlist);
return o; return NULL;
}
Py_DECREF(rowlist);
}
return retlist;
} }
/**** /****
* Extract to binary bytes object containing ASCII text-formatted data * Extract to string
*/ */
static PyObject *Rocket_extract_string(Rocket *self, PyObject *args) static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
{ {
long count; long count;
long offset; if (_extract_handle_params(self, args, &count) < 0)
if (!PyArg_ParseTuple(args, "ll", &offset, &count))
return NULL; return NULL;
if (!self->file) {
PyErr_SetString(PyExc_Exception, "no file");
return NULL;
}
/* Seek to target location */
if (fseek(self->file, offset, SEEK_SET) < 0) {
PyErr_SetFromErrno(PyExc_OSError);
return NULL;
}
char *str = NULL, *new; char *str = NULL, *new;
long len_alloc = 0; long len_alloc = 0;
@ -543,7 +626,8 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
if (fread(&t64.u, 8, 1, self->file) != 1) if (fread(&t64.u, 8, 1, self->file) != 1)
goto err; goto err;
t64.u = le64toh(t64.u); t64.u = le64toh(t64.u);
ret = sprintf(&str[len], "%" PRId64, t64.i); /* Timestamps are always printed to the microsecond */
ret = sprintf(&str[len], "%.6f", t64.d);
if (ret <= 0) if (ret <= 0)
goto err; goto err;
len += ret; len += ret;
@ -555,7 +639,7 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
/* read and format in a loop */ \ /* read and format in a loop */ \
for (i = 0; i < self->layout_count; i++) { \ for (i = 0; i < self->layout_count; i++) { \
if (fread(&disktype, bytes, \ if (fread(&disktype, bytes, \
1, self->file) != 1) \ 1, self->file) < 0) \
goto err; \ goto err; \
disktype = letoh(disktype); \ disktype = letoh(disktype); \
ret = sprintf(&str[len], " " fmt, \ ret = sprintf(&str[len], " " fmt, \
@ -565,14 +649,14 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
len += ret; \ len += ret; \
} \ } \
break break
CASE(INT8, "%" PRId8, t8.i, t8.u, , 1); CASE(INT8, "%hhd", t8.i, t8.u, , 1);
CASE(UINT8, "%" PRIu8, t8.u, t8.u, , 1); CASE(UINT8, "%hhu", t8.u, t8.u, , 1);
CASE(INT16, "%" PRId16, t16.i, t16.u, le16toh, 2); CASE(INT16, "%hd", t16.i, t16.u, le16toh, 2);
CASE(UINT16, "%" PRIu16, t16.u, t16.u, le16toh, 2); CASE(UINT16, "%hu", t16.u, t16.u, le16toh, 2);
CASE(INT32, "%" PRId32, t32.i, t32.u, le32toh, 4); CASE(INT32, "%d", t32.i, t32.u, le32toh, 4);
CASE(UINT32, "%" PRIu32, t32.u, t32.u, le32toh, 4); CASE(UINT32, "%u", t32.u, t32.u, le32toh, 4);
CASE(INT64, "%" PRId64, t64.i, t64.u, le64toh, 8); CASE(INT64, "%ld", t64.i, t64.u, le64toh, 8);
CASE(UINT64, "%" PRIu64, t64.u, t64.u, le64toh, 8); CASE(UINT64, "%lu", t64.u, t64.u, le64toh, 8);
/* These next two are a bit debatable. floats /* These next two are a bit debatable. floats
are 6-9 significant figures, so we print 7. are 6-9 significant figures, so we print 7.
Doubles are 15-19, so we print 17. This is Doubles are 15-19, so we print 17. This is
@ -589,7 +673,7 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
str[len++] = '\n'; str[len++] = '\n';
} }
PyObject *pystr = PyBytes_FromStringAndSize(str, len); PyObject *pystr = PyString_FromStringAndSize(str, len);
free(str); free(str);
return pystr; return pystr;
err: err:
@ -598,73 +682,6 @@ err:
return NULL; return NULL;
} }
/****
* Extract to binary bytes object containing raw little-endian binary data
*/
static PyObject *Rocket_extract_binary(Rocket *self, PyObject *args)
{
long count;
long offset;
if (!PyArg_ParseTuple(args, "ll", &offset, &count))
return NULL;
if (!self->file) {
PyErr_SetString(PyExc_Exception, "no file");
return NULL;
}
/* Seek to target location */
if (fseek(self->file, offset, SEEK_SET) < 0) {
PyErr_SetFromErrno(PyExc_OSError);
return NULL;
}
uint8_t *str;
int len = count * self->binary_size;
str = malloc(len);
if (str == NULL) {
PyErr_SetFromErrno(PyExc_OSError);
return NULL;
}
/* Data in the file is already in the desired little-endian
binary format, so just read it directly. */
if (fread(str, self->binary_size, count, self->file) != (size_t)count) {
free(str);
PyErr_SetFromErrno(PyExc_OSError);
return NULL;
}
PyObject *pystr = PyBytes_FromStringAndSize((char *)str, len);
free(str);
return pystr;
}
/****
* Extract timestamp
*/
static PyObject *Rocket_extract_timestamp(Rocket *self, PyObject *args)
{
long offset;
union64_t t64;
if (!PyArg_ParseTuple(args, "l", &offset))
return NULL;
if (!self->file) {
PyErr_SetString(PyExc_Exception, "no file");
return NULL;
}
/* Seek to target location and read timestamp */
if ((fseek(self->file, offset, SEEK_SET) < 0) ||
(fread(&t64.u, 8, 1, self->file) != 1)) {
PyErr_SetFromErrno(PyExc_OSError);
return NULL;
}
/* Convert and return */
t64.u = le64toh(t64.u);
return Py_BuildValue("L", (long long)t64.i);
}
/**** /****
* Module and type setup * Module and type setup
*/ */
@ -682,13 +699,15 @@ static PyMemberDef Rocket_members[] = {
}; };
static PyMethodDef Rocket_methods[] = { static PyMethodDef Rocket_methods[] = {
{ "close", { "close", (PyCFunction)Rocket_close, METH_NOARGS,
(PyCFunction)Rocket_close, METH_NOARGS,
"close(self)\n\n" "close(self)\n\n"
"Close file handle" }, "Close file handle" },
{ "append_string", { "append_iter", (PyCFunction)Rocket_append_iter, METH_VARARGS,
(PyCFunction)Rocket_append_string, METH_VARARGS, "append_iter(self, maxrows, iterable)\n\n"
"Append up to maxrows of data from iter to the file" },
{ "append_string", (PyCFunction)Rocket_append_string, METH_VARARGS,
"append_string(self, count, data, offset, line, start, end, ts)\n\n" "append_string(self, count, data, offset, line, start, end, ts)\n\n"
"Parse string and append data.\n" "Parse string and append data.\n"
"\n" "\n"
@ -703,56 +722,26 @@ static PyMethodDef Rocket_methods[] = {
"Raises ParseError if timestamps are non-monotonic, outside\n" "Raises ParseError if timestamps are non-monotonic, outside\n"
"the start/end interval etc.\n" "the start/end interval etc.\n"
"\n" "\n"
"On success, return a tuple:\n" "On success, return a tuple with three values:\n"
" added_rows: how many rows were added from the file\n" " added_rows: how many rows were added from the file\n"
" data_offset: current offset into the data string\n" " data_offset: current offset into the data string\n"
" last_timestamp: last timestamp we parsed\n" " last_timestamp: last timestamp we parsed" },
" linenum: current line number" },
{ "append_binary", { "extract_list", (PyCFunction)Rocket_extract_list, METH_VARARGS,
(PyCFunction)Rocket_append_binary, METH_VARARGS, "extract_list(self, offset, count)\n\n"
"append_binary(self, count, data, offset, line, start, end, ts)\n\n" "Extract count rows of data from the file at offset offset.\n"
"Append binary data, which must match the data layout.\n" "Return a list of lists [[row],[row],...]" },
"\n"
" count: maximum number of rows to add\n"
" data: binary data\n"
" offset: byte offset into data to start adding\n"
" line: current line number (unused)\n"
" start: starting timestamp for interval\n"
" end: end timestamp for interval\n"
" ts: last timestamp that was previously parsed\n"
"\n"
"Raises ParseError if timestamps are non-monotonic, outside\n"
"the start/end interval etc.\n"
"\n"
"On success, return a tuple:\n"
" added_rows: how many rows were added from the file\n"
" data_offset: current offset into the data string\n"
" last_timestamp: last timestamp we parsed\n"
" linenum: current line number (copied from argument)" },
{ "extract_string", { "extract_string", (PyCFunction)Rocket_extract_string, METH_VARARGS,
(PyCFunction)Rocket_extract_string, METH_VARARGS,
"extract_string(self, offset, count)\n\n" "extract_string(self, offset, count)\n\n"
"Extract count rows of data from the file at offset offset.\n" "Extract count rows of data from the file at offset offset.\n"
"Return an ascii formatted string according to the layout" }, "Return an ascii formatted string according to the layout" },
{ "extract_binary",
(PyCFunction)Rocket_extract_binary, METH_VARARGS,
"extract_binary(self, offset, count)\n\n"
"Extract count rows of data from the file at offset offset.\n"
"Return a raw binary string of data matching the data layout." },
{ "extract_timestamp",
(PyCFunction)Rocket_extract_timestamp, METH_VARARGS,
"extract_timestamp(self, offset)\n\n"
"Extract a single timestamp from the file" },
{ NULL }, { NULL },
}; };
static PyTypeObject RocketType = { static PyTypeObject RocketType = {
PyVarObject_HEAD_INIT(NULL, 0) PyObject_HEAD_INIT(NULL)
.tp_name = "rocket.Rocket", .tp_name = "rocket.Rocket",
.tp_basicsize = sizeof(Rocket), .tp_basicsize = sizeof(Rocket),
@ -777,23 +766,17 @@ static PyMethodDef module_methods[] = {
{ NULL }, { NULL },
}; };
static struct PyModuleDef moduledef = { PyMODINIT_FUNC
PyModuleDef_HEAD_INIT, initrocket(void)
.m_name = "rocker",
.m_doc = "Rocket data parsing and formatting module",
.m_size = -1,
.m_methods = module_methods,
};
PyMODINIT_FUNC PyInit_rocket(void)
{ {
PyObject *module; PyObject *module;
RocketType.tp_new = PyType_GenericNew; RocketType.tp_new = PyType_GenericNew;
if (PyType_Ready(&RocketType) < 0) if (PyType_Ready(&RocketType) < 0)
return NULL; return;
module = PyModule_Create(&moduledef); module = Py_InitModule3("rocket", module_methods,
"Rocket data parsing and formatting module");
Py_INCREF(&RocketType); Py_INCREF(&RocketType);
PyModule_AddObject(module, "Rocket", (PyObject *)&RocketType); PyModule_AddObject(module, "Rocket", (PyObject *)&RocketType);
@ -802,5 +785,5 @@ PyMODINIT_FUNC PyInit_rocket(void)
PyModule_AddObject(module, "ParseError", ParseError); PyModule_AddObject(module, "ParseError", ParseError);
add_parseerror_codes(module); add_parseerror_codes(module);
return module; return;
} }

View File

@ -1,49 +1,150 @@
"""CherryPy-based server for accessing NILM database via HTTP""" """CherryPy-based server for accessing NILM database via HTTP"""
import os # Need absolute_import so that "import nilmdb" won't pull in
import json # nilmdb.py, but will pull the nilmdb module instead.
import socket from __future__ import absolute_import
import traceback
import psutil
import cherrypy
import nilmdb.server import nilmdb.server
from nilmdb.utils.printf import sprintf from nilmdb.utils.printf import *
from nilmdb.server.errors import NilmDBError from nilmdb.server.errors import NilmDBError
from nilmdb.utils.time import string_to_timestamp
from nilmdb.server.serverutil import ( import cherrypy
chunked_response, import sys
response_type, import os
exception_to_httperror, import simplejson as json
CORS_allow, import decorator
json_to_request_params, import psutil
json_error_page,
cherrypy_start,
cherrypy_stop,
bool_param,
)
# Add CORS_allow tool class NilmApp(object):
cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
class NilmApp():
def __init__(self, db): def __init__(self, db):
self.db = db self.db = db
# Decorators
def chunked_response(func):
"""Decorator to enable chunked responses."""
# Set this to False to get better tracebacks from some requests
# (/stream/extract, /stream/intervals).
func._cp_config = { 'response.stream': True }
return func
def response_type(content_type):
"""Return a decorator-generating function that sets the
response type to the specified string."""
def wrapper(func, *args, **kwargs):
cherrypy.response.headers['Content-Type'] = content_type
return func(*args, **kwargs)
return decorator.decorator(wrapper)
@decorator.decorator
def workaround_cp_bug_1200(func, *args, **kwargs): # pragma: no cover
"""Decorator to work around CherryPy bug #1200 in a response
generator.
Even if chunked responses are disabled, LookupError or
UnicodeError exceptions may still be swallowed by CherryPy due to
bug #1200. This throws them as generic Exceptions instead so that
they make it through.
"""
exc_info = None
try:
for val in func(*args, **kwargs):
yield val
except (LookupError, UnicodeError):
# Re-raise it, but maintain the original traceback
exc_info = sys.exc_info()
new_exc = Exception(exc_info[0].__name__ + ": " + str(exc_info[1]))
raise new_exc, None, exc_info[2]
finally:
del exc_info
def exception_to_httperror(*expected):
"""Return a decorator-generating function that catches expected
errors and throws a HTTPError describing it instead.
@exception_to_httperror(NilmDBError, ValueError)
def foo():
pass
"""
def wrapper(func, *args, **kwargs):
exc_info = None
try:
return func(*args, **kwargs)
except expected:
# Re-raise it, but maintain the original traceback
exc_info = sys.exc_info()
new_exc = cherrypy.HTTPError("400 Bad Request", str(exc_info[1]))
raise new_exc, None, exc_info[2]
finally:
del exc_info
# We need to preserve the function's argspecs for CherryPy to
# handle argument errors correctly. Decorator.decorator takes
# care of that.
return decorator.decorator(wrapper)
# Custom CherryPy tools
def CORS_allow(methods):
"""This does several things:
Handles CORS preflight requests.
Adds Allow: header to all requests.
Raise 405 if request.method not in method.
It is similar to cherrypy.tools.allow, with the CORS stuff added.
"""
request = cherrypy.request.headers
response = cherrypy.response.headers
if not isinstance(methods, (tuple, list)): # pragma: no cover
methods = [ methods ]
methods = [ m.upper() for m in methods if m ]
if not methods: # pragma: no cover
methods = [ 'GET', 'HEAD' ]
elif 'GET' in methods and 'HEAD' not in methods: # pragma: no cover
methods.append('HEAD')
response['Allow'] = ', '.join(methods)
# Allow all origins
if 'Origin' in request:
response['Access-Control-Allow-Origin'] = request['Origin']
# If it's a CORS request, send response.
request_method = request.get("Access-Control-Request-Method", None)
request_headers = request.get("Access-Control-Request-Headers", None)
if (cherrypy.request.method == "OPTIONS" and
request_method and request_headers):
response['Access-Control-Allow-Headers'] = request_headers
response['Access-Control-Allow-Methods'] = ', '.join(methods)
# Try to stop further processing and return a 200 OK
cherrypy.response.status = "200 OK"
cherrypy.response.body = ""
cherrypy.request.handler = lambda: ""
return
# Reject methods that were not explicitly allowed
if cherrypy.request.method not in methods:
raise cherrypy.HTTPError(405)
cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
# Helper for json_in tool to process JSON data into normal request
# parameters.
def json_to_request_params(body):
cherrypy.lib.jsontools.json_processor(body)
if not isinstance(cherrypy.request.json, dict):
raise cherrypy.HTTPError(415)
cherrypy.request.params.update(cherrypy.request.json)
# CherryPy apps # CherryPy apps
class Root(NilmApp): class Root(NilmApp):
"""Root application for NILM database""" """Root application for NILM database"""
def __init__(self, db):
super(Root, self).__init__(db)
# / # /
@cherrypy.expose @cherrypy.expose
def index(self): def index(self):
cherrypy.response.headers['Content-Type'] = 'text/plain' raise cherrypy.NotFound()
msg = sprintf("This is NilmDB version %s, running on host %s.\n",
nilmdb.__version__, socket.getfqdn())
return msg
# /favicon.ico # /favicon.ico
@cherrypy.expose @cherrypy.expose
@ -63,58 +164,27 @@ class Root(NilmApp):
"""Return a dictionary with the database path, """Return a dictionary with the database path,
size of the database in bytes, and free disk space in bytes""" size of the database in bytes, and free disk space in bytes"""
path = self.db.get_basepath() path = self.db.get_basepath()
usage = psutil.disk_usage(path) return { "path": path,
dbsize = nilmdb.utils.du(path) "size": nilmdb.utils.du(path),
return { "free": psutil.disk_usage(path).free }
"path": path,
"size": dbsize,
"other": max(usage.used - dbsize, 0),
"reserved": max(usage.total - usage.used - usage.free, 0),
"free": usage.free
}
class Stream(NilmApp): class Stream(NilmApp):
"""Stream-specific operations""" """Stream-specific operations"""
# Helpers
def _get_times(self, start_param, end_param):
(start, end) = (None, None)
try:
if start_param is not None:
start = string_to_timestamp(start_param)
except Exception:
raise cherrypy.HTTPError("400 Bad Request", sprintf(
"invalid start (%s): must be a numeric timestamp",
start_param))
try:
if end_param is not None:
end = string_to_timestamp(end_param)
except Exception:
raise cherrypy.HTTPError("400 Bad Request", sprintf(
"invalid end (%s): must be a numeric timestamp", end_param))
if start is not None and end is not None:
if start >= end:
raise cherrypy.HTTPError(
"400 Bad Request",
sprintf("start must precede end (%s >= %s)",
start_param, end_param))
return (start, end)
# /stream/list # /stream/list
# /stream/list?layout=float32_8 # /stream/list?layout=float32_8
# /stream/list?path=/newton/prep&extended=1 # /stream/list?path=/newton/prep&extended=1
@cherrypy.expose @cherrypy.expose
@cherrypy.tools.json_out() @cherrypy.tools.json_out()
def list(self, path=None, layout=None, extended=None): def list(self, path = None, layout = None, extended = None):
"""List all streams in the database. With optional path or """List all streams in the database. With optional path or
layout parameter, just list streams that match the given path layout parameter, just list streams that match the given path
or layout. or layout.
If extended is missing or zero, returns a list of lists If extent is not given, returns a list of lists containing
containing the path and layout: [ path, layout ] the path and layout: [ path, layout ]
If extended is true, returns a list of lists containing If extended is provided, returns a list of lists containing
extended info: [ path, layout, extent_min, extent_max, extended info: [ path, layout, extent_min, extent_max,
total_rows, total_seconds ]. More data may be added. total_rows, total_seconds ]. More data may be added.
""" """
@ -125,7 +195,7 @@ class Stream(NilmApp):
@cherrypy.tools.json_in() @cherrypy.tools.json_in()
@cherrypy.tools.json_out() @cherrypy.tools.json_out()
@exception_to_httperror(NilmDBError, ValueError) @exception_to_httperror(NilmDBError, ValueError)
@cherrypy.tools.CORS_allow(methods=["POST"]) @cherrypy.tools.CORS_allow(methods = ["POST"])
def create(self, path, layout): def create(self, path, layout):
"""Create a new stream in the database. Provide path """Create a new stream in the database. Provide path
and one of the nilmdb.layout.layouts keys. and one of the nilmdb.layout.layouts keys.
@ -137,21 +207,11 @@ class Stream(NilmApp):
@cherrypy.tools.json_in() @cherrypy.tools.json_in()
@cherrypy.tools.json_out() @cherrypy.tools.json_out()
@exception_to_httperror(NilmDBError) @exception_to_httperror(NilmDBError)
@cherrypy.tools.CORS_allow(methods=["POST"]) @cherrypy.tools.CORS_allow(methods = ["POST"])
def destroy(self, path): def destroy(self, path):
"""Delete a stream. Fails if any data is still present.""" """Delete a stream and its associated data."""
return self.db.stream_destroy(path) return self.db.stream_destroy(path)
# /stream/rename?oldpath=/newton/prep&newpath=/newton/prep/1
@cherrypy.expose
@cherrypy.tools.json_in()
@cherrypy.tools.json_out()
@exception_to_httperror(NilmDBError, ValueError)
@cherrypy.tools.CORS_allow(methods=["POST"])
def rename(self, oldpath, newpath):
"""Rename a stream."""
return self.db.stream_rename(oldpath, newpath)
# /stream/get_metadata?path=/newton/prep # /stream/get_metadata?path=/newton/prep
# /stream/get_metadata?path=/newton/prep&key=foo&key=bar # /stream/get_metadata?path=/newton/prep&key=foo&key=bar
@cherrypy.expose @cherrypy.expose
@ -163,16 +223,16 @@ class Stream(NilmApp):
try: try:
data = self.db.stream_get_metadata(path) data = self.db.stream_get_metadata(path)
except nilmdb.server.nilmdb.StreamError as e: except nilmdb.server.nilmdb.StreamError as e:
raise cherrypy.HTTPError("404 Not Found", str(e)) raise cherrypy.HTTPError("404 Not Found", e.message)
if key is None: # If no keys specified, return them all if key is None: # If no keys specified, return them all
key = list(data.keys()) key = data.keys()
elif not isinstance(key, list): elif not isinstance(key, list):
key = [key] key = [ key ]
result = {} result = {}
for k in key: for k in key:
if k in data: if k in data:
result[k] = data[k] result[k] = data[k]
else: # Return "None" for keys with no matching value else: # Return "None" for keys with no matching value
result[k] = None result[k] = None
return result return result
@ -182,9 +242,11 @@ class Stream(NilmApp):
try: try:
data = dict(json.loads(data)) data = dict(json.loads(data))
except TypeError as e: except TypeError as e:
raise NilmDBError("can't parse 'data' parameter: " + str(e)) raise NilmDBError("can't parse 'data' parameter: " + e.message)
for key in data: for key in data:
if not isinstance(data[key], (str, float, int)): if not (isinstance(data[key], basestring) or
isinstance(data[key], float) or
isinstance(data[key], int)):
raise NilmDBError("metadata values must be a string or number") raise NilmDBError("metadata values must be a string or number")
function(path, data) function(path, data)
@ -193,7 +255,7 @@ class Stream(NilmApp):
@cherrypy.tools.json_in() @cherrypy.tools.json_in()
@cherrypy.tools.json_out() @cherrypy.tools.json_out()
@exception_to_httperror(NilmDBError, LookupError) @exception_to_httperror(NilmDBError, LookupError)
@cherrypy.tools.CORS_allow(methods=["POST"]) @cherrypy.tools.CORS_allow(methods = ["POST"])
def set_metadata(self, path, data): def set_metadata(self, path, data):
"""Set metadata for the named stream, replacing any existing """Set metadata for the named stream, replacing any existing
metadata. Data can be json-encoded or a plain dictionary.""" metadata. Data can be json-encoded or a plain dictionary."""
@ -204,7 +266,7 @@ class Stream(NilmApp):
@cherrypy.tools.json_in() @cherrypy.tools.json_in()
@cherrypy.tools.json_out() @cherrypy.tools.json_out()
@exception_to_httperror(NilmDBError, LookupError, ValueError) @exception_to_httperror(NilmDBError, LookupError, ValueError)
@cherrypy.tools.CORS_allow(methods=["POST"]) @cherrypy.tools.CORS_allow(methods = ["POST"])
def update_metadata(self, path, data): def update_metadata(self, path, data):
"""Set metadata for the named stream, replacing any existing """Set metadata for the named stream, replacing any existing
metadata. Data can be json-encoded or a plain dictionary.""" metadata. Data can be json-encoded or a plain dictionary."""
@ -214,46 +276,33 @@ class Stream(NilmApp):
@cherrypy.expose @cherrypy.expose
@cherrypy.tools.json_out() @cherrypy.tools.json_out()
@exception_to_httperror(NilmDBError, ValueError) @exception_to_httperror(NilmDBError, ValueError)
@cherrypy.tools.CORS_allow(methods=["PUT"]) @cherrypy.tools.CORS_allow(methods = ["PUT"])
def insert(self, path, start, end, binary=False): def insert(self, path, start, end):
""" """
Insert new data into the database. Provide textual data Insert new data into the database. Provide textual data
(matching the path's layout) as a HTTP PUT. (matching the path's layout) as a HTTP PUT.
If 'binary' is True, expect raw binary data, rather than lines
of ASCII-formatted data. Raw binary data is always
little-endian and matches the database types (including an
int64 timestamp).
""" """
binary = bool_param(binary)
# Important that we always read the input before throwing any # Important that we always read the input before throwing any
# errors, to keep lengths happy for persistent connections. # errors, to keep lengths happy for persistent connections.
# Note that CherryPy 3.2.2 has a bug where this fails for GET # Note that CherryPy 3.2.2 has a bug where this fails for GET
# requests, if we ever want to handle those (issue #1134) # requests, if we ever want to handle those (issue #1134)
body = cherrypy.request.body.read() body = cherrypy.request.body.read()
# Verify content type for binary data
content_type = cherrypy.request.headers.get('content-type')
if binary and content_type:
if content_type != "application/octet-stream":
raise cherrypy.HTTPError("400", "Content type must be "
"application/octet-stream for "
"binary data, not " + content_type)
# Note that non-binary data is *not* decoded from bytes to string,
# but rather passed directly to stream_insert.
# Check path and get layout # Check path and get layout
if len(self.db.stream_list(path=path)) != 1: streams = self.db.stream_list(path = path)
raise cherrypy.HTTPError("404", "No such stream: " + path) if len(streams) != 1:
raise cherrypy.HTTPError("404 Not Found", "No such stream")
# Check limits # Check limits
(start, end) = self._get_times(start, end) start = float(start)
end = float(end)
if start >= end:
raise cherrypy.HTTPError("400 Bad Request",
"start must precede end")
# Pass the data directly to nilmdb, which will parse it and # Pass the data directly to nilmdb, which will parse it and
# raise a ValueError if there are any problems. # raise a ValueError if there are any problems.
self.db.stream_insert(path, start, end, body, binary) self.db.stream_insert(path, start, end, body)
# Done # Done
return return
@ -262,72 +311,62 @@ class Stream(NilmApp):
# /stream/remove?path=/newton/prep&start=1234567890.0&end=1234567899.0 # /stream/remove?path=/newton/prep&start=1234567890.0&end=1234567899.0
@cherrypy.expose @cherrypy.expose
@cherrypy.tools.json_in() @cherrypy.tools.json_in()
@cherrypy.tools.CORS_allow(methods=["POST"]) @cherrypy.tools.json_out()
@chunked_response @exception_to_httperror(NilmDBError)
@response_type("application/x-json-stream") @cherrypy.tools.CORS_allow(methods = ["POST"])
def remove(self, path, start=None, end=None): def remove(self, path, start = None, end = None):
""" """
Remove data from the backend database. Removes all data in Remove data from the backend database. Removes all data in
the interval [start, end). the interval [start, end). Returns the number of data points
removed.
Returns the number of data points removed. Since this is a potentially
long-running operation, multiple numbers may be returned as the
data gets removed from the backend database. The total number of
points removed is the sum of all of these numbers.
""" """
(start, end) = self._get_times(start, end) if start is not None:
start = float(start)
if len(self.db.stream_list(path=path)) != 1: if end is not None:
raise cherrypy.HTTPError("404", "No such stream: " + path) end = float(end)
if start is not None and end is not None:
def content(start, end): if start >= end:
# Note: disable chunked responses to see tracebacks from here. raise cherrypy.HTTPError("400 Bad Request",
while True: "start must precede end")
(removed, restart) = self.db.stream_remove(path, start, end) return self.db.stream_remove(path, start, end)
response = json.dumps(removed) + "\r\n"
yield response.encode('utf-8')
if restart is None:
break
start = restart
return content(start, end)
# /stream/intervals?path=/newton/prep # /stream/intervals?path=/newton/prep
# /stream/intervals?path=/newton/prep&start=1234567890.0&end=1234567899.0 # /stream/intervals?path=/newton/prep&start=1234567890.0&end=1234567899.0
# /stream/intervals?path=/newton/prep&diffpath=/newton/prep2
@cherrypy.expose @cherrypy.expose
@chunked_response @chunked_response
@response_type("application/x-json-stream") @response_type("application/x-json-stream")
def intervals(self, path, start=None, end=None, diffpath=None): def intervals(self, path, start = None, end = None):
""" """
Get intervals from backend database. Streams the resulting Get intervals from backend database. Streams the resulting
intervals as JSON strings separated by CR LF pairs. This may intervals as JSON strings separated by CR LF pairs. This may
make multiple requests to the nilmdb backend to avoid causing make multiple requests to the nilmdb backend to avoid causing
it to block for too long. it to block for too long.
Returns intervals between 'start' and 'end' belonging to
'path'. If 'diff' is provided, the set-difference between
intervals in 'path' and intervals in 'diffpath' are
returned instead.
Note that the response type is the non-standard Note that the response type is the non-standard
'application/x-json-stream' for lack of a better option. 'application/x-json-stream' for lack of a better option.
""" """
(start, end) = self._get_times(start, end) if start is not None:
start = float(start)
if end is not None:
end = float(end)
if len(self.db.stream_list(path=path)) != 1: if start is not None and end is not None:
raise cherrypy.HTTPError("404", "No such stream: " + path) if start >= end:
raise cherrypy.HTTPError("400 Bad Request",
"start must precede end")
if diffpath and len(self.db.stream_list(path=diffpath)) != 1: streams = self.db.stream_list(path = path)
raise cherrypy.HTTPError("404", "No such stream: " + diffpath) if len(streams) != 1:
raise cherrypy.HTTPError("404 Not Found", "No such stream")
@workaround_cp_bug_1200
def content(start, end): def content(start, end):
# Note: disable chunked responses to see tracebacks from here. # Note: disable chunked responses to see tracebacks from here.
while True: while True:
(ints, restart) = self.db.stream_intervals(path, start, end, (ints, restart) = self.db.stream_intervals(path, start, end)
diffpath) response = ''.join([ json.dumps(i) + "\r\n" for i in ints ])
response = ''.join([json.dumps(i) + "\r\n" for i in ints]) yield response
yield response.encode('utf-8') if restart == 0:
if restart is None:
break break
start = restart start = restart
return content(start, end) return content(start, end)
@ -335,87 +374,71 @@ class Stream(NilmApp):
# /stream/extract?path=/newton/prep&start=1234567890.0&end=1234567899.0 # /stream/extract?path=/newton/prep&start=1234567890.0&end=1234567899.0
@cherrypy.expose @cherrypy.expose
@chunked_response @chunked_response
def extract(self, path, start=None, end=None, @response_type("text/plain")
count=False, markup=False, binary=False): def extract(self, path, start = None, end = None, count = False):
""" """
Extract data from backend database. Streams the resulting Extract data from backend database. Streams the resulting
entries as ASCII text lines separated by newlines. This may entries as ASCII text lines separated by newlines. This may
make multiple requests to the nilmdb backend to avoid causing make multiple requests to the nilmdb backend to avoid causing
it to block for too long. it to block for too long.
If 'count' is True, returns a count rather than actual data. Add count=True to return a count rather than actual data.
If 'markup' is True, adds comments to the stream denoting each
interval's start and end timestamp.
If 'binary' is True, return raw binary data, rather than lines
of ASCII-formatted data. Raw binary data is always
little-endian and matches the database types (including an
int64 timestamp).
""" """
binary = bool_param(binary) if start is not None:
markup = bool_param(markup) start = float(start)
count = bool_param(count) if end is not None:
end = float(end)
(start, end) = self._get_times(start, end) # Check parameters
if start is not None and end is not None:
if start >= end:
raise cherrypy.HTTPError("400 Bad Request",
"start must precede end")
# Check path and get layout # Check path and get layout
if len(self.db.stream_list(path=path)) != 1: streams = self.db.stream_list(path = path)
raise cherrypy.HTTPError("404", "No such stream: " + path) if len(streams) != 1:
raise cherrypy.HTTPError("404 Not Found", "No such stream")
if binary: @workaround_cp_bug_1200
content_type = "application/octet-stream" def content(start, end, count):
if markup or count:
raise cherrypy.HTTPError("400", "can't mix binary and "
"markup or count modes")
else:
content_type = "text/plain"
cherrypy.response.headers['Content-Type'] = content_type
def content(start, end):
# Note: disable chunked responses to see tracebacks from here. # Note: disable chunked responses to see tracebacks from here.
if count: if count:
matched = self.db.stream_extract(path, start, end, matched = self.db.stream_extract(path, start, end, count)
count=True) yield sprintf("%d\n", matched)
yield sprintf(b"%d\n", matched)
return return
while True: while True:
(data, restart) = self.db.stream_extract( (data, restart) = self.db.stream_extract(path, start, end)
path, start, end, count=False,
markup=markup, binary=binary)
yield data yield data
if restart is None: if restart == 0:
return return
start = restart start = restart
return content(start, end) return content(start, end, count)
class Exiter(object):
class Exiter():
"""App that exits the server, for testing""" """App that exits the server, for testing"""
@cherrypy.expose @cherrypy.expose
def index(self): def index(self):
cherrypy.response.headers['Content-Type'] = 'text/plain' cherrypy.response.headers['Content-Type'] = 'text/plain'
def content(): def content():
yield b'Exiting by request' yield 'Exiting by request'
raise SystemExit raise SystemExit
return content() return content()
index._cp_config = {'response.stream': True} index._cp_config = { 'response.stream': True }
class Server(object):
class Server(): def __init__(self, db, host = '127.0.0.1', port = 8080,
def __init__(self, db, host='127.0.0.1', port=8080, stoppable = False, # whether /exit URL exists
stoppable=False, # whether /exit URL exists embedded = True, # hide diagnostics and output, etc
fast_shutdown=False, # don't wait for clients to disconn. fast_shutdown = False, # don't wait for clients to disconn.
force_traceback=False, # include traceback in all errors force_traceback = False # include traceback in all errors
basepath='', # base URL path for cherrypy.tree
): ):
# Save server version, just for verification during tests # Save server version, just for verification during tests
self.version = nilmdb.__version__ self.version = nilmdb.__version__
self.embedded = embedded
self.db = db self.db = db
if not getattr(db, "_thread_safe", None): if not getattr(db, "_thread_safe", None):
raise KeyError("Database object " + str(db) + " doesn't claim " raise KeyError("Database object " + str(db) + " doesn't claim "
@ -425,12 +448,13 @@ class Server():
# Build up global server configuration # Build up global server configuration
cherrypy.config.update({ cherrypy.config.update({
'environment': 'embedded',
'server.socket_host': host, 'server.socket_host': host,
'server.socket_port': port, 'server.socket_port': port,
'engine.autoreload.on': False, 'engine.autoreload_on': False,
'server.max_request_body_size': 8*1024*1024, 'server.max_request_body_size': 8*1024*1024,
}) })
if self.embedded:
cherrypy.config.update({ 'environment': 'embedded' })
# Build up application specific configuration # Build up application specific configuration
app_config = {} app_config = {}
@ -439,23 +463,23 @@ class Server():
}) })
# Some default headers to just help identify that things are working # Some default headers to just help identify that things are working
app_config.update({'response.headers.X-Jim-Is-Awesome': 'yeah'}) app_config.update({ 'response.headers.X-Jim-Is-Awesome': 'yeah' })
# Set up Cross-Origin Resource Sharing (CORS) handler so we # Set up Cross-Origin Resource Sharing (CORS) handler so we
# can correctly respond to browsers' CORS preflight requests. # can correctly respond to browsers' CORS preflight requests.
# This also limits verbs to GET and HEAD by default. # This also limits verbs to GET and HEAD by default.
app_config.update({'tools.CORS_allow.on': True, app_config.update({ 'tools.CORS_allow.on': True,
'tools.CORS_allow.methods': ['GET', 'HEAD']}) 'tools.CORS_allow.methods': ['GET', 'HEAD'] })
# Configure the 'json_in' tool to also allow other content-types # Configure the 'json_in' tool to also allow other content-types
# (like x-www-form-urlencoded), and to treat JSON as a dict that # (like x-www-form-urlencoded), and to treat JSON as a dict that
# fills requests.param. # fills requests.param.
app_config.update({'tools.json_in.force': False, app_config.update({ 'tools.json_in.force': False,
'tools.json_in.processor': json_to_request_params}) 'tools.json_in.processor': json_to_request_params })
# Send tracebacks in error responses. They're hidden by the # Send tracebacks in error responses. They're hidden by the
# error_page function for client errors (code 400-499). # error_page function for client errors (code 400-499).
app_config.update({'request.show_tracebacks': True}) app_config.update({ 'request.show_tracebacks' : True })
self.force_traceback = force_traceback self.force_traceback = force_traceback
# Patch CherryPy error handler to never pad out error messages. # Patch CherryPy error handler to never pad out error messages.
@ -469,78 +493,79 @@ class Server():
if stoppable: if stoppable:
root.exit = Exiter() root.exit = Exiter()
cherrypy.tree.apps = {} cherrypy.tree.apps = {}
cherrypy.tree.mount(root, basepath, config={"/": app_config}) cherrypy.tree.mount(root, "/", config = { "/" : app_config })
# Shutdowns normally wait for clients to disconnect. To speed # Shutdowns normally wait for clients to disconnect. To speed
# up tests, set fast_shutdown = True # up tests, set fast_shutdown = True
if fast_shutdown: if fast_shutdown:
cherrypy.server.shutdown_timeout = 0 # Setting timeout to 0 triggers os._exit(70) at shutdown, grr...
cherrypy.server.shutdown_timeout = 0.01
else: else:
cherrypy.server.shutdown_timeout = 5 cherrypy.server.shutdown_timeout = 5
# Set up the WSGI application pointer for external programs
self.wsgi_application = cherrypy.tree
def json_error_page(self, status, message, traceback, version): def json_error_page(self, status, message, traceback, version):
"""Return a custom error page in JSON so the client can parse it""" """Return a custom error page in JSON so the client can parse it"""
return json_error_page(status, message, traceback, version, errordata = { "status" : status,
self.force_traceback) "message" : message,
"traceback" : traceback }
# Don't send a traceback if the error was 400-499 (client's fault)
try:
code = int(status.split()[0])
if not self.force_traceback:
if code >= 400 and code <= 499:
errordata["traceback"] = ""
except Exception: # pragma: no cover
pass
# Override the response type, which was previously set to text/html
cherrypy.serving.response.headers['Content-Type'] = (
"application/json;charset=utf-8" )
# Undo the HTML escaping that cherrypy's get_error_page function applies
# (cherrypy issue 1135)
for k, v in errordata.iteritems():
v = v.replace("&lt;","<")
v = v.replace("&gt;",">")
v = v.replace("&amp;","&")
errordata[k] = v
return json.dumps(errordata, separators=(',',':'))
def start(self, blocking=False, event=None): def start(self, blocking = False, event = None):
cherrypy_start(blocking, event)
if not self.embedded: # pragma: no cover
# Handle signals nicely
if hasattr(cherrypy.engine, "signal_handler"):
cherrypy.engine.signal_handler.subscribe()
if hasattr(cherrypy.engine, "console_control_handler"):
cherrypy.engine.console_control_handler.subscribe()
# Cherrypy stupidly calls os._exit(70) when it can't bind the
# port. At least try to print a reasonable error and continue
# in this case, rather than just dying silently (as we would
# otherwise do in embedded mode)
real_exit = os._exit
def fake_exit(code): # pragma: no cover
if code == os.EX_SOFTWARE:
fprintf(sys.stderr, "error: CherryPy called os._exit!\n")
else:
real_exit(code)
os._exit = fake_exit
cherrypy.engine.start()
os._exit = real_exit
# Signal that the engine has started successfully
if event is not None:
event.set()
if blocking:
try:
cherrypy.engine.wait(cherrypy.engine.states.EXITING,
interval = 0.1, channel = 'main')
except (KeyboardInterrupt, IOError): # pragma: no cover
cherrypy.engine.log('Keyboard Interrupt: shutting down bus')
cherrypy.engine.exit()
except SystemExit: # pragma: no cover
cherrypy.engine.log('SystemExit raised: shutting down bus')
cherrypy.engine.exit()
raise
def stop(self): def stop(self):
cherrypy_stop() cherrypy.engine.exit()
# Use a single global nilmdb.server.NilmDB and nilmdb.server.Server
# instance since the database can only be opened once. For this to
# work, the web server must use only a single process and single
# Python interpreter. Multiple threads are OK.
_wsgi_server = None
def wsgi_application(dbpath, basepath):
"""Return a WSGI application object with a database at the
specified path.
'dbpath' is a filesystem location, e.g. /home/nilm/db
'basepath' is the URL path of the application base, which
is the same as the first argument to Apache's WSGIScriptAlias
directive.
"""
def application(environ, start_response):
global _wsgi_server
if _wsgi_server is None:
# Try to start the server
try:
db = nilmdb.utils.serializer_proxy(
nilmdb.server.NilmDB)(dbpath)
_wsgi_server = nilmdb.server.Server(
db, basepath=basepath.rstrip('/'))
except Exception:
# Build an error message on failure
import pprint
err = sprintf("Initializing database at path '%s' failed:\n\n",
dbpath)
err += traceback.format_exc()
import pwd
import grp
err += sprintf("\nRunning as: uid=%d (%s), gid=%d (%s) "
"on host %s, pid %d\n",
os.getuid(), pwd.getpwuid(os.getuid())[0],
os.getgid(), grp.getgrgid(os.getgid())[0],
socket.gethostname(), os.getpid())
err += sprintf("\nEnvironment:\n%s\n", pprint.pformat(environ))
if _wsgi_server is None:
# Serve up the error with our own mini WSGI app.
err_b = err.encode('utf-8')
headers = [('Content-type', 'text/plain; charset=utf-8'),
('Content-length', str(len(err_b)))]
start_response("500 Internal Server Error", headers)
return [err_b]
# Call the normal application
return _wsgi_server.wsgi_application(environ, start_response)
return application

View File

@ -1,225 +0,0 @@
"""Miscellaneous decorators and other helpers for running a CherryPy
server"""
import os
import sys
import json
import decorator
import functools
import threading
import cherrypy
# Helper to parse parameters into booleans
def bool_param(s):
"""Return a bool indicating whether parameter 's' was True or False,
supporting a few different types for 's'."""
try:
ss = s.lower()
if ss in ["0", "false", "f", "no", "n"]:
return False
if ss in ["1", "true", "t", "yes", "y"]:
return True
except Exception:
return bool(s)
raise cherrypy.HTTPError("400 Bad Request",
"can't parse parameter: " + ss)
# Decorators
def chunked_response(func):
"""Decorator to enable chunked responses."""
# Set this to False to get better tracebacks from some requests
# (/stream/extract, /stream/intervals).
func._cp_config = {'response.stream': True}
return func
def response_type(content_type):
"""Return a decorator-generating function that sets the
response type to the specified string."""
def wrapper(func, *args, **kwargs):
cherrypy.response.headers['Content-Type'] = content_type
return func(*args, **kwargs)
return decorator.decorator(wrapper)
def exception_to_httperror(*expected):
"""Return a decorator-generating function that catches expected
errors and throws a HTTPError describing it instead.
@exception_to_httperror(NilmDBError, ValueError)
def foo():
pass
"""
def wrapper(func, *args, **kwargs):
exc_info = None
try:
return func(*args, **kwargs)
except expected:
# Re-raise it, but maintain the original traceback
exc_info = sys.exc_info()
new_exc = cherrypy.HTTPError("400 Bad Request", str(exc_info[1]))
raise new_exc.with_traceback(exc_info[2])
finally:
del exc_info
# We need to preserve the function's argspecs for CherryPy to
# handle argument errors correctly. Decorator.decorator takes
# care of that.
return decorator.decorator(wrapper)
# Custom CherryPy tools
def CORS_allow(methods):
"""This does several things:
Handles CORS preflight requests.
Adds Allow: header to all requests.
Raise 405 if request.method not in method.
It is similar to cherrypy.tools.allow, with the CORS stuff added.
Add this to CherryPy with:
cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
"""
request = cherrypy.request.headers
response = cherrypy.response.headers
if not isinstance(methods, (tuple, list)):
methods = [methods]
methods = [m.upper() for m in methods if m]
if not methods:
methods = ['GET', 'HEAD']
elif 'GET' in methods and 'HEAD' not in methods:
methods.append('HEAD')
response['Allow'] = ', '.join(methods)
# Allow all origins
if 'Origin' in request:
response['Access-Control-Allow-Origin'] = request['Origin']
# If it's a CORS request, send response.
request_method = request.get("Access-Control-Request-Method", None)
request_headers = request.get("Access-Control-Request-Headers", None)
if (cherrypy.request.method == "OPTIONS" and
request_method and request_headers):
response['Access-Control-Allow-Headers'] = request_headers
response['Access-Control-Allow-Methods'] = ', '.join(methods)
# Try to stop further processing and return a 200 OK
cherrypy.response.status = "200 OK"
cherrypy.response.body = b""
cherrypy.request.handler = lambda: ""
return
# Reject methods that were not explicitly allowed
if cherrypy.request.method not in methods:
raise cherrypy.HTTPError(405)
# Helper for json_in tool to process JSON data into normal request
# parameters.
def json_to_request_params(body):
cherrypy.lib.jsontools.json_processor(body)
if not isinstance(cherrypy.request.json, dict):
raise cherrypy.HTTPError(415)
cherrypy.request.params.update(cherrypy.request.json)
# Used as an "error_page.default" handler
def json_error_page(status, message, traceback, version,
force_traceback=False):
"""Return a custom error page in JSON so the client can parse it"""
errordata = {"status": status,
"message": message,
"version": version,
"traceback": traceback}
# Don't send a traceback if the error was 400-499 (client's fault)
code = int(status.split()[0])
if not force_traceback:
if 400 <= code <= 499:
errordata["traceback"] = ""
# Override the response type, which was previously set to text/html
cherrypy.serving.response.headers['Content-Type'] = (
"application/json;charset=utf-8")
# Undo the HTML escaping that cherrypy's get_error_page function applies
# (cherrypy issue 1135)
for k, v in errordata.items():
v = v.replace("&lt;", "<")
v = v.replace("&gt;", ">")
v = v.replace("&amp;", "&")
errordata[k] = v
return json.dumps(errordata, separators=(',', ':'))
class CherryPyExit(SystemExit):
pass
def cherrypy_patch_exit():
# Cherrypy stupidly calls os._exit(70) when it can't bind the port
# and exits. Instead of that, raise a CherryPyExit (derived from
# SystemExit). This exception may not make it back up to the caller
# due to internal thread use in the CherryPy engine, but there should
# be at least some indication that it happened.
bus = cherrypy.process.wspbus.bus
if "_patched_exit" in bus.__dict__:
return
bus._patched_exit = True
def patched_exit(orig):
real_exit = os._exit
def fake_exit(code):
raise CherryPyExit(code)
os._exit = fake_exit
try:
orig()
finally:
os._exit = real_exit
bus.exit = functools.partial(patched_exit, bus.exit)
# A behavior change in Python 3.8 means that some thread exceptions,
# derived from SystemExit, now print tracebacks where they didn't
# used to: https://bugs.python.org/issue1230540
# Install a thread exception hook that ignores CherryPyExit;
# to make this match the behavior where we didn't set
# threading.excepthook, we also need to ignore SystemExit.
def hook(args):
if args.exc_type == CherryPyExit or args.exc_type == SystemExit:
return
sys.excepthook(args.exc_type, args.exc_value,
args.exc_traceback) # pragma: no cover
threading.excepthook = hook
# Start/stop CherryPy standalone server
def cherrypy_start(blocking=False, event=False):
"""Start the CherryPy server, handling errors and signals
somewhat gracefully."""
cherrypy_patch_exit()
# Start the server
cherrypy.engine.start()
# Signal that the engine has started successfully
if event is not None:
event.set()
if blocking:
try:
cherrypy.engine.wait(cherrypy.engine.states.EXITING,
interval=0.1, channel='main')
except (KeyboardInterrupt, IOError):
cherrypy.engine.log('Keyboard Interrupt: shutting down')
cherrypy.engine.exit()
except SystemExit:
cherrypy.engine.log('SystemExit raised: shutting down')
cherrypy.engine.exit()
raise
# Stop CherryPy server
def cherrypy_stop():
cherrypy.engine.exit()

View File

@ -1,7 +1,7 @@
"""NilmDB utilities""" """NilmDB utilities"""
from nilmdb.utils.timer import Timer from nilmdb.utils.timer import Timer
from nilmdb.utils.iteratorizer import Iteratorizer
from nilmdb.utils.serializer import serializer_proxy from nilmdb.utils.serializer import serializer_proxy
from nilmdb.utils.lrucache import lru_cache from nilmdb.utils.lrucache import lru_cache
from nilmdb.utils.diskusage import du, human_size from nilmdb.utils.diskusage import du, human_size
@ -10,7 +10,3 @@ from nilmdb.utils import atomic
import nilmdb.utils.threadsafety import nilmdb.utils.threadsafety
import nilmdb.utils.fallocate import nilmdb.utils.fallocate
import nilmdb.utils.time import nilmdb.utils.time
import nilmdb.utils.iterator
import nilmdb.utils.interval
import nilmdb.utils.lock
import nilmdb.utils.sort

View File

@ -2,12 +2,12 @@
import os import os
def replace_file(filename, content): def replace_file(filename, content):
"""Attempt to atomically and durably replace the filename with the """Attempt to atomically and durably replace the filename with the
given contents""" given contents. This is intended to be 'pretty good on most
OSes', but not necessarily bulletproof."""
newfilename = filename + b".new" newfilename = filename + ".new"
# Write to new file, flush it # Write to new file, flush it
with open(newfilename, "wb") as f: with open(newfilename, "wb") as f:
@ -16,4 +16,11 @@ def replace_file(filename, content):
os.fsync(f.fileno()) os.fsync(f.fileno())
# Move new file over old one # Move new file over old one
os.replace(newfilename, filename) try:
os.rename(newfilename, filename)
except OSError: # pragma: no cover
# Some OSes might not support renaming over an existing file.
# This is definitely NOT atomic!
os.remove(filename)
os.rename(newfilename, filename)

View File

@ -0,0 +1,710 @@
#!/usr/bin/python
#
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Disable the invalid name warning as we are inheriting from a standard library
# object.
# pylint: disable-msg=C6409,W0212
"""A version of the datetime module which *cares* about timezones.
This module will never return a naive datetime object. This requires the module
know your local timezone, which it tries really hard to figure out.
You can override the detection by using the datetime.tzaware.defaulttz_set
method. It the module is unable to figure out the timezone itself this method
*must* be called before the normal module is imported. If done before importing
it can also speed up the time taken to import as the defaulttz will no longer
try and do the detection.
"""
__author__ = "tansell@google.com (Tim Ansell)"
import calendar
import datetime
import os
import os.path
import re
import time
import warnings
import dateutil.parser
import dateutil.relativedelta
import dateutil.tz
import pytz
import pytz_abbr
try:
# pylint: disable-msg=C6204
import functools
except ImportError, e:
class functools(object):
"""Fake replacement for a full functools."""
# pylint: disable-msg=W0613
@staticmethod
def wraps(f, *args, **kw):
return f
# Need to patch pytz.utc to have a _utcoffset so you can normalize/localize
# using it.
pytz.utc._utcoffset = datetime.timedelta()
timedelta = datetime.timedelta
def _tzinfome(tzinfo):
  """Gets a tzinfo object from a string.

  Args:
    tzinfo: A string (or string like) object, or a datetime.tzinfo object.

  Returns:
    An datetime.tzinfo object.

  Raises:
    UnknownTimeZoneError: If the timezone given can't be decoded.
  """
  # Already a real tzinfo object? Pass it straight through.
  if isinstance(tzinfo, datetime.tzinfo):
    return tzinfo
  try:
    return pytz.timezone(tzinfo)
  except AttributeError:
    raise pytz.UnknownTimeZoneError("Unknown timezone! %s" % tzinfo)
# Our "local" timezone
_localtz = None
def localtz():
  """Get the local timezone.

  Returns:
    The localtime timezone as a tzinfo object.
  """
  # pylint: disable-msg=W0603
  global _localtz
  # Timezone detection is expensive, so cache the result in the
  # module-level _localtz on first use.
  if _localtz is None:
    _localtz = detect_timezone()
  return _localtz
def localtz_set(timezone):
  """Set the local timezone.

  Args:
    timezone: A timezone name string or datetime.tzinfo object; it is
      normalized through _tzinfome before being cached.
  """
  # pylint: disable-msg=W0603
  global _localtz
  _localtz = _tzinfome(timezone)
def detect_timezone():
  """Try and detect the timezone that Python is currently running in.

  We have a bunch of different methods for trying to figure this out (listed in
  order they are attempted).
    * Try TZ environment variable.
    * Try and find /etc/timezone file (with timezone name).
    * Try and find /etc/localtime file (with timezone data).
    * Try and match a TZ to the current dst/offset/shortname.

  Returns:
    The detected local timezone as a tzinfo object

  Raises:
    pytz.UnknownTimeZoneError: If it was unable to detect a timezone.
  """
  # Run the cheap, reliable detectors first, in order of preference.
  for detector in (_detect_timezone_environ,
                   _detect_timezone_etc_timezone,
                   _detect_timezone_etc_localtime):
    tz = detector()
    if tz is not None:
      return tz

  # Last resort: match time.tzname/time.timezone/time.daylight against
  # pytz zones, similar to what PHP does.
  warnings.warn("Had to fall back to worst detection method (the 'PHP' "
                "method).")
  tz = _detect_timezone_php()
  if tz is not None:
    return tz

  raise pytz.UnknownTimeZoneError("Unable to detect your timezone!")
def _detect_timezone_environ():
  """Detect the timezone from the TZ environment variable, if set.

  Returns None (implicitly) when TZ is unset or unrecognized.
  """
  tzname = os.environ.get("TZ")
  if tzname is None:
    return
  try:
    return pytz.timezone(tzname)
  except (IOError, pytz.UnknownTimeZoneError):
    warnings.warn("You provided a TZ environment value (%r) we did not "
                  "understand!" % tzname)
def _detect_timezone_etc_timezone():
  """Detect the timezone from /etc/timezone (Debian-style zone name file).

  Returns a pytz tzinfo object, or None (implicitly) on any failure.
  """
  if os.path.exists("/etc/timezone"):
    try:
      # /etc/timezone holds a single zone name, e.g. "Europe/London".
      # NOTE: py2-only builtin file() is used here.
      tz = file("/etc/timezone").read().strip()
      try:
        return pytz.timezone(tz)
      except (IOError, pytz.UnknownTimeZoneError), ei:
        warnings.warn("Your /etc/timezone file references a timezone (%r) that"
                      " is not valid (%r)." % (tz, ei))

    # Problem reading the /etc/timezone file
    except IOError, eo:
      warnings.warn("Could not access your /etc/timezone file: %s" % eo)
def _detect_timezone_etc_localtime():
  """Detect the timezone by comparing /etc/localtime against pytz zones.

  Builds a tzinfo from the raw /etc/localtime tzdata and then looks for a
  named pytz zone whose attributes match it exactly, to recover a "human
  name" for the zone.  Returns None (implicitly) if /etc/localtime does
  not exist.
  """
  matches = []
  if os.path.exists("/etc/localtime"):
    localtime = pytz.tzfile.build_tzinfo("/etc/localtime",
                                         file("/etc/localtime"))

    # See if we can find a "Human Name" for this..
    for tzname in pytz.all_timezones:
      tz = _tzinfome(tzname)

      # Quick reject: attribute sets must be identical.
      if dir(tz) != dir(localtime):
        continue

      for attrib in dir(tz):
        # Ignore functions and specials
        if callable(getattr(tz, attrib)) or attrib.startswith("__"):
          continue

        # This will always be different
        if attrib == "zone" or attrib == "_tzinfos":
          continue

        if getattr(tz, attrib) != getattr(localtime, attrib):
          break

      # We get here iff break didn't happen, i.e. no meaningful attributes
      # differ between tz and localtime
      else:
        matches.append(tzname)

    if len(matches) == 1:
      return _tzinfome(matches[0])
    else:
      # Warn the person about this!
      warning = "Could not get a human name for your timezone: "
      if len(matches) > 1:
        warning += ("We detected multiple matches for your /etc/localtime. "
                    "(Matches where %s)" % matches)
        # NOTE(review): in the multiple-match case the warning text is built
        # but never emitted before returning -- confirm whether intended.
        return _tzinfome(matches[0])
      else:
        warning += "We detected no matches for your /etc/localtime."
        warnings.warn(warning)

        # Register /etc/localtime as the timezone loaded.
        pytz._tzinfo_cache['/etc/localtime'] = localtime
        return localtime
def _detect_timezone_php():
  """Detect the timezone PHP-style.

  Matches (time.tzname[0], time.timezone, time.daylight) against the
  private _tzname/_utcoffset attributes of every pytz zone, localized at
  the current wall-clock time.  Least reliable detection method.
  """
  tomatch = (time.tzname[0], time.timezone, time.daylight)
  now = datetime.datetime.now()

  matches = []
  for tzname in pytz.all_timezones:
    try:
      tz = pytz.timezone(tzname)
    except IOError:
      continue

    try:
      # timetuple()[-1] is tm_isdst for the localized "now".
      indst = tz.localize(now).timetuple()[-1]

      if tomatch == (tz._tzname, -tz._utcoffset.seconds, indst):
        matches.append(tzname)

    # pylint: disable-msg=W0704
    except AttributeError:
      pass

  if len(matches) > 1:
    warnings.warn("We detected multiple matches for the timezone, choosing "
                  "the first %s. (Matches where %s)" % (matches[0], matches))
  # NOTE(review): if matches is empty this raises IndexError rather than
  # returning None -- TODO confirm callers never hit that path.
  return pytz.timezone(matches[0])
class datetime_tz(datetime.datetime):
  """An extension of the inbuilt datetime adding more functionality.

  The extra functionality includes:
    * Partial parsing support (IE 2006/02/30 matches %Y/%M/%D %H:%M)
    * Full integration with pytz (just give it the string of the timezone!)
    * Proper support for going to/from Unix timestamps (which are in UTC!).
  """
  # is_dst: whether this instance falls in daylight saving time; computed
  # in __new__ from the final localized datetime.
  __slots__ = ["is_dst"]

  def __new__(cls, *args, **kw):
    """Create a new, always timezone-aware, datetime_tz.

    Accepts either an existing datetime.datetime instance plus an optional
    timezone, or the usual datetime constructor arguments.  The timezone
    may be given as the last positional argument (tzinfo object or string)
    or via the tzinfo= keyword; naive datetimes are localized to it, or to
    localtz() when no timezone is given.
    """
    args = list(args)
    if not args:
      raise TypeError("Not enough arguments given.")

    # See if we are given a tzinfo object...
    tzinfo = None
    if isinstance(args[-1], (datetime.tzinfo, basestring)):
      tzinfo = _tzinfome(args.pop(-1))
    elif kw.get("tzinfo", None) is not None:
      tzinfo = _tzinfome(kw.pop("tzinfo"))

    # Create a datetime object if we don't have one
    if isinstance(args[0], datetime.datetime):
      # Convert the datetime instance to a datetime object.
      newargs = (list(args[0].timetuple()[0:6]) +
                 [args[0].microsecond, args[0].tzinfo])
      dt = datetime.datetime(*newargs)

      if tzinfo is None and dt.tzinfo is None:
        raise TypeError("Must specify a timezone!")

      if tzinfo is not None and dt.tzinfo is not None:
        raise TypeError("Can not give a timezone with timezone aware"
                        " datetime object! (Use localize.)")
    else:
      dt = datetime.datetime(*args, **kw)

    if dt.tzinfo is not None:
      # Re-normalize the dt object
      dt = dt.tzinfo.normalize(dt)
    else:
      if tzinfo is None:
        tzinfo = localtz()

      try:
        # is_dst=None makes pytz raise on ambiguous/non-existent times.
        dt = tzinfo.localize(dt, is_dst=None)
      except pytz.AmbiguousTimeError:
        is_dst = None
        if "is_dst" in kw:
          is_dst = kw.pop("is_dst")

        try:
          dt = tzinfo.localize(dt, is_dst)
        except IndexError:
          raise pytz.AmbiguousTimeError("No such time exists!")

    newargs = list(dt.timetuple()[0:6])+[dt.microsecond, dt.tzinfo]
    obj = datetime.datetime.__new__(cls, *newargs)
    obj.is_dst = obj.dst() != datetime.timedelta(0)
    return obj

  def asdatetime(self, naive=True):
    """Return this datetime_tz as a datetime object.

    Args:
      naive: Return *without* any tz info.

    Returns:
      This datetime_tz as a datetime object.
    """
    args = list(self.timetuple()[0:6])+[self.microsecond]
    if not naive:
      args.append(self.tzinfo)
    return datetime.datetime(*args)

  def asdate(self):
    """Return this datetime_tz as a date object.

    Returns:
      This datetime_tz as a date object.
    """
    return datetime.date(self.year, self.month, self.day)

  def totimestamp(self):
    """Convert this datetime object back to a unix timestamp.

    The Unix epoch is the time 00:00:00 UTC on January 1, 1970.

    Returns:
      Unix timestamp.
    """
    return calendar.timegm(self.utctimetuple())+1e-6*self.microsecond

  def astimezone(self, tzinfo):
    """Returns a version of this timestamp converted to the given timezone.

    Args:
      tzinfo: Either a datetime.tzinfo object or a string (which will be
              looked up in pytz).

    Returns:
      A datetime_tz object in the given timezone.
    """
    # Assert we are not a naive datetime object
    assert self.tzinfo is not None

    tzinfo = _tzinfome(tzinfo)

    d = self.asdatetime(naive=False).astimezone(tzinfo)
    return datetime_tz(d)

  # pylint: disable-msg=C6113
  def replace(self, **kw):
    """Return datetime with new specified fields given as arguments.

    For example, dt.replace(days=4) would return a new datetime_tz object with
    exactly the same as dt but with the days attribute equal to 4.

    Any attribute can be replaced, but tzinfo can not be set to None.

    Args:
      Any datetime_tz attribute.

    Returns:
      A datetime_tz object with the attributes replaced.

    Raises:
      TypeError: If the given replacement is invalid.
    """
    if "tzinfo" in kw:
      if kw["tzinfo"] is None:
        raise TypeError("Can not remove the timezone use asdatetime()")

    # Preserve (or override) the DST flag so re-localizing the replaced
    # datetime picks the same side of an ambiguous wall-clock time.
    is_dst = None
    if "is_dst" in kw:
      is_dst = kw["is_dst"]
      del kw["is_dst"]
    else:
      # Use our own DST setting..
      is_dst = self.is_dst

    replaced = self.asdatetime().replace(**kw)

    return datetime_tz(replaced, tzinfo=self.tzinfo.zone, is_dst=is_dst)

  # pylint: disable-msg=C6310
  @classmethod
  def smartparse(cls, toparse, tzinfo=None):
    """Method which uses dateutil.parse and extras to try and parse the string.

    Valid dates are found at:
     http://labix.org/python-dateutil#head-1443e0f14ad5dff07efd465e080d1110920673d8-2

    Other valid formats include:
      "now" or "today"
      "yesterday"
      "tommorrow"
      "5 minutes ago"
      "10 hours ago"
      "10h5m ago"
      "start of yesterday"
      "end of tommorrow"
      "end of 3rd of March"

    Args:
      toparse: The string to parse.
      tzinfo: Timezone for the resultant datetime_tz object should be in.
              (Defaults to your local timezone.)

    Returns:
      New datetime_tz object.

    Raises:
      ValueError: If unable to make sense of the input.
    """
    # Default for empty fields are:
    #  year/month/day == now
    #  hour/minute/second/microsecond == 0
    toparse = toparse.strip()

    if tzinfo is None:
      dt = cls.now()
    else:
      dt = cls.now(tzinfo)

    default = dt.replace(hour=0, minute=0, second=0, microsecond=0)

    # Remove "start of " and "end of " prefix in the string
    if toparse.lower().startswith("end of "):
      toparse = toparse[7:].strip()

      # "end of <day>" == last microsecond of that day.
      dt += datetime.timedelta(days=1)
      dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
      dt -= datetime.timedelta(microseconds=1)

      default = dt

    elif toparse.lower().startswith("start of "):
      toparse = toparse[9:].strip()

      dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
      default = dt

    # Handle strings with "now", "today", "yesterday", "tomorrow" and "ago".
    # Need to use lowercase
    toparselower = toparse.lower()

    if toparselower in ["now", "today"]:
      pass

    elif toparselower == "yesterday":
      dt -= datetime.timedelta(days=1)

    # NOTE: "tommorrow" (sic) is the literal token this parser accepts.
    elif toparselower == "tommorrow":
      dt += datetime.timedelta(days=1)

    elif "ago" in toparselower:
      # Remove the "ago" bit
      toparselower = toparselower[:-3]
      # Replace all "a day and an hour" with "1 day 1 hour"
      toparselower = toparselower.replace("a ", "1 ")
      toparselower = toparselower.replace("an ", "1 ")
      toparselower = toparselower.replace(" and ", " ")

      # Match the following
      # 1 hour ago
      # 1h ago
      # 1 h ago
      # 1 hour ago
      # 2 hours ago
      # Same with minutes, seconds, etc.

      tocheck = ("seconds", "minutes", "hours", "days", "weeks", "months",
                 "years")
      result = {}
      for match in re.finditer("([0-9]+)([^0-9]*)", toparselower):
        amount = int(match.group(1))
        unit = match.group(2).strip()

        # Accept the unit's first letter, or the full word with optional "s".
        for bit in tocheck:
          regex = "^([%s]|((%s)s?))$" % (
              bit[0], bit[:-1])

          bitmatch = re.search(regex, unit)
          if bitmatch:
            result[bit] = amount
            break
        else:
          raise ValueError("Was not able to parse date unit %r!" % unit)

      delta = dateutil.relativedelta.relativedelta(**result)
      dt -= delta

    else:
      # Handle strings with normal datetime format, use original case.
      dt = dateutil.parser.parse(toparse, default=default.asdatetime(),
                                 tzinfos=pytz_abbr.tzinfos)
      if dt is None:
        raise ValueError("Was not able to parse date!")

      # pytz_abbr.unknown marks "a timezone was present but unrecognized";
      # treat the result as naive in that case.
      if dt.tzinfo is pytz_abbr.unknown:
        dt = dt.replace(tzinfo=None)

      if dt.tzinfo is None:
        if tzinfo is None:
          tzinfo = localtz()
        dt = cls(dt, tzinfo)
      else:
        if isinstance(dt.tzinfo, pytz_abbr.tzabbr):
          # Resolve a timezone abbreviation to its real zone (with DST flag).
          abbr = dt.tzinfo
          dt = dt.replace(tzinfo=None)
          dt = cls(dt, abbr.zone, is_dst=abbr.dst)

        dt = cls(dt)

    return dt

  @classmethod
  def utcfromtimestamp(cls, timestamp):
    """Returns a datetime object of a given timestamp (in UTC)."""
    obj = datetime.datetime.utcfromtimestamp(timestamp)
    obj = pytz.utc.localize(obj)
    return cls(obj)

  @classmethod
  def fromtimestamp(cls, timestamp):
    """Returns a datetime object of a given timestamp (in local tz)."""
    d = cls.utcfromtimestamp(timestamp)
    return d.astimezone(localtz())

  @classmethod
  def utcnow(cls):
    """Return a new datetime representing UTC day and time."""
    obj = datetime.datetime.utcnow()
    obj = cls(obj, tzinfo=pytz.utc)
    return obj

  @classmethod
  def now(cls, tzinfo=None):
    """[tz] -> new datetime with tz's local day and time."""
    obj = cls.utcnow()
    if tzinfo is None:
      tzinfo = localtz()
    return obj.astimezone(tzinfo)

  # today() is an alias for now().
  today = now

  @staticmethod
  def fromordinal(ordinal):
    """Unsupported: an ordinal alone carries no timezone information."""
    raise SyntaxError("Not enough information to create a datetime_tz object "
                      "from an ordinal. Please use datetime.date.fromordinal")
class iterate(object):
  """Helpful iterators for working with datetime_tz objects."""

  @staticmethod
  def between(start, delta, end=None):
    """Return an iterator between this date till given end point.

    Example usage:
      >>> d = datetime_tz.smartparse("5 days ago")
      2008/05/12 11:45
      >>> for i in d.between(timedelta(days=1), datetime_tz.now()):
      >>>    print i
      2008/05/12 11:45
      2008/05/13 11:45
      2008/05/14 11:45
      2008/05/15 11:45
      2008/05/16 11:45

    Args:
      start: The date to start at.
      delta: The interval to iterate with.
      end: (Optional) Date to end at. If not given the iterator will never
           terminate.

    Yields:
      datetime_tz objects.
    """
    toyield = start
    # end is exclusive: yield values strictly less than end.
    while end is None or toyield < end:
      yield toyield
      toyield += delta

  @staticmethod
  def weeks(start, end=None):
    """Iterate over the weeks between the given datetime_tzs.

    Args:
      start: datetime_tz to start from.
      end: (Optional) Date to end at, if not given the iterator will never
           terminate.

    Returns:
      An iterator which generates datetime_tz objects a week apart.
    """
    return iterate.between(start, datetime.timedelta(days=7), end)

  @staticmethod
  def days(start, end=None):
    """Iterate over the days between the given datetime_tzs.

    Args:
      start: datetime_tz to start from.
      end: (Optional) Date to end at, if not given the iterator will never
           terminate.

    Returns:
      An iterator which generates datetime_tz objects a day apart.
    """
    return iterate.between(start, datetime.timedelta(days=1), end)

  @staticmethod
  def hours(start, end=None):
    """Iterate over the hours between the given datetime_tzs.

    Args:
      start: datetime_tz to start from.
      end: (Optional) Date to end at, if not given the iterator will never
           terminate.

    Returns:
      An iterator which generates datetime_tz objects an hour apart.
    """
    return iterate.between(start, datetime.timedelta(hours=1), end)

  @staticmethod
  def minutes(start, end=None):
    """Iterate over the minutes between the given datetime_tzs.

    Args:
      start: datetime_tz to start from.
      end: (Optional) Date to end at, if not given the iterator will never
           terminate.

    Returns:
      An iterator which generates datetime_tz objects a minute apart.
    """
    return iterate.between(start, datetime.timedelta(minutes=1), end)

  @staticmethod
  def seconds(start, end=None):
    """Iterate over the seconds between the given datetime_tzs.

    Args:
      start: datetime_tz to start from.
      end: (Optional) Date to end at, if not given the iterator will never
           terminate.

    Returns:
      An iterator which generates datetime_tz objects a second apart.
    """
    # Bug fix: this previously stepped by timedelta(minutes=1), making
    # seconds() behave identically to minutes().
    return iterate.between(start, datetime.timedelta(seconds=1), end)
def _wrap_method(name):
  """Wrap a method.

  Patch a method which might return a datetime.datetime to return a
  datetime_tz.datetime_tz instead.

  Args:
    name: The name of the method to patch
  """
  method = getattr(datetime.datetime, name)

  # Have to give the second argument as method has no __module__ option.
  @functools.wraps(method, ("__name__", "__doc__"), ())
  def wrapper(*args, **kw):
    r = method(*args, **kw)

    # Upgrade plain datetime results (but not subclasses already ours).
    if isinstance(r, datetime.datetime) and not isinstance(r, datetime_tz):
      r = datetime_tz(r)
    return r

  # Install the wrapper on datetime_tz, shadowing the inherited method.
  setattr(datetime_tz, name, wrapper)
for methodname in ["__add__", "__radd__", "__rsub__", "__sub__", "combine"]:
# Make sure we have not already got an override for this method
assert methodname not in datetime_tz.__dict__
_wrap_method(methodname)
__all__ = ['datetime_tz', 'detect_timezone', 'iterate', 'localtz',
'localtz_set', 'timedelta', '_detect_timezone_environ',
'_detect_timezone_etc_localtime', '_detect_timezone_etc_timezone',
'_detect_timezone_php']

View File

@ -0,0 +1,230 @@
#!/usr/bin/python2.4
# -*- coding: utf-8 -*-
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
"""
Common time zone acronyms/abbreviations for use with the datetime_tz module.
*WARNING*: There are lots of caveats when using this module which are listed
below.
CAVEAT 1: The acronyms/abbreviations are not globally unique, they are not even
unique within a region. For example, EST can mean any of,
Eastern Standard Time in Australia (which is 10 hour ahead of UTC)
Eastern Standard Time in North America (which is 5 hours behind UTC)
Where there are two abbreviations the more popular one will appear in the all
dictionary, while the less common one will only appear in that countries region
dictionary. IE If using all, EST will be mapped to Eastern Standard Time in
North America.
CAVEAT 2: Many of the acronyms don't map to a neat Oslon timezones. For example,
Eastern European Summer Time (EEDT) is used by many different countries in
Europe *at different times*! If the acronym does not map neatly to one zone it
is mapped to the Etc/GMT+-XX Oslon zone. This means that any date manipulations
can end up with idiot things like summer time in the middle of winter.
CAVEAT 3: The Summer/Standard time difference is really important! For an hour
each year it is needed to determine which time you are actually talking about.
2002-10-27 01:20:00 EST != 2002-10-27 01:20:00 EDT
"""
import datetime
import pytz
import pytz.tzfile
class tzabbr(datetime.tzinfo):
  """A timezone abbreviation.

  *WARNING*: This is not a tzinfo implementation! Trying to use this as tzinfo
  object will result in failure.  We inherit from datetime.tzinfo so we can get
  through the dateutil checks.
  """
  # The docstring makes the class body non-empty, so no `pass` is needed;
  # instances simply carry the attributes set by tzabbr_register.
# A "marker" tzinfo object which is used to signify an unknown timezone.
unknown = datetime.tzinfo(0)
regions = {'all': {}, 'military': {}}
# Create a special alias for the all and military regions
all = regions['all']
military = regions['military']
def tzabbr_register(abbr, name, region, zone, dst):
  """Register a new timezone abbreviation in the global registry.

  If another abbreviation with the same name has already been registered the
  new abbreviation will only be registered in the region specific dictionary.

  Args:
    abbr: Abbreviation string, e.g. "EST".
    name: Full human-readable name.
    region: Region dictionary key, e.g. u"North America".
    zone: Zone name string or tzinfo object the abbreviation maps to.
    dst: Whether the abbreviation denotes a daylight-saving variant.
  """
  newabbr = tzabbr()
  newabbr.abbr = abbr
  newabbr.name = name
  newabbr.region = region
  newabbr.zone = zone
  newabbr.dst = dst

  # First registration wins in the global "all" dictionary; later
  # duplicates only appear in their region dictionary.
  if abbr not in all:
    all[abbr] = newabbr

  if not region in regions:
    regions[region] = {}

  # Duplicate abbreviations within a single region are a programming error.
  assert abbr not in regions[region]
  regions[region][abbr] = newabbr
def tzinfos_create(use_region):
  """Create a tzinfos callback (for dateutil.parser) bound to a region.

  Args:
    use_region: Key into the module-level `regions` registry.

  Returns:
    A function tzinfos(abbr, offset) suitable for dateutil's tzinfos=.
  """
  abbrs = regions[use_region]

  def tzinfos(abbr, offset):
    if abbr:
      if abbr in abbrs:
        result = abbrs[abbr]
        if offset:
          # FIXME: Check the offset matches the abbreviation we just selected.
          pass
        return result
      else:
        raise ValueError, "Unknown timezone found %s" % abbr
    if offset == 0:
      return pytz.utc
    if offset:
      # offset is in seconds; pytz.FixedOffset takes minutes.
      return pytz.FixedOffset(offset/60)
    # No abbreviation and no offset: signal "unknown timezone".
    return unknown

  return tzinfos
# Create a special alias for the all tzinfos
tzinfos = tzinfos_create('all')
# Create the abbreviations.
# *WARNING*: Order matters!
tzabbr_register("A", u"Alpha Time Zone", u"Military", "Etc/GMT-1", False)
tzabbr_register("ACDT", u"Australian Central Daylight Time", u"Australia",
"Australia/Adelaide", True)
tzabbr_register("ACST", u"Australian Central Standard Time", u"Australia",
"Australia/Adelaide", False)
tzabbr_register("ADT", u"Atlantic Daylight Time", u"North America",
"America/Halifax", True)
tzabbr_register("AEDT", u"Australian Eastern Daylight Time", u"Australia",
"Australia/Sydney", True)
tzabbr_register("AEST", u"Australian Eastern Standard Time", u"Australia",
"Australia/Sydney", False)
tzabbr_register("AKDT", u"Alaska Daylight Time", u"North America",
"US/Alaska", True)
tzabbr_register("AKST", u"Alaska Standard Time", u"North America",
"US/Alaska", False)
tzabbr_register("AST", u"Atlantic Standard Time", u"North America",
"America/Halifax", False)
tzabbr_register("AWDT", u"Australian Western Daylight Time", u"Australia",
"Australia/West", True)
tzabbr_register("AWST", u"Australian Western Standard Time", u"Australia",
"Australia/West", False)
tzabbr_register("B", u"Bravo Time Zone", u"Military", "Etc/GMT-2", False)
tzabbr_register("BST", u"British Summer Time", u"Europe", "Europe/London", True)
tzabbr_register("C", u"Charlie Time Zone", u"Military", "Etc/GMT-2", False)
tzabbr_register("CDT", u"Central Daylight Time", u"North America",
"US/Central", True)
tzabbr_register("CEDT", u"Central European Daylight Time", u"Europe",
"Etc/GMT+2", True)
tzabbr_register("CEST", u"Central European Summer Time", u"Europe",
"Etc/GMT+2", True)
tzabbr_register("CET", u"Central European Time", u"Europe", "Etc/GMT+1", False)
tzabbr_register("CST", u"Central Standard Time", u"North America",
"US/Central", False)
tzabbr_register("CXT", u"Christmas Island Time", u"Australia",
"Indian/Christmas", False)
tzabbr_register("D", u"Delta Time Zone", u"Military", "Etc/GMT-2", False)
tzabbr_register("E", u"Echo Time Zone", u"Military", "Etc/GMT-2", False)
tzabbr_register("EDT", u"Eastern Daylight Time", u"North America",
"US/Eastern", True)
tzabbr_register("EEDT", u"Eastern European Daylight Time", u"Europe",
"Etc/GMT+3", True)
tzabbr_register("EEST", u"Eastern European Summer Time", u"Europe",
"Etc/GMT+3", True)
tzabbr_register("EET", u"Eastern European Time", u"Europe", "Etc/GMT+2", False)
tzabbr_register("EST", u"Eastern Standard Time", u"North America",
"US/Eastern", False)
tzabbr_register("F", u"Foxtrot Time Zone", u"Military", "Etc/GMT-6", False)
tzabbr_register("G", u"Golf Time Zone", u"Military", "Etc/GMT-7", False)
tzabbr_register("GMT", u"Greenwich Mean Time", u"Europe", pytz.utc, False)
tzabbr_register("H", u"Hotel Time Zone", u"Military", "Etc/GMT-8", False)
#tzabbr_register("HAA", u"Heure Avancée de l'Atlantique", u"North America", u"UTC - 3 hours")
#tzabbr_register("HAC", u"Heure Avancée du Centre", u"North America", u"UTC - 5 hours")
tzabbr_register("HADT", u"Hawaii-Aleutian Daylight Time", u"North America",
"Pacific/Honolulu", True)
#tzabbr_register("HAE", u"Heure Avancée de l'Est", u"North America", u"UTC - 4 hours")
#tzabbr_register("HAP", u"Heure Avancée du Pacifique", u"North America", u"UTC - 7 hours")
#tzabbr_register("HAR", u"Heure Avancée des Rocheuses", u"North America", u"UTC - 6 hours")
tzabbr_register("HAST", u"Hawaii-Aleutian Standard Time", u"North America",
"Pacific/Honolulu", False)
#tzabbr_register("HAT", u"Heure Avancée de Terre-Neuve", u"North America", u"UTC - 2:30 hours")
#tzabbr_register("HAY", u"Heure Avancée du Yukon", u"North America", u"UTC - 8 hours")
tzabbr_register("HDT", u"Hawaii Daylight Time", u"North America",
"Pacific/Honolulu", True)
#tzabbr_register("HNA", u"Heure Normale de l'Atlantique", u"North America", u"UTC - 4 hours")
#tzabbr_register("HNC", u"Heure Normale du Centre", u"North America", u"UTC - 6 hours")
#tzabbr_register("HNE", u"Heure Normale de l'Est", u"North America", u"UTC - 5 hours")
#tzabbr_register("HNP", u"Heure Normale du Pacifique", u"North America", u"UTC - 8 hours")
#tzabbr_register("HNR", u"Heure Normale des Rocheuses", u"North America", u"UTC - 7 hours")
#tzabbr_register("HNT", u"Heure Normale de Terre-Neuve", u"North America", u"UTC - 3:30 hours")
#tzabbr_register("HNY", u"Heure Normale du Yukon", u"North America", u"UTC - 9 hours")
tzabbr_register("HST", u"Hawaii Standard Time", u"North America",
"Pacific/Honolulu", False)
tzabbr_register("I", u"India Time Zone", u"Military", "Etc/GMT-9", False)
tzabbr_register("IST", u"Irish Summer Time", u"Europe", "Europe/Dublin", True)
tzabbr_register("K", u"Kilo Time Zone", u"Military", "Etc/GMT-10", False)
tzabbr_register("L", u"Lima Time Zone", u"Military", "Etc/GMT-11", False)
tzabbr_register("M", u"Mike Time Zone", u"Military", "Etc/GMT-12", False)
tzabbr_register("MDT", u"Mountain Daylight Time", u"North America",
"US/Mountain", True)
#tzabbr_register("MESZ", u"Mitteleuroäische Sommerzeit", u"Europe", u"UTC + 2 hours")
#tzabbr_register("MEZ", u"Mitteleuropäische Zeit", u"Europe", u"UTC + 1 hour")
tzabbr_register("MSD", u"Moscow Daylight Time", u"Europe",
"Europe/Moscow", True)
tzabbr_register("MSK", u"Moscow Standard Time", u"Europe",
"Europe/Moscow", False)
tzabbr_register("MST", u"Mountain Standard Time", u"North America",
"US/Mountain", False)
tzabbr_register("N", u"November Time Zone", u"Military", "Etc/GMT+1", False)
tzabbr_register("NDT", u"Newfoundland Daylight Time", u"North America",
"America/St_Johns", True)
tzabbr_register("NFT", u"Norfolk (Island) Time", u"Australia",
"Pacific/Norfolk", False)
tzabbr_register("NST", u"Newfoundland Standard Time", u"North America",
"America/St_Johns", False)
tzabbr_register("O", u"Oscar Time Zone", u"Military", "Etc/GMT+2", False)
tzabbr_register("P", u"Papa Time Zone", u"Military", "Etc/GMT+3", False)
tzabbr_register("PDT", u"Pacific Daylight Time", u"North America",
"US/Pacific", True)
tzabbr_register("PST", u"Pacific Standard Time", u"North America",
"US/Pacific", False)
tzabbr_register("Q", u"Quebec Time Zone", u"Military", "Etc/GMT+4", False)
tzabbr_register("R", u"Romeo Time Zone", u"Military", "Etc/GMT+5", False)
tzabbr_register("S", u"Sierra Time Zone", u"Military", "Etc/GMT+6", False)
tzabbr_register("T", u"Tango Time Zone", u"Military", "Etc/GMT+7", False)
tzabbr_register("U", u"Uniform Time Zone", u"Military", "Etc/GMT+8", False)
tzabbr_register("UTC", u"Coordinated Universal Time", u"Europe",
pytz.utc, False)
tzabbr_register("V", u"Victor Time Zone", u"Military", "Etc/GMT+9", False)
tzabbr_register("W", u"Whiskey Time Zone", u"Military", "Etc/GMT+10", False)
tzabbr_register("WDT", u"Western Daylight Time", u"Australia",
"Australia/West", True)
tzabbr_register("WEDT", u"Western European Daylight Time", u"Europe",
"Etc/GMT+1", True)
tzabbr_register("WEST", u"Western European Summer Time", u"Europe",
"Etc/GMT+1", True)
tzabbr_register("WET", u"Western European Time", u"Europe", pytz.utc, False)
tzabbr_register("WST", u"Western Standard Time", u"Australia",
"Australia/West", False)
tzabbr_register("X", u"X-ray Time Zone", u"Military", "Etc/GMT+11", False)
tzabbr_register("Y", u"Yankee Time Zone", u"Military", "Etc/GMT+12", False)
tzabbr_register("Z", u"Zulu Time Zone", u"Military", pytz.utc, False)

View File

@ -1,36 +1,25 @@
import os import os
import errno
from math import log from math import log
def human_size(num): def human_size(num):
"""Human friendly file size""" """Human friendly file size"""
unit_list = list(zip(['bytes', 'kiB', 'MiB', 'GiB', 'TiB'], unit_list = zip(['bytes', 'kiB', 'MiB', 'GiB', 'TiB'], [0, 0, 1, 2, 2])
[0, 0, 1, 2, 2])) if num > 1:
if num == 0: exponent = min(int(log(num, 1024)), len(unit_list) - 1)
quotient = float(num) / 1024**exponent
unit, num_decimals = unit_list[exponent]
format_string = '{:.%sf} {}' % (num_decimals)
return format_string.format(quotient, unit)
if num == 0: # pragma: no cover
return '0 bytes' return '0 bytes'
if num == 1: if num == 1: # pragma: no cover
return '1 byte' return '1 byte'
exponent = min(int(log(num, 1024)), len(unit_list) - 1)
quotient = float(num) / 1024**exponent
unit, num_decimals = unit_list[exponent]
format_string = '{:.%sf} {}' % (num_decimals)
return format_string.format(quotient, unit)
def du(path): def du(path):
"""Like du -sb, returns total size of path in bytes. Ignore """Like du -sb, returns total size of path in bytes."""
errors that might occur if we encounter broken symlinks or size = os.path.getsize(path)
files in the process of being removed.""" if os.path.isdir(path):
try: for thisfile in os.listdir(path):
st = os.stat(path) filepath = os.path.join(path, thisfile)
size = st.st_blocks * 512 size += du(filepath)
if os.path.isdir(path): return size
for thisfile in os.listdir(path):
filepath = os.path.join(path, thisfile)
size += du(filepath)
return size
except OSError as e:
if e.errno != errno.ENOENT:
raise
return 0

View File

@ -1,20 +1,49 @@
# Implementation of hole punching via fallocate, if the OS # Implementation of hole punching via fallocate, if the OS
# and filesystem support it. # and filesystem support it.
import fallocate try:
import os
import ctypes
import ctypes.util
def make_fallocate():
libc_name = ctypes.util.find_library('c')
libc = ctypes.CDLL(libc_name, use_errno=True)
def punch_hole(filename, offset, length, ignore_errors=True): _fallocate = libc.fallocate
_fallocate.restype = ctypes.c_int
_fallocate.argtypes = [ ctypes.c_int, ctypes.c_int,
ctypes.c_int64, ctypes.c_int64 ]
del libc
del libc_name
def fallocate(fd, mode, offset, len_):
res = _fallocate(fd, mode, offset, len_)
if res != 0: # pragma: no cover
errno = ctypes.get_errno()
raise IOError(errno, os.strerror(errno))
return fallocate
fallocate = make_fallocate()
del make_fallocate
except Exception: # pragma: no cover
fallocate = None
FALLOC_FL_KEEP_SIZE = 0x01
FALLOC_FL_PUNCH_HOLE = 0x02
def punch_hole(filename, offset, length, ignore_errors = True):
"""Punch a hole in the file. This isn't well supported, so errors """Punch a hole in the file. This isn't well supported, so errors
are ignored by default.""" are ignored by default."""
try: try:
if fallocate is None: # pragma: no cover
raise IOError("fallocate not available")
with open(filename, "r+") as f: with open(filename, "r+") as f:
fallocate.fallocate( fallocate(f.fileno(),
f.fileno(), FALLOC_FL_KEEP_SIZE | FALLOC_FL_PUNCH_HOLE,
offset, offset, length)
length, except IOError: # pragma: no cover
fallocate.FALLOC_FL_KEEP_SIZE | fallocate.FALLOC_FL_PUNCH_HOLE)
except Exception:
if ignore_errors: if ignore_errors:
return return
raise raise

View File

@ -1,168 +0,0 @@
"""Interval. Like nilmdb.server.interval, but re-implemented here
in plain Python so clients have easier access to it, and with a few
helper functions.
Intervals are half-open, ie. they include data points with timestamps
[start, end)
"""
import nilmdb.utils.time
import nilmdb.utils.iterator
class IntervalError(Exception):
    """Error due to interval overlap, etc"""
    # Docstring-only body; no `pass` statement required.
# Interval
class Interval:
    """Represents an interval of time."""

    def __init__(self, start, end):
        """
        'start' and 'end' are arbitrary numbers that represent time
        """
        # Intervals are half-open [start, end), so a zero-width (or
        # inverted) interval is meaningless and rejected outright.
        if start >= end:
            raise IntervalError("start %s must precede end %s" % (start, end))
        self.start = start
        self.end = end

    def _bounds(self):
        # Ordering key used by all comparisons: start first, then end.
        return (self.start, self.end)

    def __repr__(self):
        return "%s(%r, %r)" % (self.__class__.__name__, self.start, self.end)

    def __str__(self):
        return ("[" + nilmdb.utils.time.timestamp_to_string(self.start) +
                " -> " + nilmdb.utils.time.timestamp_to_string(self.end) + ")")

    def human_string(self):
        return ("[ " + nilmdb.utils.time.timestamp_to_human(self.start) +
                " -> " + nilmdb.utils.time.timestamp_to_human(self.end) + " ]")

    # Compare two intervals.  If non-equal, order by start then end
    def __lt__(self, other):
        return self._bounds() < other._bounds()

    def __gt__(self, other):
        return self._bounds() > other._bounds()

    def __le__(self, other):
        return self._bounds() <= other._bounds()

    def __ge__(self, other):
        return self._bounds() >= other._bounds()

    def __eq__(self, other):
        return self._bounds() == other._bounds()

    def __ne__(self, other):
        return self._bounds() != other._bounds()

    def intersects(self, other):
        """Return True if two Interval objects intersect"""
        if not isinstance(other, Interval):
            raise TypeError("need an Interval")
        # Half-open intervals overlap unless one ends at or before the
        # point where the other begins.
        return not (self.end <= other.start or self.start >= other.end)

    def subset(self, start, end):
        """Return a new Interval that is a subset of this one"""
        # A subclass that tracks additional data might override this.
        if self.start <= start and end <= self.end:
            return Interval(start, end)
        raise IntervalError("not a subset")
def _interval_math_helper(a, b, op, subset=True):
    """Helper for set_difference, intersection functions,
    to compute interval subsets based on a math operator on ranges
    present in A and B.  Subsets are computed from A, or new intervals
    are generated if subset = False."""
    # Turn each interval stream into a stream of (timestamp, tag,
    # interval) events.  Tags are chosen so that at equal timestamps,
    # starts sort before ends, and A's events before B's.
    A_START, B_START, A_END, B_END = 0, 1, 2, 3
    def events(intervals, start_tag, end_tag):
        for ival in intervals:
            yield ival.start, start_tag, ival
            yield ival.end, end_tag, ival
    merged = nilmdb.utils.iterator.imerge(
        events(iter(a), A_START, A_END),
        events(iter(b), B_START, B_END))
    # Sweep through the merged event stream, tracking whether we are
    # currently inside an A interval and/or a B interval, and emit an
    # output interval whenever op(inside_a, inside_b) transitions from
    # true back to false.
    inside_a = False
    inside_b = False
    current_a = None
    region_start = None
    for (ts, tag, ival) in merged:
        if tag == A_START:
            current_a = ival
            inside_a = True
        elif tag == B_START:
            inside_b = True
        elif tag == A_END:
            inside_a = False
        else:  # B_END
            inside_b = False
        if op(inside_a, inside_b):
            if region_start is None:
                region_start = ts
        else:
            # Skip zero-width output regions.
            if region_start is not None and region_start != ts:
                if subset:
                    yield current_a.subset(region_start, ts)
                else:
                    yield Interval(region_start, ts)
            region_start = None
def set_difference(a, b):
    """
    Compute the difference (a \\ b) between the intervals in 'a' and
    the intervals in 'b'; i.e., the ranges that are present in 'a'
    but not 'b'.

    'a' and 'b' must both be iterables.

    Returns a generator that yields each interval in turn.
    Output intervals are built as subsets of the intervals in the
    first argument (a).
    """
    def difference_op(in_a, in_b):
        return in_a and not in_b
    return _interval_math_helper(a, b, difference_op)
def intersection(a, b):
    """
    Compute the intersection between the intervals in 'a' and the
    intervals in 'b'; i.e., the ranges that are present in both 'a'
    and 'b'.

    'a' and 'b' must both be iterables.

    Returns a generator that yields each interval in turn.
    Output intervals are built as subsets of the intervals in the
    first argument (a).
    """
    def intersection_op(in_a, in_b):
        return in_a and in_b
    return _interval_math_helper(a, b, intersection_op)
def optimize(it):
    """
    Given an iterable 'it' with intervals, optimize them by joining
    together intervals that are adjacent in time, and return a generator
    that yields the new intervals.
    """
    pending = None
    for current in it:
        if pending is not None:
            if pending.end == current.start:
                # Adjacent: absorb the pending interval by extending
                # the current one backwards (mutates 'current', as the
                # original implementation did).
                current.start = pending.start
            else:
                # Gap before 'current'; the pending interval is final.
                yield pending
        pending = current
    if pending is not None:
        yield pending

View File

@ -1,38 +0,0 @@
# Misc iterator tools
# Iterator merging, based on http://code.activestate.com/recipes/491285/
import heapq
def imerge(*iterables):
    '''Merge multiple sorted inputs into a single sorted output.

    Equivalent to: sorted(itertools.chain(*iterables)) when each
    input is already sorted, but lazy: values are pulled from the
    inputs only as needed.

    >>> list(imerge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25]))
    [0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25]
    '''
    # The previous hand-rolled implementation reached into heapq's
    # private _siftup and stored [value, nexter] pairs on the heap;
    # when two inputs produced equal values, the heap fell back to
    # comparing the bound 'nexter' methods, which raises TypeError on
    # Python 3.  heapq.merge is the public API for exactly this k-way
    # merge and breaks ties safely with an internal index.
    return heapq.merge(*iterables)

View File

@ -0,0 +1,100 @@
import Queue
import threading
import sys
import contextlib
# This file provides a context manager that converts a function
# that takes a callback into a generator that returns an iterable.
# This is done by running the function in a new thread.
# Based partially on http://stackoverflow.com/questions/9968592/
class IteratorizerThread(threading.Thread):
    """Thread that runs 'function', handing it a callback; every value
    the function passes to the callback is forwarded to the consumer
    through 'queue'.

    Queue entries are tagged tuples:
      (0, retval)    the function finished and returned retval
      (1, data)      the callback was passed data
      (2, exc_info)  an exception occurred
    """
    def __init__(self, queue, function, curl_hack):
        """
        function: function to execute, which takes the
        callback (provided by this class) as an argument
        """
        threading.Thread.__init__(self)
        self.name = "Iteratorizer-" + function.__name__ + "-" + self.name
        self.function = function
        self.queue = queue
        # Set to True by the consumer to ask this thread to stop.
        self.die = False
        self.curl_hack = curl_hack

    def callback(self, data):
        try:
            if self.die:
                raise Exception()  # consumer requested termination
            self.queue.put((1, data))
        except:
            if not self.curl_hack:
                raise
            # The pycurl extension module unconditionally prints
            # exceptions raised in callbacks instead of propagating
            # them, so report the error through the queue and return a
            # value that tells curl to abort.  (-1 would be best, in
            # case we were given 0 bytes, but the extension doesn't
            # support that.)
            self.queue.put((2, sys.exc_info()))
            return 0

    def run(self):
        try:
            retval = self.function(self.callback)
        except:
            self.queue.put((2, sys.exc_info()))
        else:
            self.queue.put((0, retval))
@contextlib.contextmanager
def Iteratorizer(function, curl_hack = False):
    """
    Context manager that takes a function expecting a callback,
    and provides an iterable that yields the values passed to that
    callback instead.

    function: function to execute, which takes a callback
    (provided by this context manager) as an argument

    curl_hack: passed through to IteratorizerThread; when True, the
    callback reports errors by returning 0 instead of raising.

    with iteratorizer(func) as it:
        for i in it:
            print 'callback was passed:', i
        print 'function returned:', it.retval

    NOTE(review): this is Python 2 code (Queue module, three-argument
    raise, .next(), isAlive()) and will not run under Python 3 as-is.
    """
    # maxsize = 1 means the producer thread blocks in put() after one
    # pending item, keeping it roughly in lockstep with the consumer.
    queue = Queue.Queue(maxsize = 1)
    thread = IteratorizerThread(queue, function, curl_hack)
    thread.daemon = True
    thread.start()
    class iteratorizer_gen(object):
        # Iterator handed to the 'with' body.  After iteration ends
        # normally, 'retval' holds the wrapped function's return value.
        def __init__(self, queue):
            self.queue = queue
            self.retval = None
        def __iter__(self):
            return self
        def next(self):
            # Queue entries are (type, payload) tuples produced by
            # IteratorizerThread: 0 = return value, 1 = data,
            # 2 = sys.exc_info() from an exception.
            (typ, data) = self.queue.get()
            if typ == 0:
                # function has returned
                self.retval = data
                raise StopIteration
            elif typ == 1:
                # data is available
                return data
            else:
                # callback raised an exception; re-raise it here with
                # its original traceback (Python 2 three-argument raise)
                raise data[0], data[1], data[2]
    try:
        yield iteratorizer_gen(queue)
    finally:
        # Ask the thread to die, if it's still running.
        thread.die = True
        # Keep draining the queue so the thread can wake from a
        # blocked put() on the size-1 queue and notice 'die'.
        while thread.isAlive():
            try:
                queue.get(True, 0.01)
            except: # pragma: no cover
                pass

View File

@ -1,22 +0,0 @@
# File locking
import fcntl
import errno
def exclusive_lock(f):
    """Try to take a non-blocking exclusive flock() on file object 'f'.
    Returns True if the lock was acquired, False if it is already held
    elsewhere."""
    try:
        fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
        return True
    except IOError as e:
        # EACCES / EAGAIN indicate the lock is held by someone else;
        # anything else is a real error and propagates.
        if e.errno not in (errno.EACCES, errno.EAGAIN):
            raise
        return False
def exclusive_unlock(f):
    """Release any flock() lock held on file object 'f'."""
    fd = f.fileno()
    fcntl.flock(fd, fcntl.LOCK_UN)

View File

@ -6,11 +6,10 @@
import collections import collections
import decorator import decorator
def lru_cache(size = 10, onremove = None, keys = slice(None)):
def lru_cache(size=10, onremove=None, keys=slice(None)):
"""Least-recently-used cache decorator. """Least-recently-used cache decorator.
@lru_cache(size=10, onremove=None) @lru_cache(size = 10, onevict = None)
def f(...): def f(...):
pass pass
@ -27,7 +26,7 @@ def lru_cache(size=10, onremove=None, keys=slice(None)):
""" """
def decorate(func): def decorate(func):
cache = collections.OrderedDict() # order: least- to most-recent cache = collections.OrderedDict() # order: least- to most-recent
def evict(value): def evict(value):
if onremove: if onremove:
@ -44,8 +43,8 @@ def lru_cache(size=10, onremove=None, keys=slice(None)):
value = orig(*args) value = orig(*args)
orig.cache_misses += 1 orig.cache_misses += 1
if len(cache) >= size: if len(cache) >= size:
evict(cache.popitem(0)[1]) # evict LRU cache entry evict(cache.popitem(0)[1]) # evict LRU cache entry
cache[key] = value # (re-)insert this key at end cache[key] = value # (re-)insert this key at end
return value return value
def cache_remove(*args): def cache_remove(*args):
@ -54,17 +53,14 @@ def lru_cache(size=10, onremove=None, keys=slice(None)):
if key in cache: if key in cache:
evict(cache.pop(key)) evict(cache.pop(key))
else: else:
if cache: if len(cache) > 0 and len(args) != len(cache.iterkeys().next()):
if len(args) != len(next(iter(cache.keys()))): raise KeyError("trying to remove from LRU cache, but "
raise KeyError("trying to remove from LRU cache, but " "number of arguments doesn't match the "
"number of arguments doesn't match the " "cache key length")
"cache key length")
def cache_remove_all(): def cache_remove_all():
nonlocal cache
for key in cache: for key in cache:
evict(cache[key]) evict(cache.pop(key))
cache = collections.OrderedDict()
def cache_info(): def cache_info():
return (func.cache_hits, func.cache_misses) return (func.cache_hits, func.cache_misses)

View File

@ -1,10 +1,9 @@
from nilmdb.utils.printf import *
import sys import sys
import inspect import inspect
import decorator import decorator
from nilmdb.utils.printf import fprintf
def must_close(errorfile = sys.stderr, wrap_verify = False):
def must_close(errorfile=sys.stderr, wrap_verify=False):
"""Class decorator that warns on 'errorfile' at deletion time if """Class decorator that warns on 'errorfile' at deletion time if
the class's close() member wasn't called. the class's close() member wasn't called.
@ -13,17 +12,12 @@ def must_close(errorfile=sys.stderr, wrap_verify=False):
already been called.""" already been called."""
def class_decorator(cls): def class_decorator(cls):
def is_method_or_function(x):
return inspect.ismethod(x) or inspect.isfunction(x)
def wrap_class_method(wrapper): def wrap_class_method(wrapper):
try: try:
orig = getattr(cls, wrapper.__name__) orig = getattr(cls, wrapper.__name__).im_func
except AttributeError: except:
orig = lambda x: None orig = lambda x: None
if is_method_or_function(orig): setattr(cls, wrapper.__name__, decorator.decorator(wrapper, orig))
setattr(cls, wrapper.__name__,
decorator.decorator(wrapper, orig))
@wrap_class_method @wrap_class_method
def __init__(orig, self, *args, **kwargs): def __init__(orig, self, *args, **kwargs):
@ -34,13 +28,10 @@ def must_close(errorfile=sys.stderr, wrap_verify=False):
@wrap_class_method @wrap_class_method
def __del__(orig, self, *args, **kwargs): def __del__(orig, self, *args, **kwargs):
try: if "_must_close" in self.__dict__:
if "_must_close" in self.__dict__: fprintf(errorfile, "error: %s.close() wasn't called!\n",
fprintf(errorfile, "error: %s.close() wasn't called!\n", self.__class__.__name__)
self.__class__.__name__) return orig(self, *args, **kwargs)
return orig(self, *args, **kwargs)
except:
pass
@wrap_class_method @wrap_class_method
def close(orig, self, *args, **kwargs): def close(orig, self, *args, **kwargs):
@ -51,21 +42,20 @@ def must_close(errorfile=sys.stderr, wrap_verify=False):
# Optionally wrap all other functions # Optionally wrap all other functions
def verifier(orig, self, *args, **kwargs): def verifier(orig, self, *args, **kwargs):
if ("_must_close" not in self.__dict__ and if ("_must_close" not in self.__dict__ and
"_must_close_initialized" in self.__dict__): "_must_close_initialized" in self.__dict__):
raise AssertionError("called " + str(orig) + " after close") raise AssertionError("called " + str(orig) + " after close")
return orig(self, *args, **kwargs) return orig(self, *args, **kwargs)
if wrap_verify: if wrap_verify:
for (name, method) in inspect.getmembers(cls, for (name, method) in inspect.getmembers(cls, inspect.ismethod):
is_method_or_function): # Skip class methods
if method.__self__ is not None:
continue
# Skip some methods # Skip some methods
if name in ["__del__", "__init__"]: if name in [ "__del__", "__init__" ]:
continue continue
# Set up wrapper # Set up wrapper
if inspect.ismethod(method): setattr(cls, name, decorator.decorator(verifier,
func = method.__func__ method.im_func))
else:
func = method
setattr(cls, name, decorator.decorator(verifier, func))
return cls return cls
return class_decorator return class_decorator

View File

@ -1,13 +1,9 @@
"""printf, fprintf, sprintf""" """printf, fprintf, sprintf"""
from __future__ import print_function
def printf(_str, *args): def printf(_str, *args):
print(_str % args, end='') print(_str % args, end='')
def fprintf(_file, _str, *args): def fprintf(_file, _str, *args):
print(_str % args, end='', file=_file) print(_str % args, end='', file=_file)
def sprintf(_str, *args): def sprintf(_str, *args):
return (_str % args) return (_str % args)

View File

@ -1,6 +1,10 @@
import queue import Queue
import threading import threading
import sys import sys
import decorator
import inspect
import types
import functools
# This file provides a class that will wrap an object and serialize # This file provides a class that will wrap an object and serialize
# all calls to its methods. All calls to that object will be queued # all calls to its methods. All calls to that object will be queued
@ -9,7 +13,6 @@ import sys
# Based partially on http://stackoverflow.com/questions/2642515/ # Based partially on http://stackoverflow.com/questions/2642515/
class SerializerThread(threading.Thread): class SerializerThread(threading.Thread):
"""Thread that retrieves call information from the queue, makes the """Thread that retrieves call information from the queue, makes the
call, and returns the results.""" call, and returns the results."""
@ -27,7 +30,7 @@ class SerializerThread(threading.Thread):
exception = None exception = None
result = None result = None
try: try:
result = func(*args, **kwargs) # wrapped result = func(*args, **kwargs) # wrapped
except: except:
exception = sys.exc_info() exception = sys.exc_info()
# Ensure we delete these before returning a result, so # Ensure we delete these before returning a result, so
@ -37,7 +40,6 @@ class SerializerThread(threading.Thread):
result_queue.put((exception, result)) result_queue.put((exception, result))
del exception, result del exception, result
def serializer_proxy(obj_or_type): def serializer_proxy(obj_or_type):
"""Wrap the given object or type in a SerializerObjectProxy. """Wrap the given object or type in a SerializerObjectProxy.
@ -47,88 +49,61 @@ def serializer_proxy(obj_or_type):
The proxied requests, including instantiation, are performed in a The proxied requests, including instantiation, are performed in a
single thread and serialized between caller threads. single thread and serialized between caller threads.
""" """
class SerializerCallProxy(): class SerializerCallProxy(object):
def __init__(self, call_queue, func, objectproxy): def __init__(self, call_queue, func, objectproxy):
self.call_queue = call_queue self.call_queue = call_queue
self.func = func self.func = func
# Need to hold a reference to object proxy so it doesn't # Need to hold a reference to object proxy so it doesn't
# go away (and kill the thread) until after get called. # go away (and kill the thread) until after get called.
self.objectproxy = objectproxy self.objectproxy = objectproxy
def __call__(self, *args, **kwargs): def __call__(self, *args, **kwargs):
result_queue = queue.Queue() result_queue = Queue.Queue()
self.call_queue.put((result_queue, self.func, args, kwargs)) self.call_queue.put((result_queue, self.func, args, kwargs))
(exc_info, result) = result_queue.get() ( exc_info, result ) = result_queue.get()
if exc_info is None: if exc_info is None:
return result return result
else: else:
raise exc_info[1].with_traceback(exc_info[2]) raise exc_info[0], exc_info[1], exc_info[2]
class SerializerObjectProxy(): class SerializerObjectProxy(object):
def __init__(self, obj_or_type, *args, **kwargs): def __init__(self, obj_or_type, *args, **kwargs):
self.__object = obj_or_type self.__object = obj_or_type
if isinstance(obj_or_type, type): try:
classname = obj_or_type.__name__ if type(obj_or_type) in (types.TypeType, types.ClassType):
else: classname = obj_or_type.__name__
classname = obj_or_type.__class__.__name__ else:
self.__call_queue = queue.Queue() classname = obj_or_type.__class__.__name__
except AttributeError: # pragma: no cover
classname = "???"
self.__call_queue = Queue.Queue()
self.__thread = SerializerThread(classname, self.__call_queue) self.__thread = SerializerThread(classname, self.__call_queue)
self.__thread.daemon = True self.__thread.daemon = True
self.__thread.start() self.__thread.start()
self._thread_safe = True self._thread_safe = True
def __getattr__(self, key): def __getattr__(self, key):
# If the attribute is a function, we want to return a if key.startswith("_SerializerObjectProxy__"): # pragma: no cover
# proxy that will perform the call through the serializer raise AttributeError
# when called. Otherwise, we want to return the value
# directly. This means we need to grab the attribute once,
# and therefore self.__object.__getattr__ may be called
# in an unsafe way, from the caller's thread.
attr = getattr(self.__object, key) attr = getattr(self.__object, key)
if not callable(attr): if not callable(attr):
# It's not callable, so perform the getattr from within
# the serializer thread, then return its value.
# That may differ from the "attr" value we just grabbed
# from here, due to forced ordering in the serializer.
getter = SerializerCallProxy(self.__call_queue, getattr, self) getter = SerializerCallProxy(self.__call_queue, getattr, self)
return getter(self.__object, key) return getter(self.__object, key)
else: r = SerializerCallProxy(self.__call_queue, attr, self)
# It is callable, so return an object that will proxy through return r
# the serializer when called.
r = SerializerCallProxy(self.__call_queue, attr, self)
return r
# For an interable object, on __iter__(), save the object's
# iterator and return this proxy. On next(), call the object's
# iterator through this proxy.
def __iter__(self):
attr = getattr(self.__object, "__iter__")
self.__iter = SerializerCallProxy(self.__call_queue, attr, self)()
return self
def __next__(self):
return SerializerCallProxy(self.__call_queue,
self.__iter.__next__, self)()
def __getitem__(self, key):
return self.__getattr__("__getitem__")(key)
def __call__(self, *args, **kwargs): def __call__(self, *args, **kwargs):
"""Call this to instantiate the type, if a type was passed """Call this to instantiate the type, if a type was passed
to serializer_proxy. Otherwise, pass the call through.""" to serializer_proxy. Otherwise, pass the call through."""
ret = SerializerCallProxy(self.__call_queue, ret = SerializerCallProxy(self.__call_queue,
self.__object, self)(*args, **kwargs) self.__object, self)(*args, **kwargs)
if isinstance(self.__object, type): if type(self.__object) in (types.TypeType, types.ClassType):
# Instantiation # Instantiation
self.__object = ret self.__object = ret
return self return self
return ret return ret
def __del__(self): def __del__(self):
try: self.__call_queue.put((None, None, None, None))
# Signal thread to exit, but don't wait for it. self.__thread.join()
self.__call_queue.put((None, None, None, None))
except:
pass
return SerializerObjectProxy(obj_or_type) return SerializerObjectProxy(obj_or_type)

View File

@ -1,19 +0,0 @@
import re
def sort_human(items, key=None):
"""Human-friendly sort (/stream/2 before /stream/10)"""
def to_num(val):
try:
return int(val)
except Exception:
return val
def human_key(text):
if key:
text = key(text)
# Break into character and numeric chunks.
chunks = re.split(r'([0-9]+)', text)
return [to_num(c) for c in chunks]
return sorted(items, key=human_key)

View File

@ -1,25 +1,26 @@
from nilmdb.utils.printf import *
import threading import threading
from nilmdb.utils.printf import sprintf import warnings
import types
def verify_proxy(obj_or_type, exception = False, check_thread = True,
def verify_proxy(obj_or_type, check_thread=True, check_concurrent = True):
check_concurrent=True):
"""Wrap the given object or type in a VerifyObjectProxy. """Wrap the given object or type in a VerifyObjectProxy.
Returns a VerifyObjectProxy that proxies all method calls to the Returns a VerifyObjectProxy that proxies all method calls to the
given object, as well as attribute retrievals. given object, as well as attribute retrievals.
When calling methods, the following checks are performed. On When calling methods, the following checks are performed. If
failure, an exception is raised. exception is True, an exception is raised. Otherwise, a warning
is printed.
check_thread = True # Fail if two different threads call methods. check_thread = True # Warn/fail if two different threads call methods.
check_concurrent = True # Fail if two functions are concurrently check_concurrent = True # Warn/fail if two functions are concurrently
# run through this proxy # run through this proxy
""" """
class Namespace(): class Namespace(object):
pass pass
class VerifyCallProxy(object):
class VerifyCallProxy():
def __init__(self, func, parent_namespace): def __init__(self, func, parent_namespace):
self.func = func self.func = func
self.parent_namespace = parent_namespace self.parent_namespace = parent_namespace
@ -41,16 +42,22 @@ def verify_proxy(obj_or_type, check_thread=True,
" but %s called %s.%s", " but %s called %s.%s",
p.thread.name, p.classname, p.thread_callee, p.thread.name, p.classname, p.thread_callee,
this.name, p.classname, callee) this.name, p.classname, callee)
raise AssertionError(err) if exception:
raise AssertionError(err)
else: # pragma: no cover
warnings.warn(err)
need_concur_unlock = False need_concur_unlock = False
if check_concurrent: if check_concurrent:
if not p.concur_lock.acquire(False): if p.concur_lock.acquire(False) == False:
err = sprintf("unsafe concurrency: %s called %s.%s " err = sprintf("unsafe concurrency: %s called %s.%s "
"while %s is still in %s.%s", "while %s is still in %s.%s",
this.name, p.classname, callee, this.name, p.classname, callee,
p.concur_tname, p.classname, p.concur_callee) p.concur_tname, p.classname, p.concur_callee)
raise AssertionError(err) if exception:
raise AssertionError(err)
else: # pragma: no cover
warnings.warn(err)
else: else:
p.concur_tname = this.name p.concur_tname = this.name
p.concur_callee = callee p.concur_callee = callee
@ -63,7 +70,7 @@ def verify_proxy(obj_or_type, check_thread=True,
p.concur_lock.release() p.concur_lock.release()
return ret return ret
class VerifyObjectProxy(): class VerifyObjectProxy(object):
def __init__(self, obj_or_type, *args, **kwargs): def __init__(self, obj_or_type, *args, **kwargs):
p = Namespace() p = Namespace()
self.__ns = p self.__ns = p
@ -73,12 +80,17 @@ def verify_proxy(obj_or_type, check_thread=True,
p.concur_tname = None p.concur_tname = None
p.concur_callee = None p.concur_callee = None
self.__obj = obj_or_type self.__obj = obj_or_type
if isinstance(obj_or_type, type): try:
p.classname = self.__obj.__name__ if type(obj_or_type) in (types.TypeType, types.ClassType):
else: p.classname = self.__obj.__name__
p.classname = self.__obj.__class__.__name__ else:
p.classname = self.__obj.__class__.__name__
except AttributeError: # pragma: no cover
p.classname = "???"
def __getattr__(self, key): def __getattr__(self, key):
if key.startswith("_VerifyObjectProxy__"): # pragma: no cover
raise AttributeError
attr = getattr(self.__obj, key) attr = getattr(self.__obj, key)
if not callable(attr): if not callable(attr):
return VerifyCallProxy(getattr, self.__ns)(self.__obj, key) return VerifyCallProxy(getattr, self.__ns)(self.__obj, key)
@ -88,7 +100,7 @@ def verify_proxy(obj_or_type, check_thread=True,
"""Call this to instantiate the type, if a type was passed """Call this to instantiate the type, if a type was passed
to verify_proxy. Otherwise, pass the call through.""" to verify_proxy. Otherwise, pass the call through."""
ret = VerifyCallProxy(self.__obj, self.__ns)(*args, **kwargs) ret = VerifyCallProxy(self.__obj, self.__ns)(*args, **kwargs)
if isinstance(self.__obj, type): if type(self.__obj) in (types.TypeType, types.ClassType):
# Instantiation # Instantiation
self.__obj = ret self.__obj = ret
return self return self

View File

@ -1,95 +1,12 @@
from nilmdb.utils import datetime_tz
import re import re
import time
import datetime_tz
# Valid timestamp range: timestamps are signed 64-bit integers
# (microseconds since epoch, per this module's conversion functions).
min_timestamp = (-2**63)
max_timestamp = (2**63 - 1)
# Smallest representable step between two distinct timestamps
epsilon = 1
def string_to_timestamp(string):
    """Parse 'string' as an integer number of microseconds since
    epoch and return it as an int.  Accepts plain integers like
    "1234567890123456" as well as float renderings like
    "1234567890123456.0", which are rounded to the nearest integer."""
    try:
        value = int(string)
    except ValueError:
        # Not a plain integer; parse as a float and round.
        value = int(round(float(string)))
    return value
def timestamp_to_string(timestamp):
    """Convert a timestamp (integer microseconds since epoch) to its
    decimal string form.  A float input is rounded to the nearest
    integer first."""
    if isinstance(timestamp, float):
        timestamp = int(round(timestamp))
    return str(timestamp)
def timestamp_to_bytes(timestamp):
    """Convert a timestamp (integer microseconds since epoch) to a
    Python bytes object (UTF-8 encoding of its decimal string form)."""
    as_text = timestamp_to_string(timestamp)
    return bytes(as_text, 'utf-8')
def timestamp_to_human(timestamp):
    """Convert a timestamp (integer microseconds since epoch) to a
    human-readable string, using the local timezone for display
    (e.g. from the TZ env var).  The range limits min_timestamp and
    max_timestamp are rendered symbolically."""
    if timestamp == min_timestamp:
        return "(minimum)"
    if timestamp == max_timestamp:
        return "(maximum)"
    local = datetime_tz.datetime_tz.fromtimestamp(
        timestamp_to_unix(timestamp))
    return local.strftime("%a, %d %b %Y %H:%M:%S.%f %z")
def unix_to_timestamp(unix):
    """Convert a Unix timestamp (floating point seconds since epoch)
    into a NILM timestamp (integer microseconds since epoch)"""
    microseconds = unix * 1e6
    return int(round(microseconds))
def timestamp_to_unix(timestamp):
    """Convert a NILM timestamp (integer microseconds since epoch)
    into a Unix timestamp (floating point seconds since epoch)"""
    # Keep the division by 1e6 (rather than multiplying by 1e-6),
    # which is not bit-identical for all float inputs.
    seconds = timestamp / 1e6
    return seconds
# Aliases: the "seconds"-based names are synonyms for the "unix"-based
# conversions above (both mean floating-point seconds since epoch).
seconds_to_timestamp = unix_to_timestamp
timestamp_to_seconds = timestamp_to_unix
def rate_to_period(hz, cycles=1):
    """Convert a rate (in Hz) to a period (in timestamp units).
    Returns an integer: the duration of 'cycles' cycles at rate 'hz',
    rounded to the nearest timestamp unit."""
    ticks = unix_to_timestamp(cycles) / float(hz)
    return int(round(ticks))
def parse_time(toparse): def parse_time(toparse):
""" """
Parse a free-form time string and return a nilmdb timestamp Parse a free-form time string and return a datetime_tz object.
(integer microseconds since epoch). If the string doesn't contain a If the string doesn't contain a timestamp, the current local
timestamp, the current local timezone is assumed (e.g. from the TZ timezone is assumed (e.g. from the TZ env var).
env var).
""" """
if toparse == "min":
return min_timestamp
if toparse == "max":
return max_timestamp
# If it starts with @, treat it as a NILM timestamp
# (integer microseconds since epoch)
try:
if toparse[0] == '@':
return int(toparse[1:])
except (ValueError, KeyError, IndexError):
pass
# If string isn't "now" and doesn't contain at least 4 digits, # If string isn't "now" and doesn't contain at least 4 digits,
# consider it invalid. smartparse might otherwise accept # consider it invalid. smartparse might otherwise accept
# empty strings and strings with just separators. # empty strings and strings with just separators.
@ -98,20 +15,17 @@ def parse_time(toparse):
# Try to just parse the time as given # Try to just parse the time as given
try: try:
return unix_to_timestamp(datetime_tz.datetime_tz. return datetime_tz.datetime_tz.smartparse(toparse)
smartparse(toparse).totimestamp()) except ValueError:
except (ValueError, OverflowError, TypeError):
pass pass
# If it's parseable as a float, treat it as a Unix or NILM # Try to treat it as a single double
# timestamp based on its range.
try: try:
val = float(toparse) timestamp = float(toparse)
# range is from about year 2001 - 2128 # range is from about year 2001 - 2065
if 1e9 < val < 5e9: if timestamp < 1e9 or timestamp > 3e9:
return unix_to_timestamp(val) raise ValueError
if 1e15 < val < 5e15: return datetime_tz.datetime_tz.fromtimestamp(timestamp)
return val
except ValueError: except ValueError:
pass pass
@ -133,8 +47,7 @@ def parse_time(toparse):
r")", toparse) r")", toparse)
if res is not None: if res is not None:
try: try:
return unix_to_timestamp(datetime_tz.datetime_tz. return datetime_tz.datetime_tz.smartparse(res.group(2))
smartparse(res.group(2)).totimestamp())
except ValueError: except ValueError:
pass pass
@ -142,7 +55,15 @@ def parse_time(toparse):
# just give up for now. # just give up for now.
raise ValueError("unable to parse timestamp") raise ValueError("unable to parse timestamp")
def format_time(timestamp):
"""
Convert a Unix timestamp to a string for printing, using the
local timezone for display (e.g. from the TZ env var).
"""
dt = datetime_tz.datetime_tz.fromtimestamp(timestamp)
return dt.strftime("%a, %d %b %Y %H:%M:%S.%f %z")
def now(): def float_time_to_string(timestamp):
"""Return current timestamp""" """Convert a floating-point Unix timestamp to a string,
return unix_to_timestamp(time.time()) like '1234567890.000000'"""
return "%.6f" % timestamp

View File

@ -5,17 +5,18 @@
# with nilmdb.utils.Timer("flush"): # with nilmdb.utils.Timer("flush"):
# foo.flush() # foo.flush()
from __future__ import print_function
from __future__ import absolute_import
import contextlib import contextlib
import time import time
@contextlib.contextmanager @contextlib.contextmanager
def Timer(name=None, tosyslog=False): def Timer(name = None, tosyslog = False):
start = time.time() start = time.time()
yield yield
elapsed = int((time.time() - start) * 1000) elapsed = int((time.time() - start) * 1000)
msg = (name or 'elapsed') + ": " + str(elapsed) + " ms" msg = (name or 'elapsed') + ": " + str(elapsed) + " ms"
if tosyslog: if tosyslog: # pragma: no cover
import syslog import syslog
syslog.syslog(msg) syslog.syslog(msg)
else: else:

View File

@ -1,17 +1,16 @@
"""File-like objects that add timestamps to the input lines""" """File-like objects that add timestamps to the input lines"""
from nilmdb.utils.printf import sprintf from nilmdb.utils.printf import *
import nilmdb.utils.time from nilmdb.utils import datetime_tz
class Timestamper(object):
class Timestamper():
"""A file-like object that adds timestamps to lines of an input file.""" """A file-like object that adds timestamps to lines of an input file."""
def __init__(self, infile, ts_iter): def __init__(self, infile, ts_iter):
"""file: filename, or another file-like object """file: filename, or another file-like object
ts_iter: iterator that returns a timestamp string for ts_iter: iterator that returns a timestamp string for
each line of the file""" each line of the file"""
if isinstance(infile, str): if isinstance(infile, basestring):
self.file = open(infile, "rb") self.file = open(infile, "r")
else: else:
self.file = infile self.file = infile
self.ts_iter = ts_iter self.ts_iter = ts_iter
@ -23,19 +22,17 @@ class Timestamper():
while True: while True:
line = self.file.readline(*args) line = self.file.readline(*args)
if not line: if not line:
return b"" return ""
if line[0:1] == b'#': if line[0] == '#':
continue continue
# For some reason, coverage on python 3.8 reports that break
# we never hit this break, even though we definitely do.
break # pragma: no cover
try: try:
return next(self.ts_iter) + line return self.ts_iter.next() + line
except StopIteration: except StopIteration:
return b"" return ""
def readlines(self, size=None): def readlines(self, size = None):
out = b"" out = ""
while True: while True:
line = self.readline() line = self.readline()
out += line out += line
@ -46,16 +43,15 @@ class Timestamper():
def __iter__(self): def __iter__(self):
return self return self
def __next__(self): def next(self):
result = self.readline() result = self.readline()
if not result: if not result:
raise StopIteration raise StopIteration
return result return result
class TimestamperRate(Timestamper): class TimestamperRate(Timestamper):
"""Timestamper that uses a start time and a fixed rate""" """Timestamper that uses a start time and a fixed rate"""
def __init__(self, infile, start, rate, end=None): def __init__(self, infile, start, rate, end = None):
""" """
file: file name or object file: file name or object
@ -65,39 +61,31 @@ class TimestamperRate(Timestamper):
end: If specified, raise StopIteration before outputting a value end: If specified, raise StopIteration before outputting a value
greater than this.""" greater than this."""
timestamp_to_bytes = nilmdb.utils.time.timestamp_to_bytes
rate_to_period = nilmdb.utils.time.rate_to_period
def iterator(start, rate, end): def iterator(start, rate, end):
n = 0 n = 0
rate = float(rate) rate = float(rate)
while True: while True:
now = start + rate_to_period(rate, n) now = start + n / rate
if end and now >= end: if end and now >= end:
return raise StopIteration
yield timestamp_to_bytes(now) + b" " yield sprintf("%.6f ", start + n / rate)
n += 1 n += 1
Timestamper.__init__(self, infile, iterator(start, rate, end)) Timestamper.__init__(self, infile, iterator(start, rate, end))
self.start = start self.start = start
self.rate = rate self.rate = rate
def __str__(self): def __str__(self):
start = datetime_tz.datetime_tz.fromtimestamp(self.start)
start = start.strftime("%a, %d %b %Y %H:%M:%S %Z")
return sprintf("TimestamperRate(..., start=\"%s\", rate=%g)", return sprintf("TimestamperRate(..., start=\"%s\", rate=%g)",
nilmdb.utils.time.timestamp_to_human(self.start), str(start), self.rate)
self.rate)
class TimestamperNow(Timestamper): class TimestamperNow(Timestamper):
"""Timestamper that uses current time""" """Timestamper that uses current time"""
def __init__(self, infile): def __init__(self, infile):
timestamp_to_bytes = nilmdb.utils.time.timestamp_to_bytes
get_now = nilmdb.utils.time.now
def iterator(): def iterator():
while True: while True:
yield timestamp_to_bytes(get_now()) + b" " now = datetime_tz.datetime_tz.utcnow().totimestamp()
yield sprintf("%.6f ", now)
Timestamper.__init__(self, infile, iterator()) Timestamper.__init__(self, infile, iterator())
def __str__(self): def __str__(self):
return "TimestamperNow(...)" return "TimestamperNow(...)"

View File

@ -1,41 +0,0 @@
argcomplete==1.12.0
CherryPy==18.6.0
coverage==5.2.1
Cython==0.29.21
decorator==4.4.2
fallocate==1.6.4
flake8==3.8.3
nose==1.3.7
numpy==1.19.1
progressbar==2.5
psutil==5.7.2
python-datetime-tz==0.5.4
python-dateutil==2.8.1
requests==2.24.0
tz==0.2.2
yappi==1.2.5
## The following requirements were added by pip freeze:
beautifulsoup4==4.9.1
certifi==2020.6.20
chardet==3.0.4
cheroot==8.4.2
idna==2.10
jaraco.classes==3.1.0
jaraco.collections==3.0.0
jaraco.functools==3.0.1
jaraco.text==3.2.0
mccabe==0.6.1
more-itertools==8.4.0
portend==2.6
pycodestyle==2.6.0
pyflakes==2.2.0
pytz==2020.1
six==1.15.0
soupsieve==2.0.1
tempora==4.0.0
urllib3==1.25.10
waitress==1.4.4
WebOb==1.8.6
WebTest==2.0.35
zc.lockfile==2.0

View File

@ -13,6 +13,8 @@ cover-package=nilmdb
cover-erase=1 cover-erase=1
# this works, puts html output in cover/ dir: # this works, puts html output in cover/ dir:
# cover-html=1 # cover-html=1
# need nose 1.1.3 for this:
# cover-branches=1
#debug=nose #debug=nose
#debug-log=nose.log #debug-log=nose.log
stop=1 stop=1
@ -37,23 +39,3 @@ tests=tests
#with-profile=1 #with-profile=1
#profile-sort=time #profile-sort=time
##profile-restrict=10 # doesn't work right, treated as string or something ##profile-restrict=10 # doesn't work right, treated as string or something
[versioneer]
VCS=git
style=pep440
versionfile_source=nilmdb/_version.py
versionfile_build=nilmdb/_version.py
tag_prefix=nilmdb-
parentdir_prefix=nilmdb-
[flake8]
exclude=_version.py
extend-ignore=E731
per-file-ignores=__init__.py:F401,E402 \
serializer.py:E722 \
mustclose.py:E722 \
fsck.py:E266
[pylint]
ignore=_version.py
disable=C0103,C0111,R0913,R0914

114
setup.py
View File

@ -1,62 +1,134 @@
#!/usr/bin/env python3 #!/usr/bin/python
# To release a new version, tag it: # To release a new version, tag it:
# git tag -a nilmdb-1.1 -m "Version 1.1" # git tag -a nilmdb-1.1 -m "Version 1.1"
# git push --tags # git push --tags
# Then just package it up: # Then just package it up:
# python3 setup.py sdist # python setup.py sdist
# This is supposed to be using Distribute:
#
# distutils provides a "setup" method.
# setuptools is a set of monkeypatches on top of that.
# distribute is a particular version/implementation of setuptools.
#
# So we don't really know if this is using the old setuptools or the
# Distribute-provided version of setuptools.
import traceback
import sys import sys
import os import os
from setuptools import setup
from distutils.extension import Extension try:
from setuptools import setup, find_packages
from distutils.extension import Extension
import distutils.version
except ImportError:
traceback.print_exc()
print "Please install the prerequisites listed in README.txt"
sys.exit(1)
# Versioneer manages version numbers from git tags. # Versioneer manages version numbers from git tags.
# https://github.com/warner/python-versioneer # https://github.com/warner/python-versioneer
import versioneer import versioneer
versioneer.versionfile_source = 'nilmdb/_version.py'
versioneer.versionfile_build = 'nilmdb/_version.py'
versioneer.tag_prefix = 'nilmdb-'
versioneer.parentdir_prefix = 'nilmdb-'
# Hack to workaround logging/multiprocessing issue:
# https://groups.google.com/d/msg/nose-users/fnJ-kAUbYHQ/_UsLN786ygcJ
try: import multiprocessing
except: pass
# Use Cython if it's new enough, otherwise use preexisting C files.
cython_modules = [ 'nilmdb.server.interval',
'nilmdb.server.layout',
'nilmdb.server.rbtree' ]
try:
import Cython
from Cython.Build import cythonize
if (distutils.version.LooseVersion(Cython.__version__) <
distutils.version.LooseVersion("0.16")):
print "Cython version", Cython.__version__, "is too old; not using it."
raise ImportError()
use_cython = True
except ImportError:
use_cython = False
# External modules that need to be built
ext_modules = [ Extension('nilmdb.server.rocket', ['nilmdb/server/rocket.c' ]) ] ext_modules = [ Extension('nilmdb.server.rocket', ['nilmdb/server/rocket.c' ]) ]
# Use Cython.
cython_modules = [ 'nilmdb.server.interval', 'nilmdb.server.rbtree' ]
import Cython
from Cython.Build import cythonize
for modulename in cython_modules: for modulename in cython_modules:
filename = modulename.replace('.','/') filename = modulename.replace('.','/')
ext_modules.extend(cythonize(filename + ".pyx")) if use_cython:
ext_modules.extend(cythonize(filename + ".pyx"))
else:
cfile = filename + ".c"
if not os.path.exists(cfile):
raise Exception("Missing source file " + cfile + ". "
"Try installing cython >= 0.16.")
ext_modules.append(Extension(modulename, [ cfile ]))
# Get list of requirements to use in `install_requires` below. Note # We need a MANIFEST.in. Generate it here rather than polluting the
# that we don't make a distinction between things that are actually # repository with yet another setup-related file.
# required for end-users vs developers (or use `test_requires` or with open("MANIFEST.in", "w") as m:
# anything else) -- just install everything for simplicity. m.write("""
install_requires = open('requirements.txt').readlines() # Root
include README.txt
include setup.cfg
include setup.py
include versioneer.py
include Makefile
include .coveragerc
include .pylintrc
# Cython files -- include source.
recursive-include nilmdb/server *.pyx *.pyxdep *.pxd
# Tests
recursive-include tests *.py
recursive-include tests/data *
include tests/test.order
# Docs
recursive-include docs Makefile *.md
""")
# Run setup # Run setup
setup(name='nilmdb', setup(name='nilmdb',
version = versioneer.get_version(), version = versioneer.get_version(),
cmdclass = versioneer.get_cmdclass(), cmdclass = versioneer.get_cmdclass(),
url = 'https://git.jim.sh/nilm/nilmdb.git', url = 'https://git.jim.sh/jim/lees/nilmdb.git',
author = 'Jim Paris', author = 'Jim Paris',
description = "NILM Database", description = "NILM Database",
long_description = "NILM Database", long_description = "NILM Database",
license = "Proprietary", license = "Proprietary",
author_email = 'jim@jtan.com', author_email = 'jim@jtan.com',
setup_requires = [ 'setuptools' ], tests_require = [ 'nose',
install_requires = install_requires, 'coverage',
],
setup_requires = [ 'distribute',
],
install_requires = [ 'decorator',
'cherrypy >= 3.2',
'simplejson',
'pycurl',
'python-dateutil',
'pytz',
'psutil >= 0.3.0',
'requests >= 1.1.0, < 2.0.0',
],
packages = [ 'nilmdb', packages = [ 'nilmdb',
'nilmdb.utils', 'nilmdb.utils',
'nilmdb.utils.datetime_tz',
'nilmdb.server', 'nilmdb.server',
'nilmdb.client', 'nilmdb.client',
'nilmdb.cmdline', 'nilmdb.cmdline',
'nilmdb.scripts', 'nilmdb.scripts',
'nilmdb.fsck',
], ],
entry_points = { entry_points = {
'console_scripts': [ 'console_scripts': [
'nilmtool = nilmdb.scripts.nilmtool:main', 'nilmtool = nilmdb.scripts.nilmtool:main',
'nilmdb-server = nilmdb.scripts.nilmdb_server:main', 'nilmdb-server = nilmdb.scripts.nilmdb_server:main',
'nilmdb-fsck = nilmdb.scripts.nilmdb_fsck:main',
], ],
}, },
ext_modules = ext_modules, ext_modules = ext_modules,

View File

@ -2,123 +2,123 @@
# layout: float32_8 # layout: float32_8
# start: Fri, 23 Mar 2012 10:00:30.000000 +0000 # start: Fri, 23 Mar 2012 10:00:30.000000 +0000
# end: Fri, 23 Mar 2012 10:00:31.000000 +0000 # end: Fri, 23 Mar 2012 10:00:31.000000 +0000
1332496830000000 2.517740e+05 2.242410e+05 5.688100e+03 1.915530e+03 9.329220e+03 4.183710e+03 1.212350e+03 2.641790e+03 1332496830.000000 2.517740e+05 2.242410e+05 5.688100e+03 1.915530e+03 9.329220e+03 4.183710e+03 1.212350e+03 2.641790e+03
1332496830008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03 1332496830.008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
1332496830016667 2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03 1332496830.016667 2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03
1332496830025000 2.576140e+05 2.233230e+05 5.003660e+03 3.525140e+03 7.165310e+03 4.685620e+03 1.715380e+03 3.440480e+03 1332496830.025000 2.576140e+05 2.233230e+05 5.003660e+03 3.525140e+03 7.165310e+03 4.685620e+03 1.715380e+03 3.440480e+03
1332496830033333 2.557800e+05 2.219150e+05 6.357310e+03 2.145290e+03 8.426970e+03 3.775350e+03 1.475390e+03 3.797240e+03 1332496830.033333 2.557800e+05 2.219150e+05 6.357310e+03 2.145290e+03 8.426970e+03 3.775350e+03 1.475390e+03 3.797240e+03
1332496830041667 2.601660e+05 2.230080e+05 6.702590e+03 1.484960e+03 9.288100e+03 3.330830e+03 1.228500e+03 3.214320e+03 1332496830.041667 2.601660e+05 2.230080e+05 6.702590e+03 1.484960e+03 9.288100e+03 3.330830e+03 1.228500e+03 3.214320e+03
1332496830050000 2.612310e+05 2.264260e+05 4.980060e+03 2.982380e+03 8.499630e+03 4.267670e+03 9.940890e+02 2.292890e+03 1332496830.050000 2.612310e+05 2.264260e+05 4.980060e+03 2.982380e+03 8.499630e+03 4.267670e+03 9.940890e+02 2.292890e+03
1332496830058333 2.551170e+05 2.266420e+05 4.584410e+03 4.656440e+03 7.860150e+03 5.317310e+03 1.473600e+03 2.111690e+03 1332496830.058333 2.551170e+05 2.266420e+05 4.584410e+03 4.656440e+03 7.860150e+03 5.317310e+03 1.473600e+03 2.111690e+03
1332496830066667 2.533000e+05 2.235540e+05 6.455090e+03 3.036650e+03 8.869750e+03 4.986310e+03 2.607360e+03 2.839590e+03 1332496830.066667 2.533000e+05 2.235540e+05 6.455090e+03 3.036650e+03 8.869750e+03 4.986310e+03 2.607360e+03 2.839590e+03
1332496830075000 2.610610e+05 2.212630e+05 6.951980e+03 1.500240e+03 9.386100e+03 3.791680e+03 2.677010e+03 3.980630e+03 1332496830.075000 2.610610e+05 2.212630e+05 6.951980e+03 1.500240e+03 9.386100e+03 3.791680e+03 2.677010e+03 3.980630e+03
1332496830083333 2.665030e+05 2.231980e+05 5.189610e+03 2.594560e+03 8.571530e+03 3.175000e+03 9.198400e+02 3.792010e+03 1332496830.083333 2.665030e+05 2.231980e+05 5.189610e+03 2.594560e+03 8.571530e+03 3.175000e+03 9.198400e+02 3.792010e+03
1332496830091667 2.606920e+05 2.251840e+05 3.782480e+03 4.642880e+03 7.662960e+03 3.917790e+03 -2.510970e+02 2.907060e+03 1332496830.091667 2.606920e+05 2.251840e+05 3.782480e+03 4.642880e+03 7.662960e+03 3.917790e+03 -2.510970e+02 2.907060e+03
1332496830100000 2.539630e+05 2.250810e+05 5.123530e+03 3.839550e+03 8.669030e+03 4.877820e+03 9.437240e+02 2.527450e+03 1332496830.100000 2.539630e+05 2.250810e+05 5.123530e+03 3.839550e+03 8.669030e+03 4.877820e+03 9.437240e+02 2.527450e+03
1332496830108333 2.565550e+05 2.241690e+05 5.930600e+03 2.298540e+03 8.906710e+03 5.331680e+03 2.549910e+03 3.053560e+03 1332496830.108333 2.565550e+05 2.241690e+05 5.930600e+03 2.298540e+03 8.906710e+03 5.331680e+03 2.549910e+03 3.053560e+03
1332496830116667 2.608890e+05 2.250100e+05 4.681130e+03 2.971870e+03 7.900040e+03 4.874080e+03 2.322430e+03 3.649120e+03 1332496830.116667 2.608890e+05 2.250100e+05 4.681130e+03 2.971870e+03 7.900040e+03 4.874080e+03 2.322430e+03 3.649120e+03
1332496830125000 2.579440e+05 2.249230e+05 3.291140e+03 4.357090e+03 7.131590e+03 4.385560e+03 1.077050e+03 3.664040e+03 1332496830.125000 2.579440e+05 2.249230e+05 3.291140e+03 4.357090e+03 7.131590e+03 4.385560e+03 1.077050e+03 3.664040e+03
1332496830133333 2.550090e+05 2.230180e+05 4.584820e+03 2.864000e+03 8.469490e+03 3.625580e+03 9.855570e+02 3.504230e+03 1332496830.133333 2.550090e+05 2.230180e+05 4.584820e+03 2.864000e+03 8.469490e+03 3.625580e+03 9.855570e+02 3.504230e+03
1332496830141667 2.601140e+05 2.219470e+05 5.676190e+03 1.210340e+03 9.393780e+03 3.390240e+03 1.654020e+03 3.018700e+03 1332496830.141667 2.601140e+05 2.219470e+05 5.676190e+03 1.210340e+03 9.393780e+03 3.390240e+03 1.654020e+03 3.018700e+03
1332496830150000 2.642770e+05 2.244380e+05 4.446620e+03 2.176720e+03 8.142090e+03 4.584880e+03 2.327830e+03 2.615800e+03 1332496830.150000 2.642770e+05 2.244380e+05 4.446620e+03 2.176720e+03 8.142090e+03 4.584880e+03 2.327830e+03 2.615800e+03
1332496830158333 2.592210e+05 2.264710e+05 2.734440e+03 4.182760e+03 6.389550e+03 5.540520e+03 1.958880e+03 2.720120e+03 1332496830.158333 2.592210e+05 2.264710e+05 2.734440e+03 4.182760e+03 6.389550e+03 5.540520e+03 1.958880e+03 2.720120e+03
1332496830166667 2.526500e+05 2.248310e+05 4.163640e+03 2.989990e+03 7.179200e+03 5.213060e+03 1.929550e+03 3.457660e+03 1332496830.166667 2.526500e+05 2.248310e+05 4.163640e+03 2.989990e+03 7.179200e+03 5.213060e+03 1.929550e+03 3.457660e+03
1332496830175000 2.570830e+05 2.220480e+05 5.759040e+03 7.024410e+02 8.566550e+03 3.552020e+03 1.832940e+03 3.956190e+03 1332496830.175000 2.570830e+05 2.220480e+05 5.759040e+03 7.024410e+02 8.566550e+03 3.552020e+03 1.832940e+03 3.956190e+03
1332496830183333 2.631300e+05 2.229670e+05 5.141140e+03 1.166120e+03 8.666960e+03 2.720370e+03 9.713740e+02 3.479730e+03 1332496830.183333 2.631300e+05 2.229670e+05 5.141140e+03 1.166120e+03 8.666960e+03 2.720370e+03 9.713740e+02 3.479730e+03
1332496830191667 2.602360e+05 2.252650e+05 3.425140e+03 3.339080e+03 7.853610e+03 3.674950e+03 5.259080e+02 2.443310e+03 1332496830.191667 2.602360e+05 2.252650e+05 3.425140e+03 3.339080e+03 7.853610e+03 3.674950e+03 5.259080e+02 2.443310e+03
1332496830200000 2.535030e+05 2.245270e+05 4.398130e+03 2.927430e+03 8.110280e+03 4.842470e+03 1.513870e+03 2.467100e+03 1332496830.200000 2.535030e+05 2.245270e+05 4.398130e+03 2.927430e+03 8.110280e+03 4.842470e+03 1.513870e+03 2.467100e+03
1332496830208333 2.561260e+05 2.226930e+05 6.043530e+03 6.562240e+02 8.797560e+03 4.832410e+03 2.832370e+03 3.426140e+03 1332496830.208333 2.561260e+05 2.226930e+05 6.043530e+03 6.562240e+02 8.797560e+03 4.832410e+03 2.832370e+03 3.426140e+03
1332496830216667 2.616770e+05 2.236080e+05 5.830460e+03 1.033910e+03 8.123940e+03 3.980690e+03 1.927960e+03 4.092720e+03 1332496830.216667 2.616770e+05 2.236080e+05 5.830460e+03 1.033910e+03 8.123940e+03 3.980690e+03 1.927960e+03 4.092720e+03
1332496830225000 2.594570e+05 2.255360e+05 4.015570e+03 2.995990e+03 7.135440e+03 3.713550e+03 3.072200e+02 3.849430e+03 1332496830.225000 2.594570e+05 2.255360e+05 4.015570e+03 2.995990e+03 7.135440e+03 3.713550e+03 3.072200e+02 3.849430e+03
1332496830233333 2.533520e+05 2.242160e+05 4.650560e+03 3.196620e+03 8.131280e+03 3.586160e+03 7.083230e+01 3.074180e+03 1332496830.233333 2.533520e+05 2.242160e+05 4.650560e+03 3.196620e+03 8.131280e+03 3.586160e+03 7.083230e+01 3.074180e+03
1332496830241667 2.561240e+05 2.215130e+05 6.100480e+03 8.219800e+02 9.757540e+03 3.474510e+03 1.647520e+03 2.559860e+03 1332496830.241667 2.561240e+05 2.215130e+05 6.100480e+03 8.219800e+02 9.757540e+03 3.474510e+03 1.647520e+03 2.559860e+03
1332496830250000 2.630240e+05 2.215590e+05 5.789960e+03 6.994170e+02 9.129740e+03 4.153080e+03 2.829250e+03 2.677270e+03 1332496830.250000 2.630240e+05 2.215590e+05 5.789960e+03 6.994170e+02 9.129740e+03 4.153080e+03 2.829250e+03 2.677270e+03
1332496830258333 2.617200e+05 2.240150e+05 4.358500e+03 2.645360e+03 7.414110e+03 4.810670e+03 2.225990e+03 3.185990e+03 1332496830.258333 2.617200e+05 2.240150e+05 4.358500e+03 2.645360e+03 7.414110e+03 4.810670e+03 2.225990e+03 3.185990e+03
1332496830266667 2.547560e+05 2.242400e+05 4.857380e+03 3.229680e+03 7.539310e+03 4.769140e+03 1.507130e+03 3.668260e+03 1332496830.266667 2.547560e+05 2.242400e+05 4.857380e+03 3.229680e+03 7.539310e+03 4.769140e+03 1.507130e+03 3.668260e+03
1332496830275000 2.568890e+05 2.226580e+05 6.473420e+03 1.214110e+03 9.010760e+03 3.848730e+03 1.303840e+03 3.778500e+03 1332496830.275000 2.568890e+05 2.226580e+05 6.473420e+03 1.214110e+03 9.010760e+03 3.848730e+03 1.303840e+03 3.778500e+03
1332496830283333 2.642080e+05 2.233160e+05 5.700450e+03 1.116560e+03 9.087610e+03 3.846680e+03 1.293590e+03 2.891560e+03 1332496830.283333 2.642080e+05 2.233160e+05 5.700450e+03 1.116560e+03 9.087610e+03 3.846680e+03 1.293590e+03 2.891560e+03
1332496830291667 2.633100e+05 2.257190e+05 3.936120e+03 3.252360e+03 7.552850e+03 4.897860e+03 1.156630e+03 2.037160e+03 1332496830.291667 2.633100e+05 2.257190e+05 3.936120e+03 3.252360e+03 7.552850e+03 4.897860e+03 1.156630e+03 2.037160e+03
1332496830300000 2.550790e+05 2.250860e+05 4.536450e+03 3.960110e+03 7.454590e+03 5.479070e+03 1.596360e+03 2.190800e+03 1332496830.300000 2.550790e+05 2.250860e+05 4.536450e+03 3.960110e+03 7.454590e+03 5.479070e+03 1.596360e+03 2.190800e+03
1332496830308333 2.544870e+05 2.225080e+05 6.635860e+03 1.758850e+03 8.732970e+03 4.466970e+03 2.650360e+03 3.139310e+03 1332496830.308333 2.544870e+05 2.225080e+05 6.635860e+03 1.758850e+03 8.732970e+03 4.466970e+03 2.650360e+03 3.139310e+03
1332496830316667 2.612410e+05 2.224320e+05 6.702270e+03 1.085130e+03 8.989230e+03 3.112990e+03 1.933560e+03 3.828410e+03 1332496830.316667 2.612410e+05 2.224320e+05 6.702270e+03 1.085130e+03 8.989230e+03 3.112990e+03 1.933560e+03 3.828410e+03
1332496830325000 2.621190e+05 2.255870e+05 4.714950e+03 2.892360e+03 8.107820e+03 2.961310e+03 2.399780e+02 3.273720e+03 1332496830.325000 2.621190e+05 2.255870e+05 4.714950e+03 2.892360e+03 8.107820e+03 2.961310e+03 2.399780e+02 3.273720e+03
1332496830333333 2.549990e+05 2.265140e+05 4.532090e+03 4.126900e+03 8.200130e+03 3.872590e+03 5.608900e+01 2.370580e+03 1332496830.333333 2.549990e+05 2.265140e+05 4.532090e+03 4.126900e+03 8.200130e+03 3.872590e+03 5.608900e+01 2.370580e+03
1332496830341667 2.542890e+05 2.240330e+05 6.538810e+03 2.251440e+03 9.419430e+03 4.564450e+03 2.077810e+03 2.508170e+03 1332496830.341667 2.542890e+05 2.240330e+05 6.538810e+03 2.251440e+03 9.419430e+03 4.564450e+03 2.077810e+03 2.508170e+03
1332496830350000 2.618900e+05 2.219600e+05 6.846090e+03 1.475270e+03 9.125590e+03 4.598290e+03 3.299220e+03 3.475420e+03 1332496830.350000 2.618900e+05 2.219600e+05 6.846090e+03 1.475270e+03 9.125590e+03 4.598290e+03 3.299220e+03 3.475420e+03
1332496830358333 2.645020e+05 2.230850e+05 5.066380e+03 3.270560e+03 7.933170e+03 4.173710e+03 1.908910e+03 3.867460e+03 1332496830.358333 2.645020e+05 2.230850e+05 5.066380e+03 3.270560e+03 7.933170e+03 4.173710e+03 1.908910e+03 3.867460e+03
1332496830366667 2.578890e+05 2.236560e+05 4.201660e+03 4.473640e+03 7.688340e+03 4.161580e+03 6.875790e+02 3.653690e+03 1332496830.366667 2.578890e+05 2.236560e+05 4.201660e+03 4.473640e+03 7.688340e+03 4.161580e+03 6.875790e+02 3.653690e+03
1332496830375000 2.542700e+05 2.231510e+05 5.715140e+03 2.752140e+03 9.273320e+03 3.772950e+03 8.964040e+02 3.256060e+03 1332496830.375000 2.542700e+05 2.231510e+05 5.715140e+03 2.752140e+03 9.273320e+03 3.772950e+03 8.964040e+02 3.256060e+03
1332496830383333 2.582570e+05 2.242170e+05 6.114310e+03 1.856860e+03 9.604320e+03 4.200490e+03 1.764380e+03 2.939220e+03 1332496830.383333 2.582570e+05 2.242170e+05 6.114310e+03 1.856860e+03 9.604320e+03 4.200490e+03 1.764380e+03 2.939220e+03
1332496830391667 2.600200e+05 2.268680e+05 4.237530e+03 3.605880e+03 8.066220e+03 5.430250e+03 2.138580e+03 2.696710e+03 1332496830.391667 2.600200e+05 2.268680e+05 4.237530e+03 3.605880e+03 8.066220e+03 5.430250e+03 2.138580e+03 2.696710e+03
1332496830400000 2.550830e+05 2.259240e+05 3.350310e+03 4.853070e+03 7.045820e+03 5.925200e+03 1.893610e+03 2.897340e+03 1332496830.400000 2.550830e+05 2.259240e+05 3.350310e+03 4.853070e+03 7.045820e+03 5.925200e+03 1.893610e+03 2.897340e+03
1332496830408333 2.544530e+05 2.221270e+05 5.271330e+03 2.491500e+03 8.436680e+03 5.032080e+03 2.436050e+03 3.724590e+03 1332496830.408333 2.544530e+05 2.221270e+05 5.271330e+03 2.491500e+03 8.436680e+03 5.032080e+03 2.436050e+03 3.724590e+03
1332496830416667 2.625880e+05 2.199500e+05 5.994620e+03 7.892740e+02 9.029650e+03 3.515740e+03 1.953570e+03 4.014520e+03 1332496830.416667 2.625880e+05 2.199500e+05 5.994620e+03 7.892740e+02 9.029650e+03 3.515740e+03 1.953570e+03 4.014520e+03
1332496830425000 2.656100e+05 2.233330e+05 4.391410e+03 2.400960e+03 8.146460e+03 3.536960e+03 5.302320e+02 3.133920e+03 1332496830.425000 2.656100e+05 2.233330e+05 4.391410e+03 2.400960e+03 8.146460e+03 3.536960e+03 5.302320e+02 3.133920e+03
1332496830433333 2.574700e+05 2.269770e+05 2.975320e+03 4.633530e+03 7.278560e+03 4.640100e+03 -5.015020e+01 2.024960e+03 1332496830.433333 2.574700e+05 2.269770e+05 2.975320e+03 4.633530e+03 7.278560e+03 4.640100e+03 -5.015020e+01 2.024960e+03
1332496830441667 2.506870e+05 2.263310e+05 4.517860e+03 3.183800e+03 8.072600e+03 5.281660e+03 1.605140e+03 2.335140e+03 1332496830.441667 2.506870e+05 2.263310e+05 4.517860e+03 3.183800e+03 8.072600e+03 5.281660e+03 1.605140e+03 2.335140e+03
1332496830450000 2.555630e+05 2.244950e+05 5.551000e+03 1.101300e+03 8.461490e+03 4.725700e+03 2.726670e+03 3.480540e+03 1332496830.450000 2.555630e+05 2.244950e+05 5.551000e+03 1.101300e+03 8.461490e+03 4.725700e+03 2.726670e+03 3.480540e+03
1332496830458333 2.613350e+05 2.246450e+05 4.764680e+03 1.557020e+03 7.833350e+03 3.524810e+03 1.577410e+03 4.038620e+03 1332496830.458333 2.613350e+05 2.246450e+05 4.764680e+03 1.557020e+03 7.833350e+03 3.524810e+03 1.577410e+03 4.038620e+03
1332496830466667 2.602690e+05 2.240080e+05 3.558030e+03 2.987610e+03 7.362440e+03 3.279230e+03 5.624420e+02 3.786550e+03 1332496830.466667 2.602690e+05 2.240080e+05 3.558030e+03 2.987610e+03 7.362440e+03 3.279230e+03 5.624420e+02 3.786550e+03
1332496830475000 2.574350e+05 2.217770e+05 4.972600e+03 2.166880e+03 8.481440e+03 3.328720e+03 1.037130e+03 3.271370e+03 1332496830.475000 2.574350e+05 2.217770e+05 4.972600e+03 2.166880e+03 8.481440e+03 3.328720e+03 1.037130e+03 3.271370e+03
1332496830483333 2.610460e+05 2.215500e+05 5.816180e+03 5.902170e+02 9.120930e+03 3.895400e+03 2.382670e+03 2.824170e+03 1332496830.483333 2.610460e+05 2.215500e+05 5.816180e+03 5.902170e+02 9.120930e+03 3.895400e+03 2.382670e+03 2.824170e+03
1332496830491667 2.627660e+05 2.244730e+05 4.835050e+03 1.785770e+03 7.880760e+03 4.745620e+03 2.443660e+03 3.229550e+03 1332496830.491667 2.627660e+05 2.244730e+05 4.835050e+03 1.785770e+03 7.880760e+03 4.745620e+03 2.443660e+03 3.229550e+03
1332496830500000 2.565090e+05 2.264130e+05 3.758870e+03 3.461200e+03 6.743770e+03 4.928960e+03 1.536620e+03 3.546690e+03 1332496830.500000 2.565090e+05 2.264130e+05 3.758870e+03 3.461200e+03 6.743770e+03 4.928960e+03 1.536620e+03 3.546690e+03
1332496830508333 2.507930e+05 2.243720e+05 5.218490e+03 2.865260e+03 7.803960e+03 4.351090e+03 1.333820e+03 3.680490e+03 1332496830.508333 2.507930e+05 2.243720e+05 5.218490e+03 2.865260e+03 7.803960e+03 4.351090e+03 1.333820e+03 3.680490e+03
1332496830516667 2.563190e+05 2.220660e+05 6.403970e+03 7.323450e+02 9.627760e+03 3.089300e+03 1.516780e+03 3.653690e+03 1332496830.516667 2.563190e+05 2.220660e+05 6.403970e+03 7.323450e+02 9.627760e+03 3.089300e+03 1.516780e+03 3.653690e+03
1332496830525000 2.633430e+05 2.232350e+05 5.200430e+03 1.388580e+03 9.372850e+03 3.371230e+03 1.450390e+03 2.678910e+03 1332496830.525000 2.633430e+05 2.232350e+05 5.200430e+03 1.388580e+03 9.372850e+03 3.371230e+03 1.450390e+03 2.678910e+03
1332496830533333 2.609030e+05 2.251100e+05 3.722580e+03 3.246660e+03 7.876540e+03 4.716810e+03 1.498440e+03 2.116520e+03 1332496830.533333 2.609030e+05 2.251100e+05 3.722580e+03 3.246660e+03 7.876540e+03 4.716810e+03 1.498440e+03 2.116520e+03
1332496830541667 2.544160e+05 2.237690e+05 4.841650e+03 2.956400e+03 8.115920e+03 5.392360e+03 2.142810e+03 2.652320e+03 1332496830.541667 2.544160e+05 2.237690e+05 4.841650e+03 2.956400e+03 8.115920e+03 5.392360e+03 2.142810e+03 2.652320e+03
1332496830550000 2.566980e+05 2.221720e+05 6.471230e+03 9.703960e+02 8.834980e+03 4.816840e+03 2.376630e+03 3.605860e+03 1332496830.550000 2.566980e+05 2.221720e+05 6.471230e+03 9.703960e+02 8.834980e+03 4.816840e+03 2.376630e+03 3.605860e+03
1332496830558333 2.618410e+05 2.235370e+05 5.500740e+03 1.189660e+03 8.365730e+03 4.016470e+03 1.042270e+03 3.821200e+03 1332496830.558333 2.618410e+05 2.235370e+05 5.500740e+03 1.189660e+03 8.365730e+03 4.016470e+03 1.042270e+03 3.821200e+03
1332496830566667 2.595030e+05 2.258400e+05 3.827930e+03 3.088840e+03 7.676140e+03 3.978310e+03 -3.570070e+02 3.016420e+03 1332496830.566667 2.595030e+05 2.258400e+05 3.827930e+03 3.088840e+03 7.676140e+03 3.978310e+03 -3.570070e+02 3.016420e+03
1332496830575000 2.534570e+05 2.246360e+05 4.914610e+03 3.097450e+03 8.224900e+03 4.321440e+03 1.713740e+02 2.412360e+03 1332496830.575000 2.534570e+05 2.246360e+05 4.914610e+03 3.097450e+03 8.224900e+03 4.321440e+03 1.713740e+02 2.412360e+03
1332496830583333 2.560290e+05 2.222210e+05 6.841800e+03 1.028500e+03 9.252300e+03 4.387570e+03 2.418140e+03 2.510100e+03 1332496830.583333 2.560290e+05 2.222210e+05 6.841800e+03 1.028500e+03 9.252300e+03 4.387570e+03 2.418140e+03 2.510100e+03
1332496830591667 2.628400e+05 2.225500e+05 6.210250e+03 1.410730e+03 8.538900e+03 4.152580e+03 3.009300e+03 3.219760e+03 1332496830.591667 2.628400e+05 2.225500e+05 6.210250e+03 1.410730e+03 8.538900e+03 4.152580e+03 3.009300e+03 3.219760e+03
1332496830600000 2.616330e+05 2.250650e+05 4.284530e+03 3.357210e+03 7.282170e+03 3.823590e+03 1.402840e+03 3.644670e+03 1332496830.600000 2.616330e+05 2.250650e+05 4.284530e+03 3.357210e+03 7.282170e+03 3.823590e+03 1.402840e+03 3.644670e+03
1332496830608333 2.545910e+05 2.251090e+05 4.693160e+03 3.647740e+03 7.745160e+03 3.686380e+03 4.901610e+02 3.448860e+03 1332496830.608333 2.545910e+05 2.251090e+05 4.693160e+03 3.647740e+03 7.745160e+03 3.686380e+03 4.901610e+02 3.448860e+03
1332496830616667 2.547800e+05 2.235990e+05 6.527380e+03 1.569870e+03 9.438430e+03 3.456580e+03 1.162520e+03 3.252010e+03 1332496830.616667 2.547800e+05 2.235990e+05 6.527380e+03 1.569870e+03 9.438430e+03 3.456580e+03 1.162520e+03 3.252010e+03
1332496830625000 2.606390e+05 2.241070e+05 6.531050e+03 1.633050e+03 9.283720e+03 4.174020e+03 2.089550e+03 2.775750e+03 1332496830.625000 2.606390e+05 2.241070e+05 6.531050e+03 1.633050e+03 9.283720e+03 4.174020e+03 2.089550e+03 2.775750e+03
1332496830633333 2.611080e+05 2.254720e+05 4.968260e+03 3.527850e+03 7.692870e+03 5.137100e+03 2.207390e+03 2.436660e+03 1332496830.633333 2.611080e+05 2.254720e+05 4.968260e+03 3.527850e+03 7.692870e+03 5.137100e+03 2.207390e+03 2.436660e+03
1332496830641667 2.557750e+05 2.237080e+05 4.963450e+03 4.017370e+03 7.701420e+03 5.269650e+03 2.284400e+03 2.842080e+03 1332496830.641667 2.557750e+05 2.237080e+05 4.963450e+03 4.017370e+03 7.701420e+03 5.269650e+03 2.284400e+03 2.842080e+03
1332496830650000 2.573980e+05 2.209470e+05 6.767500e+03 1.645710e+03 9.107070e+03 4.000180e+03 2.548860e+03 3.624770e+03 1332496830.650000 2.573980e+05 2.209470e+05 6.767500e+03 1.645710e+03 9.107070e+03 4.000180e+03 2.548860e+03 3.624770e+03
1332496830658333 2.649240e+05 2.215590e+05 6.471460e+03 1.110330e+03 9.459650e+03 3.108170e+03 1.696970e+03 3.893440e+03 1332496830.658333 2.649240e+05 2.215590e+05 6.471460e+03 1.110330e+03 9.459650e+03 3.108170e+03 1.696970e+03 3.893440e+03
1332496830666667 2.653390e+05 2.257330e+05 4.348800e+03 3.459510e+03 8.475300e+03 4.031240e+03 5.733470e+02 2.910270e+03 1332496830.666667 2.653390e+05 2.257330e+05 4.348800e+03 3.459510e+03 8.475300e+03 4.031240e+03 5.733470e+02 2.910270e+03
1332496830675000 2.568140e+05 2.269950e+05 3.479540e+03 4.949790e+03 7.499910e+03 5.624710e+03 7.516560e+02 2.347710e+03 1332496830.675000 2.568140e+05 2.269950e+05 3.479540e+03 4.949790e+03 7.499910e+03 5.624710e+03 7.516560e+02 2.347710e+03
1332496830683333 2.533160e+05 2.251610e+05 5.147060e+03 3.218430e+03 8.460160e+03 5.869300e+03 2.336320e+03 2.987960e+03 1332496830.683333 2.533160e+05 2.251610e+05 5.147060e+03 3.218430e+03 8.460160e+03 5.869300e+03 2.336320e+03 2.987960e+03
1332496830691667 2.593600e+05 2.231010e+05 5.549120e+03 1.869950e+03 8.740760e+03 4.668940e+03 2.457910e+03 3.758820e+03 1332496830.691667 2.593600e+05 2.231010e+05 5.549120e+03 1.869950e+03 8.740760e+03 4.668940e+03 2.457910e+03 3.758820e+03
1332496830700000 2.620120e+05 2.240160e+05 4.173610e+03 3.004130e+03 8.157040e+03 3.704730e+03 9.879640e+02 3.652750e+03 1332496830.700000 2.620120e+05 2.240160e+05 4.173610e+03 3.004130e+03 8.157040e+03 3.704730e+03 9.879640e+02 3.652750e+03
1332496830708333 2.571760e+05 2.244200e+05 3.517300e+03 4.118750e+03 7.822240e+03 3.718230e+03 3.726490e+01 2.953680e+03 1332496830.708333 2.571760e+05 2.244200e+05 3.517300e+03 4.118750e+03 7.822240e+03 3.718230e+03 3.726490e+01 2.953680e+03
1332496830716667 2.551460e+05 2.233220e+05 4.923980e+03 2.330680e+03 9.095910e+03 3.792400e+03 1.013070e+03 2.711240e+03 1332496830.716667 2.551460e+05 2.233220e+05 4.923980e+03 2.330680e+03 9.095910e+03 3.792400e+03 1.013070e+03 2.711240e+03
1332496830725000 2.605240e+05 2.236510e+05 5.413630e+03 1.146210e+03 8.817170e+03 4.419650e+03 2.446650e+03 2.832050e+03 1332496830.725000 2.605240e+05 2.236510e+05 5.413630e+03 1.146210e+03 8.817170e+03 4.419650e+03 2.446650e+03 2.832050e+03
1332496830733333 2.620980e+05 2.257520e+05 4.262980e+03 2.270970e+03 7.135480e+03 5.067120e+03 2.294680e+03 3.376620e+03 1332496830.733333 2.620980e+05 2.257520e+05 4.262980e+03 2.270970e+03 7.135480e+03 5.067120e+03 2.294680e+03 3.376620e+03
1332496830741667 2.568890e+05 2.253790e+05 3.606460e+03 3.568190e+03 6.552650e+03 4.970270e+03 1.516380e+03 3.662570e+03 1332496830.741667 2.568890e+05 2.253790e+05 3.606460e+03 3.568190e+03 6.552650e+03 4.970270e+03 1.516380e+03 3.662570e+03
1332496830750000 2.539480e+05 2.226310e+05 5.511700e+03 2.066300e+03 7.952660e+03 4.019910e+03 1.513140e+03 3.752630e+03 1332496830.750000 2.539480e+05 2.226310e+05 5.511700e+03 2.066300e+03 7.952660e+03 4.019910e+03 1.513140e+03 3.752630e+03
1332496830758333 2.597990e+05 2.220670e+05 5.873500e+03 6.085840e+02 9.253780e+03 2.870740e+03 1.348240e+03 3.344200e+03 1332496830.758333 2.597990e+05 2.220670e+05 5.873500e+03 6.085840e+02 9.253780e+03 2.870740e+03 1.348240e+03 3.344200e+03
1332496830766667 2.625470e+05 2.249010e+05 4.346080e+03 1.928100e+03 8.590970e+03 3.455460e+03 9.043910e+02 2.379270e+03 1332496830.766667 2.625470e+05 2.249010e+05 4.346080e+03 1.928100e+03 8.590970e+03 3.455460e+03 9.043910e+02 2.379270e+03
1332496830775000 2.561370e+05 2.267610e+05 3.423560e+03 3.379080e+03 7.471150e+03 4.894170e+03 1.153540e+03 2.031410e+03 1332496830.775000 2.561370e+05 2.267610e+05 3.423560e+03 3.379080e+03 7.471150e+03 4.894170e+03 1.153540e+03 2.031410e+03
1332496830783333 2.503260e+05 2.250130e+05 5.519980e+03 2.423970e+03 7.991760e+03 5.117950e+03 2.098790e+03 3.099240e+03 1332496830.783333 2.503260e+05 2.250130e+05 5.519980e+03 2.423970e+03 7.991760e+03 5.117950e+03 2.098790e+03 3.099240e+03
1332496830791667 2.554540e+05 2.229920e+05 6.547950e+03 4.964960e+02 8.751340e+03 3.900560e+03 2.132290e+03 4.076810e+03 1332496830.791667 2.554540e+05 2.229920e+05 6.547950e+03 4.964960e+02 8.751340e+03 3.900560e+03 2.132290e+03 4.076810e+03
1332496830800000 2.612860e+05 2.234890e+05 5.152850e+03 1.501510e+03 8.425610e+03 2.888030e+03 7.761140e+02 3.786360e+03 1332496830.800000 2.612860e+05 2.234890e+05 5.152850e+03 1.501510e+03 8.425610e+03 2.888030e+03 7.761140e+02 3.786360e+03
1332496830808333 2.589690e+05 2.240690e+05 3.832610e+03 3.001980e+03 7.979260e+03 3.182310e+03 5.271600e+01 2.874800e+03 1332496830.808333 2.589690e+05 2.240690e+05 3.832610e+03 3.001980e+03 7.979260e+03 3.182310e+03 5.271600e+01 2.874800e+03
1332496830816667 2.549460e+05 2.220350e+05 5.317880e+03 2.139800e+03 9.103140e+03 3.955610e+03 1.235170e+03 2.394150e+03 1332496830.816667 2.549460e+05 2.220350e+05 5.317880e+03 2.139800e+03 9.103140e+03 3.955610e+03 1.235170e+03 2.394150e+03
1332496830825000 2.586760e+05 2.212050e+05 6.594910e+03 5.053440e+02 9.423360e+03 4.562470e+03 2.913740e+03 2.892350e+03 1332496830.825000 2.586760e+05 2.212050e+05 6.594910e+03 5.053440e+02 9.423360e+03 4.562470e+03 2.913740e+03 2.892350e+03
1332496830833333 2.621250e+05 2.235660e+05 5.116750e+03 1.773600e+03 8.082200e+03 4.776370e+03 2.386390e+03 3.659730e+03 1332496830.833333 2.621250e+05 2.235660e+05 5.116750e+03 1.773600e+03 8.082200e+03 4.776370e+03 2.386390e+03 3.659730e+03
1332496830841667 2.578350e+05 2.259180e+05 3.714300e+03 3.477080e+03 7.205370e+03 4.554610e+03 7.115390e+02 3.878420e+03 1332496830.841667 2.578350e+05 2.259180e+05 3.714300e+03 3.477080e+03 7.205370e+03 4.554610e+03 7.115390e+02 3.878420e+03
1332496830850000 2.536600e+05 2.243710e+05 5.022450e+03 2.592430e+03 8.277200e+03 4.119370e+03 4.865080e+02 3.666740e+03 1332496830.850000 2.536600e+05 2.243710e+05 5.022450e+03 2.592430e+03 8.277200e+03 4.119370e+03 4.865080e+02 3.666740e+03
1332496830858333 2.595030e+05 2.220610e+05 6.589950e+03 6.599360e+02 9.596920e+03 3.598100e+03 1.702490e+03 3.036600e+03 1332496830.858333 2.595030e+05 2.220610e+05 6.589950e+03 6.599360e+02 9.596920e+03 3.598100e+03 1.702490e+03 3.036600e+03
1332496830866667 2.654950e+05 2.228430e+05 5.541850e+03 1.728430e+03 8.459960e+03 4.492000e+03 2.231970e+03 2.430620e+03 1332496830.866667 2.654950e+05 2.228430e+05 5.541850e+03 1.728430e+03 8.459960e+03 4.492000e+03 2.231970e+03 2.430620e+03
1332496830875000 2.609290e+05 2.249960e+05 4.000950e+03 3.745990e+03 6.983790e+03 5.430860e+03 1.855260e+03 2.533380e+03 1332496830.875000 2.609290e+05 2.249960e+05 4.000950e+03 3.745990e+03 6.983790e+03 5.430860e+03 1.855260e+03 2.533380e+03
1332496830883333 2.527160e+05 2.243350e+05 5.086560e+03 3.401150e+03 7.597970e+03 5.196120e+03 1.755720e+03 3.079760e+03 1332496830.883333 2.527160e+05 2.243350e+05 5.086560e+03 3.401150e+03 7.597970e+03 5.196120e+03 1.755720e+03 3.079760e+03
1332496830891667 2.541100e+05 2.231110e+05 6.822190e+03 1.229080e+03 9.164340e+03 3.761230e+03 1.679390e+03 3.584880e+03 1332496830.891667 2.541100e+05 2.231110e+05 6.822190e+03 1.229080e+03 9.164340e+03 3.761230e+03 1.679390e+03 3.584880e+03
1332496830900000 2.599690e+05 2.246930e+05 6.183950e+03 1.538500e+03 9.222080e+03 3.139170e+03 9.499020e+02 3.180800e+03 1332496830.900000 2.599690e+05 2.246930e+05 6.183950e+03 1.538500e+03 9.222080e+03 3.139170e+03 9.499020e+02 3.180800e+03
1332496830908333 2.590780e+05 2.269130e+05 4.388890e+03 3.694820e+03 8.195020e+03 3.933000e+03 4.260800e+02 2.388450e+03 1332496830.908333 2.590780e+05 2.269130e+05 4.388890e+03 3.694820e+03 8.195020e+03 3.933000e+03 4.260800e+02 2.388450e+03
1332496830916667 2.545630e+05 2.247600e+05 5.168440e+03 4.020940e+03 8.450270e+03 4.758910e+03 1.458900e+03 2.286430e+03 1332496830.916667 2.545630e+05 2.247600e+05 5.168440e+03 4.020940e+03 8.450270e+03 4.758910e+03 1.458900e+03 2.286430e+03
1332496830925000 2.580590e+05 2.212170e+05 6.883460e+03 1.649530e+03 9.232780e+03 4.457650e+03 3.057820e+03 3.031950e+03 1332496830.925000 2.580590e+05 2.212170e+05 6.883460e+03 1.649530e+03 9.232780e+03 4.457650e+03 3.057820e+03 3.031950e+03
1332496830933333 2.646670e+05 2.211770e+05 6.218510e+03 1.645730e+03 8.657180e+03 3.663500e+03 2.528280e+03 3.978340e+03 1332496830.933333 2.646670e+05 2.211770e+05 6.218510e+03 1.645730e+03 8.657180e+03 3.663500e+03 2.528280e+03 3.978340e+03
1332496830941667 2.629250e+05 2.243820e+05 4.627500e+03 3.635930e+03 7.892800e+03 3.431320e+03 6.045090e+02 3.901370e+03 1332496830.941667 2.629250e+05 2.243820e+05 4.627500e+03 3.635930e+03 7.892800e+03 3.431320e+03 6.045090e+02 3.901370e+03
1332496830950000 2.547080e+05 2.254480e+05 4.408250e+03 4.461040e+03 8.197170e+03 3.953750e+03 -4.453460e+01 3.154870e+03 1332496830.950000 2.547080e+05 2.254480e+05 4.408250e+03 4.461040e+03 8.197170e+03 3.953750e+03 -4.453460e+01 3.154870e+03
1332496830958333 2.537020e+05 2.246350e+05 5.825770e+03 2.577050e+03 9.590050e+03 4.569250e+03 1.460270e+03 2.785170e+03 1332496830.958333 2.537020e+05 2.246350e+05 5.825770e+03 2.577050e+03 9.590050e+03 4.569250e+03 1.460270e+03 2.785170e+03
1332496830966667 2.602060e+05 2.241400e+05 5.387980e+03 1.951160e+03 8.789510e+03 5.131660e+03 2.706380e+03 2.972480e+03 1332496830.966667 2.602060e+05 2.241400e+05 5.387980e+03 1.951160e+03 8.789510e+03 5.131660e+03 2.706380e+03 2.972480e+03
1332496830975000 2.612400e+05 2.247370e+05 3.860810e+03 3.418310e+03 7.414530e+03 5.284520e+03 2.271380e+03 3.183150e+03 1332496830.975000 2.612400e+05 2.247370e+05 3.860810e+03 3.418310e+03 7.414530e+03 5.284520e+03 2.271380e+03 3.183150e+03
1332496830983333 2.561400e+05 2.232520e+05 3.850010e+03 3.957140e+03 7.262650e+03 4.964640e+03 1.499510e+03 3.453130e+03 1332496830.983333 2.561400e+05 2.232520e+05 3.850010e+03 3.957140e+03 7.262650e+03 4.964640e+03 1.499510e+03 3.453130e+03
1332496830991667 2.561160e+05 2.213490e+05 5.594480e+03 2.054400e+03 8.835130e+03 3.662010e+03 1.485510e+03 3.613010e+03 1332496830.991667 2.561160e+05 2.213490e+05 5.594480e+03 2.054400e+03 8.835130e+03 3.662010e+03 1.485510e+03 3.613010e+03

View File

@ -1,119 +1,119 @@
1332496830008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03 1332496830.008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
1332496830016667 2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03 1332496830.016667 2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03
1332496830025000 2.576140e+05 2.233230e+05 5.003660e+03 3.525140e+03 7.165310e+03 4.685620e+03 1.715380e+03 3.440480e+03 1332496830.025000 2.576140e+05 2.233230e+05 5.003660e+03 3.525140e+03 7.165310e+03 4.685620e+03 1.715380e+03 3.440480e+03
1332496830033333 2.557800e+05 2.219150e+05 6.357310e+03 2.145290e+03 8.426970e+03 3.775350e+03 1.475390e+03 3.797240e+03 1332496830.033333 2.557800e+05 2.219150e+05 6.357310e+03 2.145290e+03 8.426970e+03 3.775350e+03 1.475390e+03 3.797240e+03
1332496830041667 2.601660e+05 2.230080e+05 6.702590e+03 1.484960e+03 9.288100e+03 3.330830e+03 1.228500e+03 3.214320e+03 1332496830.041667 2.601660e+05 2.230080e+05 6.702590e+03 1.484960e+03 9.288100e+03 3.330830e+03 1.228500e+03 3.214320e+03
1332496830050000 2.612310e+05 2.264260e+05 4.980060e+03 2.982380e+03 8.499630e+03 4.267670e+03 9.940890e+02 2.292890e+03 1332496830.050000 2.612310e+05 2.264260e+05 4.980060e+03 2.982380e+03 8.499630e+03 4.267670e+03 9.940890e+02 2.292890e+03
1332496830058333 2.551170e+05 2.266420e+05 4.584410e+03 4.656440e+03 7.860150e+03 5.317310e+03 1.473600e+03 2.111690e+03 1332496830.058333 2.551170e+05 2.266420e+05 4.584410e+03 4.656440e+03 7.860150e+03 5.317310e+03 1.473600e+03 2.111690e+03
1332496830066667 2.533000e+05 2.235540e+05 6.455090e+03 3.036650e+03 8.869750e+03 4.986310e+03 2.607360e+03 2.839590e+03 1332496830.066667 2.533000e+05 2.235540e+05 6.455090e+03 3.036650e+03 8.869750e+03 4.986310e+03 2.607360e+03 2.839590e+03
1332496830075000 2.610610e+05 2.212630e+05 6.951980e+03 1.500240e+03 9.386100e+03 3.791680e+03 2.677010e+03 3.980630e+03 1332496830.075000 2.610610e+05 2.212630e+05 6.951980e+03 1.500240e+03 9.386100e+03 3.791680e+03 2.677010e+03 3.980630e+03
1332496830083333 2.665030e+05 2.231980e+05 5.189610e+03 2.594560e+03 8.571530e+03 3.175000e+03 9.198400e+02 3.792010e+03 1332496830.083333 2.665030e+05 2.231980e+05 5.189610e+03 2.594560e+03 8.571530e+03 3.175000e+03 9.198400e+02 3.792010e+03
1332496830091667 2.606920e+05 2.251840e+05 3.782480e+03 4.642880e+03 7.662960e+03 3.917790e+03 -2.510970e+02 2.907060e+03 1332496830.091667 2.606920e+05 2.251840e+05 3.782480e+03 4.642880e+03 7.662960e+03 3.917790e+03 -2.510970e+02 2.907060e+03
1332496830100000 2.539630e+05 2.250810e+05 5.123530e+03 3.839550e+03 8.669030e+03 4.877820e+03 9.437240e+02 2.527450e+03 1332496830.100000 2.539630e+05 2.250810e+05 5.123530e+03 3.839550e+03 8.669030e+03 4.877820e+03 9.437240e+02 2.527450e+03
1332496830108333 2.565550e+05 2.241690e+05 5.930600e+03 2.298540e+03 8.906710e+03 5.331680e+03 2.549910e+03 3.053560e+03 1332496830.108333 2.565550e+05 2.241690e+05 5.930600e+03 2.298540e+03 8.906710e+03 5.331680e+03 2.549910e+03 3.053560e+03
1332496830116667 2.608890e+05 2.250100e+05 4.681130e+03 2.971870e+03 7.900040e+03 4.874080e+03 2.322430e+03 3.649120e+03 1332496830.116667 2.608890e+05 2.250100e+05 4.681130e+03 2.971870e+03 7.900040e+03 4.874080e+03 2.322430e+03 3.649120e+03
1332496830125000 2.579440e+05 2.249230e+05 3.291140e+03 4.357090e+03 7.131590e+03 4.385560e+03 1.077050e+03 3.664040e+03 1332496830.125000 2.579440e+05 2.249230e+05 3.291140e+03 4.357090e+03 7.131590e+03 4.385560e+03 1.077050e+03 3.664040e+03
1332496830133333 2.550090e+05 2.230180e+05 4.584820e+03 2.864000e+03 8.469490e+03 3.625580e+03 9.855570e+02 3.504230e+03 1332496830.133333 2.550090e+05 2.230180e+05 4.584820e+03 2.864000e+03 8.469490e+03 3.625580e+03 9.855570e+02 3.504230e+03
1332496830141667 2.601140e+05 2.219470e+05 5.676190e+03 1.210340e+03 9.393780e+03 3.390240e+03 1.654020e+03 3.018700e+03 1332496830.141667 2.601140e+05 2.219470e+05 5.676190e+03 1.210340e+03 9.393780e+03 3.390240e+03 1.654020e+03 3.018700e+03
1332496830150000 2.642770e+05 2.244380e+05 4.446620e+03 2.176720e+03 8.142090e+03 4.584880e+03 2.327830e+03 2.615800e+03 1332496830.150000 2.642770e+05 2.244380e+05 4.446620e+03 2.176720e+03 8.142090e+03 4.584880e+03 2.327830e+03 2.615800e+03
1332496830158333 2.592210e+05 2.264710e+05 2.734440e+03 4.182760e+03 6.389550e+03 5.540520e+03 1.958880e+03 2.720120e+03 1332496830.158333 2.592210e+05 2.264710e+05 2.734440e+03 4.182760e+03 6.389550e+03 5.540520e+03 1.958880e+03 2.720120e+03
1332496830166667 2.526500e+05 2.248310e+05 4.163640e+03 2.989990e+03 7.179200e+03 5.213060e+03 1.929550e+03 3.457660e+03 1332496830.166667 2.526500e+05 2.248310e+05 4.163640e+03 2.989990e+03 7.179200e+03 5.213060e+03 1.929550e+03 3.457660e+03
1332496830175000 2.570830e+05 2.220480e+05 5.759040e+03 7.024410e+02 8.566550e+03 3.552020e+03 1.832940e+03 3.956190e+03 1332496830.175000 2.570830e+05 2.220480e+05 5.759040e+03 7.024410e+02 8.566550e+03 3.552020e+03 1.832940e+03 3.956190e+03
1332496830183333 2.631300e+05 2.229670e+05 5.141140e+03 1.166120e+03 8.666960e+03 2.720370e+03 9.713740e+02 3.479730e+03 1332496830.183333 2.631300e+05 2.229670e+05 5.141140e+03 1.166120e+03 8.666960e+03 2.720370e+03 9.713740e+02 3.479730e+03
1332496830191667 2.602360e+05 2.252650e+05 3.425140e+03 3.339080e+03 7.853610e+03 3.674950e+03 5.259080e+02 2.443310e+03 1332496830.191667 2.602360e+05 2.252650e+05 3.425140e+03 3.339080e+03 7.853610e+03 3.674950e+03 5.259080e+02 2.443310e+03
1332496830200000 2.535030e+05 2.245270e+05 4.398130e+03 2.927430e+03 8.110280e+03 4.842470e+03 1.513870e+03 2.467100e+03 1332496830.200000 2.535030e+05 2.245270e+05 4.398130e+03 2.927430e+03 8.110280e+03 4.842470e+03 1.513870e+03 2.467100e+03
1332496830208333 2.561260e+05 2.226930e+05 6.043530e+03 6.562240e+02 8.797560e+03 4.832410e+03 2.832370e+03 3.426140e+03 1332496830.208333 2.561260e+05 2.226930e+05 6.043530e+03 6.562240e+02 8.797560e+03 4.832410e+03 2.832370e+03 3.426140e+03
1332496830216667 2.616770e+05 2.236080e+05 5.830460e+03 1.033910e+03 8.123940e+03 3.980690e+03 1.927960e+03 4.092720e+03 1332496830.216667 2.616770e+05 2.236080e+05 5.830460e+03 1.033910e+03 8.123940e+03 3.980690e+03 1.927960e+03 4.092720e+03
1332496830225000 2.594570e+05 2.255360e+05 4.015570e+03 2.995990e+03 7.135440e+03 3.713550e+03 3.072200e+02 3.849430e+03 1332496830.225000 2.594570e+05 2.255360e+05 4.015570e+03 2.995990e+03 7.135440e+03 3.713550e+03 3.072200e+02 3.849430e+03
1332496830233333 2.533520e+05 2.242160e+05 4.650560e+03 3.196620e+03 8.131280e+03 3.586160e+03 7.083230e+01 3.074180e+03 1332496830.233333 2.533520e+05 2.242160e+05 4.650560e+03 3.196620e+03 8.131280e+03 3.586160e+03 7.083230e+01 3.074180e+03
1332496830241667 2.561240e+05 2.215130e+05 6.100480e+03 8.219800e+02 9.757540e+03 3.474510e+03 1.647520e+03 2.559860e+03 1332496830.241667 2.561240e+05 2.215130e+05 6.100480e+03 8.219800e+02 9.757540e+03 3.474510e+03 1.647520e+03 2.559860e+03
1332496830250000 2.630240e+05 2.215590e+05 5.789960e+03 6.994170e+02 9.129740e+03 4.153080e+03 2.829250e+03 2.677270e+03 1332496830.250000 2.630240e+05 2.215590e+05 5.789960e+03 6.994170e+02 9.129740e+03 4.153080e+03 2.829250e+03 2.677270e+03
1332496830258333 2.617200e+05 2.240150e+05 4.358500e+03 2.645360e+03 7.414110e+03 4.810670e+03 2.225990e+03 3.185990e+03 1332496830.258333 2.617200e+05 2.240150e+05 4.358500e+03 2.645360e+03 7.414110e+03 4.810670e+03 2.225990e+03 3.185990e+03
1332496830266667 2.547560e+05 2.242400e+05 4.857380e+03 3.229680e+03 7.539310e+03 4.769140e+03 1.507130e+03 3.668260e+03 1332496830.266667 2.547560e+05 2.242400e+05 4.857380e+03 3.229680e+03 7.539310e+03 4.769140e+03 1.507130e+03 3.668260e+03
1332496830275000 2.568890e+05 2.226580e+05 6.473420e+03 1.214110e+03 9.010760e+03 3.848730e+03 1.303840e+03 3.778500e+03 1332496830.275000 2.568890e+05 2.226580e+05 6.473420e+03 1.214110e+03 9.010760e+03 3.848730e+03 1.303840e+03 3.778500e+03
1332496830283333 2.642080e+05 2.233160e+05 5.700450e+03 1.116560e+03 9.087610e+03 3.846680e+03 1.293590e+03 2.891560e+03 1332496830.283333 2.642080e+05 2.233160e+05 5.700450e+03 1.116560e+03 9.087610e+03 3.846680e+03 1.293590e+03 2.891560e+03
1332496830291667 2.633100e+05 2.257190e+05 3.936120e+03 3.252360e+03 7.552850e+03 4.897860e+03 1.156630e+03 2.037160e+03 1332496830.291667 2.633100e+05 2.257190e+05 3.936120e+03 3.252360e+03 7.552850e+03 4.897860e+03 1.156630e+03 2.037160e+03
1332496830300000 2.550790e+05 2.250860e+05 4.536450e+03 3.960110e+03 7.454590e+03 5.479070e+03 1.596360e+03 2.190800e+03 1332496830.300000 2.550790e+05 2.250860e+05 4.536450e+03 3.960110e+03 7.454590e+03 5.479070e+03 1.596360e+03 2.190800e+03
1332496830308333 2.544870e+05 2.225080e+05 6.635860e+03 1.758850e+03 8.732970e+03 4.466970e+03 2.650360e+03 3.139310e+03 1332496830.308333 2.544870e+05 2.225080e+05 6.635860e+03 1.758850e+03 8.732970e+03 4.466970e+03 2.650360e+03 3.139310e+03
1332496830316667 2.612410e+05 2.224320e+05 6.702270e+03 1.085130e+03 8.989230e+03 3.112990e+03 1.933560e+03 3.828410e+03 1332496830.316667 2.612410e+05 2.224320e+05 6.702270e+03 1.085130e+03 8.989230e+03 3.112990e+03 1.933560e+03 3.828410e+03
1332496830325000 2.621190e+05 2.255870e+05 4.714950e+03 2.892360e+03 8.107820e+03 2.961310e+03 2.399780e+02 3.273720e+03 1332496830.325000 2.621190e+05 2.255870e+05 4.714950e+03 2.892360e+03 8.107820e+03 2.961310e+03 2.399780e+02 3.273720e+03
1332496830333333 2.549990e+05 2.265140e+05 4.532090e+03 4.126900e+03 8.200130e+03 3.872590e+03 5.608900e+01 2.370580e+03 1332496830.333333 2.549990e+05 2.265140e+05 4.532090e+03 4.126900e+03 8.200130e+03 3.872590e+03 5.608900e+01 2.370580e+03
1332496830341667 2.542890e+05 2.240330e+05 6.538810e+03 2.251440e+03 9.419430e+03 4.564450e+03 2.077810e+03 2.508170e+03 1332496830.341667 2.542890e+05 2.240330e+05 6.538810e+03 2.251440e+03 9.419430e+03 4.564450e+03 2.077810e+03 2.508170e+03
1332496830350000 2.618900e+05 2.219600e+05 6.846090e+03 1.475270e+03 9.125590e+03 4.598290e+03 3.299220e+03 3.475420e+03 1332496830.350000 2.618900e+05 2.219600e+05 6.846090e+03 1.475270e+03 9.125590e+03 4.598290e+03 3.299220e+03 3.475420e+03
1332496830358333 2.645020e+05 2.230850e+05 5.066380e+03 3.270560e+03 7.933170e+03 4.173710e+03 1.908910e+03 3.867460e+03 1332496830.358333 2.645020e+05 2.230850e+05 5.066380e+03 3.270560e+03 7.933170e+03 4.173710e+03 1.908910e+03 3.867460e+03
1332496830366667 2.578890e+05 2.236560e+05 4.201660e+03 4.473640e+03 7.688340e+03 4.161580e+03 6.875790e+02 3.653690e+03 1332496830.366667 2.578890e+05 2.236560e+05 4.201660e+03 4.473640e+03 7.688340e+03 4.161580e+03 6.875790e+02 3.653690e+03
1332496830375000 2.542700e+05 2.231510e+05 5.715140e+03 2.752140e+03 9.273320e+03 3.772950e+03 8.964040e+02 3.256060e+03 1332496830.375000 2.542700e+05 2.231510e+05 5.715140e+03 2.752140e+03 9.273320e+03 3.772950e+03 8.964040e+02 3.256060e+03
1332496830383333 2.582570e+05 2.242170e+05 6.114310e+03 1.856860e+03 9.604320e+03 4.200490e+03 1.764380e+03 2.939220e+03 1332496830.383333 2.582570e+05 2.242170e+05 6.114310e+03 1.856860e+03 9.604320e+03 4.200490e+03 1.764380e+03 2.939220e+03
1332496830391667 2.600200e+05 2.268680e+05 4.237530e+03 3.605880e+03 8.066220e+03 5.430250e+03 2.138580e+03 2.696710e+03 1332496830.391667 2.600200e+05 2.268680e+05 4.237530e+03 3.605880e+03 8.066220e+03 5.430250e+03 2.138580e+03 2.696710e+03
1332496830400000 2.550830e+05 2.259240e+05 3.350310e+03 4.853070e+03 7.045820e+03 5.925200e+03 1.893610e+03 2.897340e+03 1332496830.400000 2.550830e+05 2.259240e+05 3.350310e+03 4.853070e+03 7.045820e+03 5.925200e+03 1.893610e+03 2.897340e+03
1332496830408333 2.544530e+05 2.221270e+05 5.271330e+03 2.491500e+03 8.436680e+03 5.032080e+03 2.436050e+03 3.724590e+03 1332496830.408333 2.544530e+05 2.221270e+05 5.271330e+03 2.491500e+03 8.436680e+03 5.032080e+03 2.436050e+03 3.724590e+03
1332496830416667 2.625880e+05 2.199500e+05 5.994620e+03 7.892740e+02 9.029650e+03 3.515740e+03 1.953570e+03 4.014520e+03 1332496830.416667 2.625880e+05 2.199500e+05 5.994620e+03 7.892740e+02 9.029650e+03 3.515740e+03 1.953570e+03 4.014520e+03
1332496830425000 2.656100e+05 2.233330e+05 4.391410e+03 2.400960e+03 8.146460e+03 3.536960e+03 5.302320e+02 3.133920e+03 1332496830.425000 2.656100e+05 2.233330e+05 4.391410e+03 2.400960e+03 8.146460e+03 3.536960e+03 5.302320e+02 3.133920e+03
1332496830433333 2.574700e+05 2.269770e+05 2.975320e+03 4.633530e+03 7.278560e+03 4.640100e+03 -5.015020e+01 2.024960e+03 1332496830.433333 2.574700e+05 2.269770e+05 2.975320e+03 4.633530e+03 7.278560e+03 4.640100e+03 -5.015020e+01 2.024960e+03
1332496830441667 2.506870e+05 2.263310e+05 4.517860e+03 3.183800e+03 8.072600e+03 5.281660e+03 1.605140e+03 2.335140e+03 1332496830.441667 2.506870e+05 2.263310e+05 4.517860e+03 3.183800e+03 8.072600e+03 5.281660e+03 1.605140e+03 2.335140e+03
1332496830450000 2.555630e+05 2.244950e+05 5.551000e+03 1.101300e+03 8.461490e+03 4.725700e+03 2.726670e+03 3.480540e+03 1332496830.450000 2.555630e+05 2.244950e+05 5.551000e+03 1.101300e+03 8.461490e+03 4.725700e+03 2.726670e+03 3.480540e+03
1332496830458333 2.613350e+05 2.246450e+05 4.764680e+03 1.557020e+03 7.833350e+03 3.524810e+03 1.577410e+03 4.038620e+03 1332496830.458333 2.613350e+05 2.246450e+05 4.764680e+03 1.557020e+03 7.833350e+03 3.524810e+03 1.577410e+03 4.038620e+03
1332496830466667 2.602690e+05 2.240080e+05 3.558030e+03 2.987610e+03 7.362440e+03 3.279230e+03 5.624420e+02 3.786550e+03 1332496830.466667 2.602690e+05 2.240080e+05 3.558030e+03 2.987610e+03 7.362440e+03 3.279230e+03 5.624420e+02 3.786550e+03
1332496830475000 2.574350e+05 2.217770e+05 4.972600e+03 2.166880e+03 8.481440e+03 3.328720e+03 1.037130e+03 3.271370e+03 1332496830.475000 2.574350e+05 2.217770e+05 4.972600e+03 2.166880e+03 8.481440e+03 3.328720e+03 1.037130e+03 3.271370e+03
1332496830483333 2.610460e+05 2.215500e+05 5.816180e+03 5.902170e+02 9.120930e+03 3.895400e+03 2.382670e+03 2.824170e+03 1332496830.483333 2.610460e+05 2.215500e+05 5.816180e+03 5.902170e+02 9.120930e+03 3.895400e+03 2.382670e+03 2.824170e+03
1332496830491667 2.627660e+05 2.244730e+05 4.835050e+03 1.785770e+03 7.880760e+03 4.745620e+03 2.443660e+03 3.229550e+03 1332496830.491667 2.627660e+05 2.244730e+05 4.835050e+03 1.785770e+03 7.880760e+03 4.745620e+03 2.443660e+03 3.229550e+03
1332496830500000 2.565090e+05 2.264130e+05 3.758870e+03 3.461200e+03 6.743770e+03 4.928960e+03 1.536620e+03 3.546690e+03 1332496830.500000 2.565090e+05 2.264130e+05 3.758870e+03 3.461200e+03 6.743770e+03 4.928960e+03 1.536620e+03 3.546690e+03
1332496830508333 2.507930e+05 2.243720e+05 5.218490e+03 2.865260e+03 7.803960e+03 4.351090e+03 1.333820e+03 3.680490e+03 1332496830.508333 2.507930e+05 2.243720e+05 5.218490e+03 2.865260e+03 7.803960e+03 4.351090e+03 1.333820e+03 3.680490e+03
1332496830516667 2.563190e+05 2.220660e+05 6.403970e+03 7.323450e+02 9.627760e+03 3.089300e+03 1.516780e+03 3.653690e+03 1332496830.516667 2.563190e+05 2.220660e+05 6.403970e+03 7.323450e+02 9.627760e+03 3.089300e+03 1.516780e+03 3.653690e+03
1332496830525000 2.633430e+05 2.232350e+05 5.200430e+03 1.388580e+03 9.372850e+03 3.371230e+03 1.450390e+03 2.678910e+03 1332496830.525000 2.633430e+05 2.232350e+05 5.200430e+03 1.388580e+03 9.372850e+03 3.371230e+03 1.450390e+03 2.678910e+03
1332496830533333 2.609030e+05 2.251100e+05 3.722580e+03 3.246660e+03 7.876540e+03 4.716810e+03 1.498440e+03 2.116520e+03 1332496830.533333 2.609030e+05 2.251100e+05 3.722580e+03 3.246660e+03 7.876540e+03 4.716810e+03 1.498440e+03 2.116520e+03
1332496830541667 2.544160e+05 2.237690e+05 4.841650e+03 2.956400e+03 8.115920e+03 5.392360e+03 2.142810e+03 2.652320e+03 1332496830.541667 2.544160e+05 2.237690e+05 4.841650e+03 2.956400e+03 8.115920e+03 5.392360e+03 2.142810e+03 2.652320e+03
1332496830550000 2.566980e+05 2.221720e+05 6.471230e+03 9.703960e+02 8.834980e+03 4.816840e+03 2.376630e+03 3.605860e+03 1332496830.550000 2.566980e+05 2.221720e+05 6.471230e+03 9.703960e+02 8.834980e+03 4.816840e+03 2.376630e+03 3.605860e+03
1332496830558333 2.618410e+05 2.235370e+05 5.500740e+03 1.189660e+03 8.365730e+03 4.016470e+03 1.042270e+03 3.821200e+03 1332496830.558333 2.618410e+05 2.235370e+05 5.500740e+03 1.189660e+03 8.365730e+03 4.016470e+03 1.042270e+03 3.821200e+03
1332496830566667 2.595030e+05 2.258400e+05 3.827930e+03 3.088840e+03 7.676140e+03 3.978310e+03 -3.570070e+02 3.016420e+03 1332496830.566667 2.595030e+05 2.258400e+05 3.827930e+03 3.088840e+03 7.676140e+03 3.978310e+03 -3.570070e+02 3.016420e+03
1332496830575000 2.534570e+05 2.246360e+05 4.914610e+03 3.097450e+03 8.224900e+03 4.321440e+03 1.713740e+02 2.412360e+03 1332496830.575000 2.534570e+05 2.246360e+05 4.914610e+03 3.097450e+03 8.224900e+03 4.321440e+03 1.713740e+02 2.412360e+03
1332496830583333 2.560290e+05 2.222210e+05 6.841800e+03 1.028500e+03 9.252300e+03 4.387570e+03 2.418140e+03 2.510100e+03 1332496830.583333 2.560290e+05 2.222210e+05 6.841800e+03 1.028500e+03 9.252300e+03 4.387570e+03 2.418140e+03 2.510100e+03
1332496830591667 2.628400e+05 2.225500e+05 6.210250e+03 1.410730e+03 8.538900e+03 4.152580e+03 3.009300e+03 3.219760e+03 1332496830.591667 2.628400e+05 2.225500e+05 6.210250e+03 1.410730e+03 8.538900e+03 4.152580e+03 3.009300e+03 3.219760e+03
1332496830600000 2.616330e+05 2.250650e+05 4.284530e+03 3.357210e+03 7.282170e+03 3.823590e+03 1.402840e+03 3.644670e+03 1332496830.600000 2.616330e+05 2.250650e+05 4.284530e+03 3.357210e+03 7.282170e+03 3.823590e+03 1.402840e+03 3.644670e+03
1332496830608333 2.545910e+05 2.251090e+05 4.693160e+03 3.647740e+03 7.745160e+03 3.686380e+03 4.901610e+02 3.448860e+03 1332496830.608333 2.545910e+05 2.251090e+05 4.693160e+03 3.647740e+03 7.745160e+03 3.686380e+03 4.901610e+02 3.448860e+03
1332496830616667 2.547800e+05 2.235990e+05 6.527380e+03 1.569870e+03 9.438430e+03 3.456580e+03 1.162520e+03 3.252010e+03 1332496830.616667 2.547800e+05 2.235990e+05 6.527380e+03 1.569870e+03 9.438430e+03 3.456580e+03 1.162520e+03 3.252010e+03
1332496830625000 2.606390e+05 2.241070e+05 6.531050e+03 1.633050e+03 9.283720e+03 4.174020e+03 2.089550e+03 2.775750e+03 1332496830.625000 2.606390e+05 2.241070e+05 6.531050e+03 1.633050e+03 9.283720e+03 4.174020e+03 2.089550e+03 2.775750e+03
1332496830633333 2.611080e+05 2.254720e+05 4.968260e+03 3.527850e+03 7.692870e+03 5.137100e+03 2.207390e+03 2.436660e+03 1332496830.633333 2.611080e+05 2.254720e+05 4.968260e+03 3.527850e+03 7.692870e+03 5.137100e+03 2.207390e+03 2.436660e+03
1332496830641667 2.557750e+05 2.237080e+05 4.963450e+03 4.017370e+03 7.701420e+03 5.269650e+03 2.284400e+03 2.842080e+03 1332496830.641667 2.557750e+05 2.237080e+05 4.963450e+03 4.017370e+03 7.701420e+03 5.269650e+03 2.284400e+03 2.842080e+03
1332496830650000 2.573980e+05 2.209470e+05 6.767500e+03 1.645710e+03 9.107070e+03 4.000180e+03 2.548860e+03 3.624770e+03 1332496830.650000 2.573980e+05 2.209470e+05 6.767500e+03 1.645710e+03 9.107070e+03 4.000180e+03 2.548860e+03 3.624770e+03
1332496830658333 2.649240e+05 2.215590e+05 6.471460e+03 1.110330e+03 9.459650e+03 3.108170e+03 1.696970e+03 3.893440e+03 1332496830.658333 2.649240e+05 2.215590e+05 6.471460e+03 1.110330e+03 9.459650e+03 3.108170e+03 1.696970e+03 3.893440e+03
1332496830666667 2.653390e+05 2.257330e+05 4.348800e+03 3.459510e+03 8.475300e+03 4.031240e+03 5.733470e+02 2.910270e+03 1332496830.666667 2.653390e+05 2.257330e+05 4.348800e+03 3.459510e+03 8.475300e+03 4.031240e+03 5.733470e+02 2.910270e+03
1332496830675000 2.568140e+05 2.269950e+05 3.479540e+03 4.949790e+03 7.499910e+03 5.624710e+03 7.516560e+02 2.347710e+03 1332496830.675000 2.568140e+05 2.269950e+05 3.479540e+03 4.949790e+03 7.499910e+03 5.624710e+03 7.516560e+02 2.347710e+03
1332496830683333 2.533160e+05 2.251610e+05 5.147060e+03 3.218430e+03 8.460160e+03 5.869300e+03 2.336320e+03 2.987960e+03 1332496830.683333 2.533160e+05 2.251610e+05 5.147060e+03 3.218430e+03 8.460160e+03 5.869300e+03 2.336320e+03 2.987960e+03
1332496830691667 2.593600e+05 2.231010e+05 5.549120e+03 1.869950e+03 8.740760e+03 4.668940e+03 2.457910e+03 3.758820e+03 1332496830.691667 2.593600e+05 2.231010e+05 5.549120e+03 1.869950e+03 8.740760e+03 4.668940e+03 2.457910e+03 3.758820e+03
1332496830700000 2.620120e+05 2.240160e+05 4.173610e+03 3.004130e+03 8.157040e+03 3.704730e+03 9.879640e+02 3.652750e+03 1332496830.700000 2.620120e+05 2.240160e+05 4.173610e+03 3.004130e+03 8.157040e+03 3.704730e+03 9.879640e+02 3.652750e+03
1332496830708333 2.571760e+05 2.244200e+05 3.517300e+03 4.118750e+03 7.822240e+03 3.718230e+03 3.726490e+01 2.953680e+03 1332496830.708333 2.571760e+05 2.244200e+05 3.517300e+03 4.118750e+03 7.822240e+03 3.718230e+03 3.726490e+01 2.953680e+03
1332496830716667 2.551460e+05 2.233220e+05 4.923980e+03 2.330680e+03 9.095910e+03 3.792400e+03 1.013070e+03 2.711240e+03 1332496830.716667 2.551460e+05 2.233220e+05 4.923980e+03 2.330680e+03 9.095910e+03 3.792400e+03 1.013070e+03 2.711240e+03
1332496830725000 2.605240e+05 2.236510e+05 5.413630e+03 1.146210e+03 8.817170e+03 4.419650e+03 2.446650e+03 2.832050e+03 1332496830.725000 2.605240e+05 2.236510e+05 5.413630e+03 1.146210e+03 8.817170e+03 4.419650e+03 2.446650e+03 2.832050e+03
1332496830733333 2.620980e+05 2.257520e+05 4.262980e+03 2.270970e+03 7.135480e+03 5.067120e+03 2.294680e+03 3.376620e+03 1332496830.733333 2.620980e+05 2.257520e+05 4.262980e+03 2.270970e+03 7.135480e+03 5.067120e+03 2.294680e+03 3.376620e+03
1332496830741667 2.568890e+05 2.253790e+05 3.606460e+03 3.568190e+03 6.552650e+03 4.970270e+03 1.516380e+03 3.662570e+03 1332496830.741667 2.568890e+05 2.253790e+05 3.606460e+03 3.568190e+03 6.552650e+03 4.970270e+03 1.516380e+03 3.662570e+03
1332496830750000 2.539480e+05 2.226310e+05 5.511700e+03 2.066300e+03 7.952660e+03 4.019910e+03 1.513140e+03 3.752630e+03 1332496830.750000 2.539480e+05 2.226310e+05 5.511700e+03 2.066300e+03 7.952660e+03 4.019910e+03 1.513140e+03 3.752630e+03
1332496830758333 2.597990e+05 2.220670e+05 5.873500e+03 6.085840e+02 9.253780e+03 2.870740e+03 1.348240e+03 3.344200e+03 1332496830.758333 2.597990e+05 2.220670e+05 5.873500e+03 6.085840e+02 9.253780e+03 2.870740e+03 1.348240e+03 3.344200e+03
1332496830766667 2.625470e+05 2.249010e+05 4.346080e+03 1.928100e+03 8.590970e+03 3.455460e+03 9.043910e+02 2.379270e+03 1332496830.766667 2.625470e+05 2.249010e+05 4.346080e+03 1.928100e+03 8.590970e+03 3.455460e+03 9.043910e+02 2.379270e+03
1332496830775000 2.561370e+05 2.267610e+05 3.423560e+03 3.379080e+03 7.471150e+03 4.894170e+03 1.153540e+03 2.031410e+03 1332496830.775000 2.561370e+05 2.267610e+05 3.423560e+03 3.379080e+03 7.471150e+03 4.894170e+03 1.153540e+03 2.031410e+03
1332496830783333 2.503260e+05 2.250130e+05 5.519980e+03 2.423970e+03 7.991760e+03 5.117950e+03 2.098790e+03 3.099240e+03 1332496830.783333 2.503260e+05 2.250130e+05 5.519980e+03 2.423970e+03 7.991760e+03 5.117950e+03 2.098790e+03 3.099240e+03
1332496830791667 2.554540e+05 2.229920e+05 6.547950e+03 4.964960e+02 8.751340e+03 3.900560e+03 2.132290e+03 4.076810e+03 1332496830.791667 2.554540e+05 2.229920e+05 6.547950e+03 4.964960e+02 8.751340e+03 3.900560e+03 2.132290e+03 4.076810e+03
1332496830800000 2.612860e+05 2.234890e+05 5.152850e+03 1.501510e+03 8.425610e+03 2.888030e+03 7.761140e+02 3.786360e+03 1332496830.800000 2.612860e+05 2.234890e+05 5.152850e+03 1.501510e+03 8.425610e+03 2.888030e+03 7.761140e+02 3.786360e+03
1332496830808333 2.589690e+05 2.240690e+05 3.832610e+03 3.001980e+03 7.979260e+03 3.182310e+03 5.271600e+01 2.874800e+03 1332496830.808333 2.589690e+05 2.240690e+05 3.832610e+03 3.001980e+03 7.979260e+03 3.182310e+03 5.271600e+01 2.874800e+03
1332496830816667 2.549460e+05 2.220350e+05 5.317880e+03 2.139800e+03 9.103140e+03 3.955610e+03 1.235170e+03 2.394150e+03 1332496830.816667 2.549460e+05 2.220350e+05 5.317880e+03 2.139800e+03 9.103140e+03 3.955610e+03 1.235170e+03 2.394150e+03
1332496830825000 2.586760e+05 2.212050e+05 6.594910e+03 5.053440e+02 9.423360e+03 4.562470e+03 2.913740e+03 2.892350e+03 1332496830.825000 2.586760e+05 2.212050e+05 6.594910e+03 5.053440e+02 9.423360e+03 4.562470e+03 2.913740e+03 2.892350e+03
1332496830833333 2.621250e+05 2.235660e+05 5.116750e+03 1.773600e+03 8.082200e+03 4.776370e+03 2.386390e+03 3.659730e+03 1332496830.833333 2.621250e+05 2.235660e+05 5.116750e+03 1.773600e+03 8.082200e+03 4.776370e+03 2.386390e+03 3.659730e+03
1332496830841667 2.578350e+05 2.259180e+05 3.714300e+03 3.477080e+03 7.205370e+03 4.554610e+03 7.115390e+02 3.878420e+03 1332496830.841667 2.578350e+05 2.259180e+05 3.714300e+03 3.477080e+03 7.205370e+03 4.554610e+03 7.115390e+02 3.878420e+03
1332496830850000 2.536600e+05 2.243710e+05 5.022450e+03 2.592430e+03 8.277200e+03 4.119370e+03 4.865080e+02 3.666740e+03 1332496830.850000 2.536600e+05 2.243710e+05 5.022450e+03 2.592430e+03 8.277200e+03 4.119370e+03 4.865080e+02 3.666740e+03
1332496830858333 2.595030e+05 2.220610e+05 6.589950e+03 6.599360e+02 9.596920e+03 3.598100e+03 1.702490e+03 3.036600e+03 1332496830.858333 2.595030e+05 2.220610e+05 6.589950e+03 6.599360e+02 9.596920e+03 3.598100e+03 1.702490e+03 3.036600e+03
1332496830866667 2.654950e+05 2.228430e+05 5.541850e+03 1.728430e+03 8.459960e+03 4.492000e+03 2.231970e+03 2.430620e+03 1332496830.866667 2.654950e+05 2.228430e+05 5.541850e+03 1.728430e+03 8.459960e+03 4.492000e+03 2.231970e+03 2.430620e+03
1332496830875000 2.609290e+05 2.249960e+05 4.000950e+03 3.745990e+03 6.983790e+03 5.430860e+03 1.855260e+03 2.533380e+03 1332496830.875000 2.609290e+05 2.249960e+05 4.000950e+03 3.745990e+03 6.983790e+03 5.430860e+03 1.855260e+03 2.533380e+03
1332496830883333 2.527160e+05 2.243350e+05 5.086560e+03 3.401150e+03 7.597970e+03 5.196120e+03 1.755720e+03 3.079760e+03 1332496830.883333 2.527160e+05 2.243350e+05 5.086560e+03 3.401150e+03 7.597970e+03 5.196120e+03 1.755720e+03 3.079760e+03
1332496830891667 2.541100e+05 2.231110e+05 6.822190e+03 1.229080e+03 9.164340e+03 3.761230e+03 1.679390e+03 3.584880e+03 1332496830.891667 2.541100e+05 2.231110e+05 6.822190e+03 1.229080e+03 9.164340e+03 3.761230e+03 1.679390e+03 3.584880e+03
1332496830900000 2.599690e+05 2.246930e+05 6.183950e+03 1.538500e+03 9.222080e+03 3.139170e+03 9.499020e+02 3.180800e+03 1332496830.900000 2.599690e+05 2.246930e+05 6.183950e+03 1.538500e+03 9.222080e+03 3.139170e+03 9.499020e+02 3.180800e+03
1332496830908333 2.590780e+05 2.269130e+05 4.388890e+03 3.694820e+03 8.195020e+03 3.933000e+03 4.260800e+02 2.388450e+03 1332496830.908333 2.590780e+05 2.269130e+05 4.388890e+03 3.694820e+03 8.195020e+03 3.933000e+03 4.260800e+02 2.388450e+03
1332496830916667 2.545630e+05 2.247600e+05 5.168440e+03 4.020940e+03 8.450270e+03 4.758910e+03 1.458900e+03 2.286430e+03 1332496830.916667 2.545630e+05 2.247600e+05 5.168440e+03 4.020940e+03 8.450270e+03 4.758910e+03 1.458900e+03 2.286430e+03
1332496830925000 2.580590e+05 2.212170e+05 6.883460e+03 1.649530e+03 9.232780e+03 4.457650e+03 3.057820e+03 3.031950e+03 1332496830.925000 2.580590e+05 2.212170e+05 6.883460e+03 1.649530e+03 9.232780e+03 4.457650e+03 3.057820e+03 3.031950e+03
1332496830933333 2.646670e+05 2.211770e+05 6.218510e+03 1.645730e+03 8.657180e+03 3.663500e+03 2.528280e+03 3.978340e+03 1332496830.933333 2.646670e+05 2.211770e+05 6.218510e+03 1.645730e+03 8.657180e+03 3.663500e+03 2.528280e+03 3.978340e+03
1332496830941667 2.629250e+05 2.243820e+05 4.627500e+03 3.635930e+03 7.892800e+03 3.431320e+03 6.045090e+02 3.901370e+03 1332496830.941667 2.629250e+05 2.243820e+05 4.627500e+03 3.635930e+03 7.892800e+03 3.431320e+03 6.045090e+02 3.901370e+03
1332496830950000 2.547080e+05 2.254480e+05 4.408250e+03 4.461040e+03 8.197170e+03 3.953750e+03 -4.453460e+01 3.154870e+03 1332496830.950000 2.547080e+05 2.254480e+05 4.408250e+03 4.461040e+03 8.197170e+03 3.953750e+03 -4.453460e+01 3.154870e+03
1332496830958333 2.537020e+05 2.246350e+05 5.825770e+03 2.577050e+03 9.590050e+03 4.569250e+03 1.460270e+03 2.785170e+03 1332496830.958333 2.537020e+05 2.246350e+05 5.825770e+03 2.577050e+03 9.590050e+03 4.569250e+03 1.460270e+03 2.785170e+03
1332496830966667 2.602060e+05 2.241400e+05 5.387980e+03 1.951160e+03 8.789510e+03 5.131660e+03 2.706380e+03 2.972480e+03 1332496830.966667 2.602060e+05 2.241400e+05 5.387980e+03 1.951160e+03 8.789510e+03 5.131660e+03 2.706380e+03 2.972480e+03
1332496830975000 2.612400e+05 2.247370e+05 3.860810e+03 3.418310e+03 7.414530e+03 5.284520e+03 2.271380e+03 3.183150e+03 1332496830.975000 2.612400e+05 2.247370e+05 3.860810e+03 3.418310e+03 7.414530e+03 5.284520e+03 2.271380e+03 3.183150e+03
1332496830983333 2.561400e+05 2.232520e+05 3.850010e+03 3.957140e+03 7.262650e+03 4.964640e+03 1.499510e+03 3.453130e+03 1332496830.983333 2.561400e+05 2.232520e+05 3.850010e+03 3.957140e+03 7.262650e+03 4.964640e+03 1.499510e+03 3.453130e+03
1332496830991667 2.561160e+05 2.213490e+05 5.594480e+03 2.054400e+03 8.835130e+03 3.662010e+03 1.485510e+03 3.613010e+03 1332496830.991667 2.561160e+05 2.213490e+05 5.594480e+03 2.054400e+03 8.835130e+03 3.662010e+03 1.485510e+03 3.613010e+03

View File

@ -1 +1 @@
1332496830008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03 1332496830.008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03

View File

@ -1,2 +1,2 @@
1332496830008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03 1332496830.008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
1332496830016667 2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03 1332496830.016667 2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03

View File

@ -1,124 +1,124 @@
# path: /newton/prep # path: /newton/prep
# layout: float32_8 # layout: float32_8
# start: 1332496830000000 # start: 1332496830.000000
# end: 1332496830999000 # end: 1332496830.999000
1332496830000000 2.517740e+05 2.242410e+05 5.688100e+03 1.915530e+03 9.329220e+03 4.183710e+03 1.212350e+03 2.641790e+03 1332496830.000000 2.517740e+05 2.242410e+05 5.688100e+03 1.915530e+03 9.329220e+03 4.183710e+03 1.212350e+03 2.641790e+03
1332496830008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03 1332496830.008333 2.595670e+05 2.226980e+05 6.207600e+03 6.786720e+02 9.380230e+03 4.575580e+03 2.830610e+03 2.688630e+03
1332496830016667 2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03 1332496830.016667 2.630730e+05 2.233040e+05 4.961640e+03 2.197120e+03 7.687310e+03 4.861860e+03 2.732780e+03 3.008540e+03
1332496830025000 2.576140e+05 2.233230e+05 5.003660e+03 3.525140e+03 7.165310e+03 4.685620e+03 1.715380e+03 3.440480e+03 1332496830.025000 2.576140e+05 2.233230e+05 5.003660e+03 3.525140e+03 7.165310e+03 4.685620e+03 1.715380e+03 3.440480e+03
1332496830033333 2.557800e+05 2.219150e+05 6.357310e+03 2.145290e+03 8.426970e+03 3.775350e+03 1.475390e+03 3.797240e+03 1332496830.033333 2.557800e+05 2.219150e+05 6.357310e+03 2.145290e+03 8.426970e+03 3.775350e+03 1.475390e+03 3.797240e+03
1332496830041667 2.601660e+05 2.230080e+05 6.702590e+03 1.484960e+03 9.288100e+03 3.330830e+03 1.228500e+03 3.214320e+03 1332496830.041667 2.601660e+05 2.230080e+05 6.702590e+03 1.484960e+03 9.288100e+03 3.330830e+03 1.228500e+03 3.214320e+03
1332496830050000 2.612310e+05 2.264260e+05 4.980060e+03 2.982380e+03 8.499630e+03 4.267670e+03 9.940890e+02 2.292890e+03 1332496830.050000 2.612310e+05 2.264260e+05 4.980060e+03 2.982380e+03 8.499630e+03 4.267670e+03 9.940890e+02 2.292890e+03
1332496830058333 2.551170e+05 2.266420e+05 4.584410e+03 4.656440e+03 7.860150e+03 5.317310e+03 1.473600e+03 2.111690e+03 1332496830.058333 2.551170e+05 2.266420e+05 4.584410e+03 4.656440e+03 7.860150e+03 5.317310e+03 1.473600e+03 2.111690e+03
1332496830066667 2.533000e+05 2.235540e+05 6.455090e+03 3.036650e+03 8.869750e+03 4.986310e+03 2.607360e+03 2.839590e+03 1332496830.066667 2.533000e+05 2.235540e+05 6.455090e+03 3.036650e+03 8.869750e+03 4.986310e+03 2.607360e+03 2.839590e+03
1332496830075000 2.610610e+05 2.212630e+05 6.951980e+03 1.500240e+03 9.386100e+03 3.791680e+03 2.677010e+03 3.980630e+03 1332496830.075000 2.610610e+05 2.212630e+05 6.951980e+03 1.500240e+03 9.386100e+03 3.791680e+03 2.677010e+03 3.980630e+03
1332496830083333 2.665030e+05 2.231980e+05 5.189610e+03 2.594560e+03 8.571530e+03 3.175000e+03 9.198400e+02 3.792010e+03 1332496830.083333 2.665030e+05 2.231980e+05 5.189610e+03 2.594560e+03 8.571530e+03 3.175000e+03 9.198400e+02 3.792010e+03
1332496830091667 2.606920e+05 2.251840e+05 3.782480e+03 4.642880e+03 7.662960e+03 3.917790e+03 -2.510970e+02 2.907060e+03 1332496830.091667 2.606920e+05 2.251840e+05 3.782480e+03 4.642880e+03 7.662960e+03 3.917790e+03 -2.510970e+02 2.907060e+03
1332496830100000 2.539630e+05 2.250810e+05 5.123530e+03 3.839550e+03 8.669030e+03 4.877820e+03 9.437240e+02 2.527450e+03 1332496830.100000 2.539630e+05 2.250810e+05 5.123530e+03 3.839550e+03 8.669030e+03 4.877820e+03 9.437240e+02 2.527450e+03
1332496830108333 2.565550e+05 2.241690e+05 5.930600e+03 2.298540e+03 8.906710e+03 5.331680e+03 2.549910e+03 3.053560e+03 1332496830.108333 2.565550e+05 2.241690e+05 5.930600e+03 2.298540e+03 8.906710e+03 5.331680e+03 2.549910e+03 3.053560e+03
1332496830116667 2.608890e+05 2.250100e+05 4.681130e+03 2.971870e+03 7.900040e+03 4.874080e+03 2.322430e+03 3.649120e+03 1332496830.116667 2.608890e+05 2.250100e+05 4.681130e+03 2.971870e+03 7.900040e+03 4.874080e+03 2.322430e+03 3.649120e+03
1332496830125000 2.579440e+05 2.249230e+05 3.291140e+03 4.357090e+03 7.131590e+03 4.385560e+03 1.077050e+03 3.664040e+03 1332496830.125000 2.579440e+05 2.249230e+05 3.291140e+03 4.357090e+03 7.131590e+03 4.385560e+03 1.077050e+03 3.664040e+03
1332496830133333 2.550090e+05 2.230180e+05 4.584820e+03 2.864000e+03 8.469490e+03 3.625580e+03 9.855570e+02 3.504230e+03 1332496830.133333 2.550090e+05 2.230180e+05 4.584820e+03 2.864000e+03 8.469490e+03 3.625580e+03 9.855570e+02 3.504230e+03
1332496830141667 2.601140e+05 2.219470e+05 5.676190e+03 1.210340e+03 9.393780e+03 3.390240e+03 1.654020e+03 3.018700e+03 1332496830.141667 2.601140e+05 2.219470e+05 5.676190e+03 1.210340e+03 9.393780e+03 3.390240e+03 1.654020e+03 3.018700e+03
1332496830150000 2.642770e+05 2.244380e+05 4.446620e+03 2.176720e+03 8.142090e+03 4.584880e+03 2.327830e+03 2.615800e+03 1332496830.150000 2.642770e+05 2.244380e+05 4.446620e+03 2.176720e+03 8.142090e+03 4.584880e+03 2.327830e+03 2.615800e+03
1332496830158333 2.592210e+05 2.264710e+05 2.734440e+03 4.182760e+03 6.389550e+03 5.540520e+03 1.958880e+03 2.720120e+03 1332496830.158333 2.592210e+05 2.264710e+05 2.734440e+03 4.182760e+03 6.389550e+03 5.540520e+03 1.958880e+03 2.720120e+03
1332496830166667 2.526500e+05 2.248310e+05 4.163640e+03 2.989990e+03 7.179200e+03 5.213060e+03 1.929550e+03 3.457660e+03 1332496830.166667 2.526500e+05 2.248310e+05 4.163640e+03 2.989990e+03 7.179200e+03 5.213060e+03 1.929550e+03 3.457660e+03
1332496830175000 2.570830e+05 2.220480e+05 5.759040e+03 7.024410e+02 8.566550e+03 3.552020e+03 1.832940e+03 3.956190e+03 1332496830.175000 2.570830e+05 2.220480e+05 5.759040e+03 7.024410e+02 8.566550e+03 3.552020e+03 1.832940e+03 3.956190e+03
1332496830183333 2.631300e+05 2.229670e+05 5.141140e+03 1.166120e+03 8.666960e+03 2.720370e+03 9.713740e+02 3.479730e+03 1332496830.183333 2.631300e+05 2.229670e+05 5.141140e+03 1.166120e+03 8.666960e+03 2.720370e+03 9.713740e+02 3.479730e+03
1332496830191667 2.602360e+05 2.252650e+05 3.425140e+03 3.339080e+03 7.853610e+03 3.674950e+03 5.259080e+02 2.443310e+03 1332496830.191667 2.602360e+05 2.252650e+05 3.425140e+03 3.339080e+03 7.853610e+03 3.674950e+03 5.259080e+02 2.443310e+03
1332496830200000 2.535030e+05 2.245270e+05 4.398130e+03 2.927430e+03 8.110280e+03 4.842470e+03 1.513870e+03 2.467100e+03 1332496830.200000 2.535030e+05 2.245270e+05 4.398130e+03 2.927430e+03 8.110280e+03 4.842470e+03 1.513870e+03 2.467100e+03
1332496830208333 2.561260e+05 2.226930e+05 6.043530e+03 6.562240e+02 8.797560e+03 4.832410e+03 2.832370e+03 3.426140e+03 1332496830.208333 2.561260e+05 2.226930e+05 6.043530e+03 6.562240e+02 8.797560e+03 4.832410e+03 2.832370e+03 3.426140e+03
1332496830216667 2.616770e+05 2.236080e+05 5.830460e+03 1.033910e+03 8.123940e+03 3.980690e+03 1.927960e+03 4.092720e+03 1332496830.216667 2.616770e+05 2.236080e+05 5.830460e+03 1.033910e+03 8.123940e+03 3.980690e+03 1.927960e+03 4.092720e+03
1332496830225000 2.594570e+05 2.255360e+05 4.015570e+03 2.995990e+03 7.135440e+03 3.713550e+03 3.072200e+02 3.849430e+03 1332496830.225000 2.594570e+05 2.255360e+05 4.015570e+03 2.995990e+03 7.135440e+03 3.713550e+03 3.072200e+02 3.849430e+03
1332496830233333 2.533520e+05 2.242160e+05 4.650560e+03 3.196620e+03 8.131280e+03 3.586160e+03 7.083230e+01 3.074180e+03 1332496830.233333 2.533520e+05 2.242160e+05 4.650560e+03 3.196620e+03 8.131280e+03 3.586160e+03 7.083230e+01 3.074180e+03
1332496830241667 2.561240e+05 2.215130e+05 6.100480e+03 8.219800e+02 9.757540e+03 3.474510e+03 1.647520e+03 2.559860e+03 1332496830.241667 2.561240e+05 2.215130e+05 6.100480e+03 8.219800e+02 9.757540e+03 3.474510e+03 1.647520e+03 2.559860e+03
1332496830250000 2.630240e+05 2.215590e+05 5.789960e+03 6.994170e+02 9.129740e+03 4.153080e+03 2.829250e+03 2.677270e+03 1332496830.250000 2.630240e+05 2.215590e+05 5.789960e+03 6.994170e+02 9.129740e+03 4.153080e+03 2.829250e+03 2.677270e+03
1332496830258333 2.617200e+05 2.240150e+05 4.358500e+03 2.645360e+03 7.414110e+03 4.810670e+03 2.225990e+03 3.185990e+03 1332496830.258333 2.617200e+05 2.240150e+05 4.358500e+03 2.645360e+03 7.414110e+03 4.810670e+03 2.225990e+03 3.185990e+03
1332496830266667 2.547560e+05 2.242400e+05 4.857380e+03 3.229680e+03 7.539310e+03 4.769140e+03 1.507130e+03 3.668260e+03 1332496830.266667 2.547560e+05 2.242400e+05 4.857380e+03 3.229680e+03 7.539310e+03 4.769140e+03 1.507130e+03 3.668260e+03
1332496830275000 2.568890e+05 2.226580e+05 6.473420e+03 1.214110e+03 9.010760e+03 3.848730e+03 1.303840e+03 3.778500e+03 1332496830.275000 2.568890e+05 2.226580e+05 6.473420e+03 1.214110e+03 9.010760e+03 3.848730e+03 1.303840e+03 3.778500e+03
1332496830283333 2.642080e+05 2.233160e+05 5.700450e+03 1.116560e+03 9.087610e+03 3.846680e+03 1.293590e+03 2.891560e+03 1332496830.283333 2.642080e+05 2.233160e+05 5.700450e+03 1.116560e+03 9.087610e+03 3.846680e+03 1.293590e+03 2.891560e+03
1332496830291667 2.633100e+05 2.257190e+05 3.936120e+03 3.252360e+03 7.552850e+03 4.897860e+03 1.156630e+03 2.037160e+03 1332496830.291667 2.633100e+05 2.257190e+05 3.936120e+03 3.252360e+03 7.552850e+03 4.897860e+03 1.156630e+03 2.037160e+03
1332496830300000 2.550790e+05 2.250860e+05 4.536450e+03 3.960110e+03 7.454590e+03 5.479070e+03 1.596360e+03 2.190800e+03 1332496830.300000 2.550790e+05 2.250860e+05 4.536450e+03 3.960110e+03 7.454590e+03 5.479070e+03 1.596360e+03 2.190800e+03
1332496830308333 2.544870e+05 2.225080e+05 6.635860e+03 1.758850e+03 8.732970e+03 4.466970e+03 2.650360e+03 3.139310e+03 1332496830.308333 2.544870e+05 2.225080e+05 6.635860e+03 1.758850e+03 8.732970e+03 4.466970e+03 2.650360e+03 3.139310e+03
1332496830316667 2.612410e+05 2.224320e+05 6.702270e+03 1.085130e+03 8.989230e+03 3.112990e+03 1.933560e+03 3.828410e+03 1332496830.316667 2.612410e+05 2.224320e+05 6.702270e+03 1.085130e+03 8.989230e+03 3.112990e+03 1.933560e+03 3.828410e+03
1332496830325000 2.621190e+05 2.255870e+05 4.714950e+03 2.892360e+03 8.107820e+03 2.961310e+03 2.399780e+02 3.273720e+03 1332496830.325000 2.621190e+05 2.255870e+05 4.714950e+03 2.892360e+03 8.107820e+03 2.961310e+03 2.399780e+02 3.273720e+03
1332496830333333 2.549990e+05 2.265140e+05 4.532090e+03 4.126900e+03 8.200130e+03 3.872590e+03 5.608900e+01 2.370580e+03 1332496830.333333 2.549990e+05 2.265140e+05 4.532090e+03 4.126900e+03 8.200130e+03 3.872590e+03 5.608900e+01 2.370580e+03
1332496830341667 2.542890e+05 2.240330e+05 6.538810e+03 2.251440e+03 9.419430e+03 4.564450e+03 2.077810e+03 2.508170e+03 1332496830.341667 2.542890e+05 2.240330e+05 6.538810e+03 2.251440e+03 9.419430e+03 4.564450e+03 2.077810e+03 2.508170e+03
1332496830350000 2.618900e+05 2.219600e+05 6.846090e+03 1.475270e+03 9.125590e+03 4.598290e+03 3.299220e+03 3.475420e+03 1332496830.350000 2.618900e+05 2.219600e+05 6.846090e+03 1.475270e+03 9.125590e+03 4.598290e+03 3.299220e+03 3.475420e+03
1332496830358333 2.645020e+05 2.230850e+05 5.066380e+03 3.270560e+03 7.933170e+03 4.173710e+03 1.908910e+03 3.867460e+03 1332496830.358333 2.645020e+05 2.230850e+05 5.066380e+03 3.270560e+03 7.933170e+03 4.173710e+03 1.908910e+03 3.867460e+03
1332496830366667 2.578890e+05 2.236560e+05 4.201660e+03 4.473640e+03 7.688340e+03 4.161580e+03 6.875790e+02 3.653690e+03 1332496830.366667 2.578890e+05 2.236560e+05 4.201660e+03 4.473640e+03 7.688340e+03 4.161580e+03 6.875790e+02 3.653690e+03
1332496830375000 2.542700e+05 2.231510e+05 5.715140e+03 2.752140e+03 9.273320e+03 3.772950e+03 8.964040e+02 3.256060e+03 1332496830.375000 2.542700e+05 2.231510e+05 5.715140e+03 2.752140e+03 9.273320e+03 3.772950e+03 8.964040e+02 3.256060e+03
1332496830383333 2.582570e+05 2.242170e+05 6.114310e+03 1.856860e+03 9.604320e+03 4.200490e+03 1.764380e+03 2.939220e+03 1332496830.383333 2.582570e+05 2.242170e+05 6.114310e+03 1.856860e+03 9.604320e+03 4.200490e+03 1.764380e+03 2.939220e+03
1332496830391667 2.600200e+05 2.268680e+05 4.237530e+03 3.605880e+03 8.066220e+03 5.430250e+03 2.138580e+03 2.696710e+03 1332496830.391667 2.600200e+05 2.268680e+05 4.237530e+03 3.605880e+03 8.066220e+03 5.430250e+03 2.138580e+03 2.696710e+03
1332496830400000 2.550830e+05 2.259240e+05 3.350310e+03 4.853070e+03 7.045820e+03 5.925200e+03 1.893610e+03 2.897340e+03 1332496830.400000 2.550830e+05 2.259240e+05 3.350310e+03 4.853070e+03 7.045820e+03 5.925200e+03 1.893610e+03 2.897340e+03
1332496830408333 2.544530e+05 2.221270e+05 5.271330e+03 2.491500e+03 8.436680e+03 5.032080e+03 2.436050e+03 3.724590e+03 1332496830.408333 2.544530e+05 2.221270e+05 5.271330e+03 2.491500e+03 8.436680e+03 5.032080e+03 2.436050e+03 3.724590e+03
1332496830416667 2.625880e+05 2.199500e+05 5.994620e+03 7.892740e+02 9.029650e+03 3.515740e+03 1.953570e+03 4.014520e+03 1332496830.416667 2.625880e+05 2.199500e+05 5.994620e+03 7.892740e+02 9.029650e+03 3.515740e+03 1.953570e+03 4.014520e+03
1332496830425000 2.656100e+05 2.233330e+05 4.391410e+03 2.400960e+03 8.146460e+03 3.536960e+03 5.302320e+02 3.133920e+03 1332496830.425000 2.656100e+05 2.233330e+05 4.391410e+03 2.400960e+03 8.146460e+03 3.536960e+03 5.302320e+02 3.133920e+03
1332496830433333 2.574700e+05 2.269770e+05 2.975320e+03 4.633530e+03 7.278560e+03 4.640100e+03 -5.015020e+01 2.024960e+03 1332496830.433333 2.574700e+05 2.269770e+05 2.975320e+03 4.633530e+03 7.278560e+03 4.640100e+03 -5.015020e+01 2.024960e+03
1332496830441667 2.506870e+05 2.263310e+05 4.517860e+03 3.183800e+03 8.072600e+03 5.281660e+03 1.605140e+03 2.335140e+03 1332496830.441667 2.506870e+05 2.263310e+05 4.517860e+03 3.183800e+03 8.072600e+03 5.281660e+03 1.605140e+03 2.335140e+03
1332496830450000 2.555630e+05 2.244950e+05 5.551000e+03 1.101300e+03 8.461490e+03 4.725700e+03 2.726670e+03 3.480540e+03 1332496830.450000 2.555630e+05 2.244950e+05 5.551000e+03 1.101300e+03 8.461490e+03 4.725700e+03 2.726670e+03 3.480540e+03
1332496830458333 2.613350e+05 2.246450e+05 4.764680e+03 1.557020e+03 7.833350e+03 3.524810e+03 1.577410e+03 4.038620e+03 1332496830.458333 2.613350e+05 2.246450e+05 4.764680e+03 1.557020e+03 7.833350e+03 3.524810e+03 1.577410e+03 4.038620e+03
1332496830466667 2.602690e+05 2.240080e+05 3.558030e+03 2.987610e+03 7.362440e+03 3.279230e+03 5.624420e+02 3.786550e+03 1332496830.466667 2.602690e+05 2.240080e+05 3.558030e+03 2.987610e+03 7.362440e+03 3.279230e+03 5.624420e+02 3.786550e+03
1332496830475000 2.574350e+05 2.217770e+05 4.972600e+03 2.166880e+03 8.481440e+03 3.328720e+03 1.037130e+03 3.271370e+03 1332496830.475000 2.574350e+05 2.217770e+05 4.972600e+03 2.166880e+03 8.481440e+03 3.328720e+03 1.037130e+03 3.271370e+03
1332496830483333 2.610460e+05 2.215500e+05 5.816180e+03 5.902170e+02 9.120930e+03 3.895400e+03 2.382670e+03 2.824170e+03 1332496830.483333 2.610460e+05 2.215500e+05 5.816180e+03 5.902170e+02 9.120930e+03 3.895400e+03 2.382670e+03 2.824170e+03
1332496830491667 2.627660e+05 2.244730e+05 4.835050e+03 1.785770e+03 7.880760e+03 4.745620e+03 2.443660e+03 3.229550e+03 1332496830.491667 2.627660e+05 2.244730e+05 4.835050e+03 1.785770e+03 7.880760e+03 4.745620e+03 2.443660e+03 3.229550e+03
1332496830500000 2.565090e+05 2.264130e+05 3.758870e+03 3.461200e+03 6.743770e+03 4.928960e+03 1.536620e+03 3.546690e+03 1332496830.500000 2.565090e+05 2.264130e+05 3.758870e+03 3.461200e+03 6.743770e+03 4.928960e+03 1.536620e+03 3.546690e+03
1332496830508333 2.507930e+05 2.243720e+05 5.218490e+03 2.865260e+03 7.803960e+03 4.351090e+03 1.333820e+03 3.680490e+03 1332496830.508333 2.507930e+05 2.243720e+05 5.218490e+03 2.865260e+03 7.803960e+03 4.351090e+03 1.333820e+03 3.680490e+03
1332496830516667 2.563190e+05 2.220660e+05 6.403970e+03 7.323450e+02 9.627760e+03 3.089300e+03 1.516780e+03 3.653690e+03 1332496830.516667 2.563190e+05 2.220660e+05 6.403970e+03 7.323450e+02 9.627760e+03 3.089300e+03 1.516780e+03 3.653690e+03
1332496830525000 2.633430e+05 2.232350e+05 5.200430e+03 1.388580e+03 9.372850e+03 3.371230e+03 1.450390e+03 2.678910e+03 1332496830.525000 2.633430e+05 2.232350e+05 5.200430e+03 1.388580e+03 9.372850e+03 3.371230e+03 1.450390e+03 2.678910e+03
1332496830533333 2.609030e+05 2.251100e+05 3.722580e+03 3.246660e+03 7.876540e+03 4.716810e+03 1.498440e+03 2.116520e+03 1332496830.533333 2.609030e+05 2.251100e+05 3.722580e+03 3.246660e+03 7.876540e+03 4.716810e+03 1.498440e+03 2.116520e+03
1332496830541667 2.544160e+05 2.237690e+05 4.841650e+03 2.956400e+03 8.115920e+03 5.392360e+03 2.142810e+03 2.652320e+03 1332496830.541667 2.544160e+05 2.237690e+05 4.841650e+03 2.956400e+03 8.115920e+03 5.392360e+03 2.142810e+03 2.652320e+03
1332496830550000 2.566980e+05 2.221720e+05 6.471230e+03 9.703960e+02 8.834980e+03 4.816840e+03 2.376630e+03 3.605860e+03 1332496830.550000 2.566980e+05 2.221720e+05 6.471230e+03 9.703960e+02 8.834980e+03 4.816840e+03 2.376630e+03 3.605860e+03
1332496830558333 2.618410e+05 2.235370e+05 5.500740e+03 1.189660e+03 8.365730e+03 4.016470e+03 1.042270e+03 3.821200e+03 1332496830.558333 2.618410e+05 2.235370e+05 5.500740e+03 1.189660e+03 8.365730e+03 4.016470e+03 1.042270e+03 3.821200e+03
1332496830566667 2.595030e+05 2.258400e+05 3.827930e+03 3.088840e+03 7.676140e+03 3.978310e+03 -3.570070e+02 3.016420e+03 1332496830.566667 2.595030e+05 2.258400e+05 3.827930e+03 3.088840e+03 7.676140e+03 3.978310e+03 -3.570070e+02 3.016420e+03
1332496830575000 2.534570e+05 2.246360e+05 4.914610e+03 3.097450e+03 8.224900e+03 4.321440e+03 1.713740e+02 2.412360e+03 1332496830.575000 2.534570e+05 2.246360e+05 4.914610e+03 3.097450e+03 8.224900e+03 4.321440e+03 1.713740e+02 2.412360e+03
1332496830583333 2.560290e+05 2.222210e+05 6.841800e+03 1.028500e+03 9.252300e+03 4.387570e+03 2.418140e+03 2.510100e+03 1332496830.583333 2.560290e+05 2.222210e+05 6.841800e+03 1.028500e+03 9.252300e+03 4.387570e+03 2.418140e+03 2.510100e+03
1332496830591667 2.628400e+05 2.225500e+05 6.210250e+03 1.410730e+03 8.538900e+03 4.152580e+03 3.009300e+03 3.219760e+03 1332496830.591667 2.628400e+05 2.225500e+05 6.210250e+03 1.410730e+03 8.538900e+03 4.152580e+03 3.009300e+03 3.219760e+03
1332496830600000 2.616330e+05 2.250650e+05 4.284530e+03 3.357210e+03 7.282170e+03 3.823590e+03 1.402840e+03 3.644670e+03 1332496830.600000 2.616330e+05 2.250650e+05 4.284530e+03 3.357210e+03 7.282170e+03 3.823590e+03 1.402840e+03 3.644670e+03
1332496830608333 2.545910e+05 2.251090e+05 4.693160e+03 3.647740e+03 7.745160e+03 3.686380e+03 4.901610e+02 3.448860e+03 1332496830.608333 2.545910e+05 2.251090e+05 4.693160e+03 3.647740e+03 7.745160e+03 3.686380e+03 4.901610e+02 3.448860e+03
1332496830616667 2.547800e+05 2.235990e+05 6.527380e+03 1.569870e+03 9.438430e+03 3.456580e+03 1.162520e+03 3.252010e+03 1332496830.616667 2.547800e+05 2.235990e+05 6.527380e+03 1.569870e+03 9.438430e+03 3.456580e+03 1.162520e+03 3.252010e+03
1332496830625000 2.606390e+05 2.241070e+05 6.531050e+03 1.633050e+03 9.283720e+03 4.174020e+03 2.089550e+03 2.775750e+03 1332496830.625000 2.606390e+05 2.241070e+05 6.531050e+03 1.633050e+03 9.283720e+03 4.174020e+03 2.089550e+03 2.775750e+03
1332496830633333 2.611080e+05 2.254720e+05 4.968260e+03 3.527850e+03 7.692870e+03 5.137100e+03 2.207390e+03 2.436660e+03 1332496830.633333 2.611080e+05 2.254720e+05 4.968260e+03 3.527850e+03 7.692870e+03 5.137100e+03 2.207390e+03 2.436660e+03
1332496830641667 2.557750e+05 2.237080e+05 4.963450e+03 4.017370e+03 7.701420e+03 5.269650e+03 2.284400e+03 2.842080e+03 1332496830.641667 2.557750e+05 2.237080e+05 4.963450e+03 4.017370e+03 7.701420e+03 5.269650e+03 2.284400e+03 2.842080e+03
1332496830650000 2.573980e+05 2.209470e+05 6.767500e+03 1.645710e+03 9.107070e+03 4.000180e+03 2.548860e+03 3.624770e+03 1332496830.650000 2.573980e+05 2.209470e+05 6.767500e+03 1.645710e+03 9.107070e+03 4.000180e+03 2.548860e+03 3.624770e+03
1332496830658333 2.649240e+05 2.215590e+05 6.471460e+03 1.110330e+03 9.459650e+03 3.108170e+03 1.696970e+03 3.893440e+03 1332496830.658333 2.649240e+05 2.215590e+05 6.471460e+03 1.110330e+03 9.459650e+03 3.108170e+03 1.696970e+03 3.893440e+03
1332496830666667 2.653390e+05 2.257330e+05 4.348800e+03 3.459510e+03 8.475300e+03 4.031240e+03 5.733470e+02 2.910270e+03 1332496830.666667 2.653390e+05 2.257330e+05 4.348800e+03 3.459510e+03 8.475300e+03 4.031240e+03 5.733470e+02 2.910270e+03
1332496830675000 2.568140e+05 2.269950e+05 3.479540e+03 4.949790e+03 7.499910e+03 5.624710e+03 7.516560e+02 2.347710e+03 1332496830.675000 2.568140e+05 2.269950e+05 3.479540e+03 4.949790e+03 7.499910e+03 5.624710e+03 7.516560e+02 2.347710e+03
1332496830683333 2.533160e+05 2.251610e+05 5.147060e+03 3.218430e+03 8.460160e+03 5.869300e+03 2.336320e+03 2.987960e+03 1332496830.683333 2.533160e+05 2.251610e+05 5.147060e+03 3.218430e+03 8.460160e+03 5.869300e+03 2.336320e+03 2.987960e+03
1332496830691667 2.593600e+05 2.231010e+05 5.549120e+03 1.869950e+03 8.740760e+03 4.668940e+03 2.457910e+03 3.758820e+03 1332496830.691667 2.593600e+05 2.231010e+05 5.549120e+03 1.869950e+03 8.740760e+03 4.668940e+03 2.457910e+03 3.758820e+03
1332496830700000 2.620120e+05 2.240160e+05 4.173610e+03 3.004130e+03 8.157040e+03 3.704730e+03 9.879640e+02 3.652750e+03 1332496830.700000 2.620120e+05 2.240160e+05 4.173610e+03 3.004130e+03 8.157040e+03 3.704730e+03 9.879640e+02 3.652750e+03
1332496830708333 2.571760e+05 2.244200e+05 3.517300e+03 4.118750e+03 7.822240e+03 3.718230e+03 3.726490e+01 2.953680e+03 1332496830.708333 2.571760e+05 2.244200e+05 3.517300e+03 4.118750e+03 7.822240e+03 3.718230e+03 3.726490e+01 2.953680e+03
1332496830716667 2.551460e+05 2.233220e+05 4.923980e+03 2.330680e+03 9.095910e+03 3.792400e+03 1.013070e+03 2.711240e+03 1332496830.716667 2.551460e+05 2.233220e+05 4.923980e+03 2.330680e+03 9.095910e+03 3.792400e+03 1.013070e+03 2.711240e+03
1332496830725000 2.605240e+05 2.236510e+05 5.413630e+03 1.146210e+03 8.817170e+03 4.419650e+03 2.446650e+03 2.832050e+03 1332496830.725000 2.605240e+05 2.236510e+05 5.413630e+03 1.146210e+03 8.817170e+03 4.419650e+03 2.446650e+03 2.832050e+03
1332496830733333 2.620980e+05 2.257520e+05 4.262980e+03 2.270970e+03 7.135480e+03 5.067120e+03 2.294680e+03 3.376620e+03 1332496830.733333 2.620980e+05 2.257520e+05 4.262980e+03 2.270970e+03 7.135480e+03 5.067120e+03 2.294680e+03 3.376620e+03
1332496830741667 2.568890e+05 2.253790e+05 3.606460e+03 3.568190e+03 6.552650e+03 4.970270e+03 1.516380e+03 3.662570e+03 1332496830.741667 2.568890e+05 2.253790e+05 3.606460e+03 3.568190e+03 6.552650e+03 4.970270e+03 1.516380e+03 3.662570e+03
1332496830750000 2.539480e+05 2.226310e+05 5.511700e+03 2.066300e+03 7.952660e+03 4.019910e+03 1.513140e+03 3.752630e+03 1332496830.750000 2.539480e+05 2.226310e+05 5.511700e+03 2.066300e+03 7.952660e+03 4.019910e+03 1.513140e+03 3.752630e+03
1332496830758333 2.597990e+05 2.220670e+05 5.873500e+03 6.085840e+02 9.253780e+03 2.870740e+03 1.348240e+03 3.344200e+03 1332496830.758333 2.597990e+05 2.220670e+05 5.873500e+03 6.085840e+02 9.253780e+03 2.870740e+03 1.348240e+03 3.344200e+03
1332496830766667 2.625470e+05 2.249010e+05 4.346080e+03 1.928100e+03 8.590970e+03 3.455460e+03 9.043910e+02 2.379270e+03 1332496830.766667 2.625470e+05 2.249010e+05 4.346080e+03 1.928100e+03 8.590970e+03 3.455460e+03 9.043910e+02 2.379270e+03
1332496830775000 2.561370e+05 2.267610e+05 3.423560e+03 3.379080e+03 7.471150e+03 4.894170e+03 1.153540e+03 2.031410e+03 1332496830.775000 2.561370e+05 2.267610e+05 3.423560e+03 3.379080e+03 7.471150e+03 4.894170e+03 1.153540e+03 2.031410e+03
1332496830783333 2.503260e+05 2.250130e+05 5.519980e+03 2.423970e+03 7.991760e+03 5.117950e+03 2.098790e+03 3.099240e+03 1332496830.783333 2.503260e+05 2.250130e+05 5.519980e+03 2.423970e+03 7.991760e+03 5.117950e+03 2.098790e+03 3.099240e+03
1332496830791667 2.554540e+05 2.229920e+05 6.547950e+03 4.964960e+02 8.751340e+03 3.900560e+03 2.132290e+03 4.076810e+03 1332496830.791667 2.554540e+05 2.229920e+05 6.547950e+03 4.964960e+02 8.751340e+03 3.900560e+03 2.132290e+03 4.076810e+03
1332496830800000 2.612860e+05 2.234890e+05 5.152850e+03 1.501510e+03 8.425610e+03 2.888030e+03 7.761140e+02 3.786360e+03 1332496830.800000 2.612860e+05 2.234890e+05 5.152850e+03 1.501510e+03 8.425610e+03 2.888030e+03 7.761140e+02 3.786360e+03
1332496830808333 2.589690e+05 2.240690e+05 3.832610e+03 3.001980e+03 7.979260e+03 3.182310e+03 5.271600e+01 2.874800e+03 1332496830.808333 2.589690e+05 2.240690e+05 3.832610e+03 3.001980e+03 7.979260e+03 3.182310e+03 5.271600e+01 2.874800e+03
1332496830816667 2.549460e+05 2.220350e+05 5.317880e+03 2.139800e+03 9.103140e+03 3.955610e+03 1.235170e+03 2.394150e+03 1332496830.816667 2.549460e+05 2.220350e+05 5.317880e+03 2.139800e+03 9.103140e+03 3.955610e+03 1.235170e+03 2.394150e+03
1332496830825000 2.586760e+05 2.212050e+05 6.594910e+03 5.053440e+02 9.423360e+03 4.562470e+03 2.913740e+03 2.892350e+03 1332496830.825000 2.586760e+05 2.212050e+05 6.594910e+03 5.053440e+02 9.423360e+03 4.562470e+03 2.913740e+03 2.892350e+03
1332496830833333 2.621250e+05 2.235660e+05 5.116750e+03 1.773600e+03 8.082200e+03 4.776370e+03 2.386390e+03 3.659730e+03 1332496830.833333 2.621250e+05 2.235660e+05 5.116750e+03 1.773600e+03 8.082200e+03 4.776370e+03 2.386390e+03 3.659730e+03
1332496830841667 2.578350e+05 2.259180e+05 3.714300e+03 3.477080e+03 7.205370e+03 4.554610e+03 7.115390e+02 3.878420e+03 1332496830.841667 2.578350e+05 2.259180e+05 3.714300e+03 3.477080e+03 7.205370e+03 4.554610e+03 7.115390e+02 3.878420e+03
1332496830850000 2.536600e+05 2.243710e+05 5.022450e+03 2.592430e+03 8.277200e+03 4.119370e+03 4.865080e+02 3.666740e+03 1332496830.850000 2.536600e+05 2.243710e+05 5.022450e+03 2.592430e+03 8.277200e+03 4.119370e+03 4.865080e+02 3.666740e+03
1332496830858333 2.595030e+05 2.220610e+05 6.589950e+03 6.599360e+02 9.596920e+03 3.598100e+03 1.702490e+03 3.036600e+03 1332496830.858333 2.595030e+05 2.220610e+05 6.589950e+03 6.599360e+02 9.596920e+03 3.598100e+03 1.702490e+03 3.036600e+03
1332496830866667 2.654950e+05 2.228430e+05 5.541850e+03 1.728430e+03 8.459960e+03 4.492000e+03 2.231970e+03 2.430620e+03 1332496830.866667 2.654950e+05 2.228430e+05 5.541850e+03 1.728430e+03 8.459960e+03 4.492000e+03 2.231970e+03 2.430620e+03
1332496830875000 2.609290e+05 2.249960e+05 4.000950e+03 3.745990e+03 6.983790e+03 5.430860e+03 1.855260e+03 2.533380e+03 1332496830.875000 2.609290e+05 2.249960e+05 4.000950e+03 3.745990e+03 6.983790e+03 5.430860e+03 1.855260e+03 2.533380e+03
1332496830883333 2.527160e+05 2.243350e+05 5.086560e+03 3.401150e+03 7.597970e+03 5.196120e+03 1.755720e+03 3.079760e+03 1332496830.883333 2.527160e+05 2.243350e+05 5.086560e+03 3.401150e+03 7.597970e+03 5.196120e+03 1.755720e+03 3.079760e+03
1332496830891667 2.541100e+05 2.231110e+05 6.822190e+03 1.229080e+03 9.164340e+03 3.761230e+03 1.679390e+03 3.584880e+03 1332496830.891667 2.541100e+05 2.231110e+05 6.822190e+03 1.229080e+03 9.164340e+03 3.761230e+03 1.679390e+03 3.584880e+03
1332496830900000 2.599690e+05 2.246930e+05 6.183950e+03 1.538500e+03 9.222080e+03 3.139170e+03 9.499020e+02 3.180800e+03 1332496830.900000 2.599690e+05 2.246930e+05 6.183950e+03 1.538500e+03 9.222080e+03 3.139170e+03 9.499020e+02 3.180800e+03
1332496830908333 2.590780e+05 2.269130e+05 4.388890e+03 3.694820e+03 8.195020e+03 3.933000e+03 4.260800e+02 2.388450e+03 1332496830.908333 2.590780e+05 2.269130e+05 4.388890e+03 3.694820e+03 8.195020e+03 3.933000e+03 4.260800e+02 2.388450e+03
1332496830916667 2.545630e+05 2.247600e+05 5.168440e+03 4.020940e+03 8.450270e+03 4.758910e+03 1.458900e+03 2.286430e+03 1332496830.916667 2.545630e+05 2.247600e+05 5.168440e+03 4.020940e+03 8.450270e+03 4.758910e+03 1.458900e+03 2.286430e+03
1332496830925000 2.580590e+05 2.212170e+05 6.883460e+03 1.649530e+03 9.232780e+03 4.457650e+03 3.057820e+03 3.031950e+03 1332496830.925000 2.580590e+05 2.212170e+05 6.883460e+03 1.649530e+03 9.232780e+03 4.457650e+03 3.057820e+03 3.031950e+03
1332496830933333 2.646670e+05 2.211770e+05 6.218510e+03 1.645730e+03 8.657180e+03 3.663500e+03 2.528280e+03 3.978340e+03 1332496830.933333 2.646670e+05 2.211770e+05 6.218510e+03 1.645730e+03 8.657180e+03 3.663500e+03 2.528280e+03 3.978340e+03
1332496830941667 2.629250e+05 2.243820e+05 4.627500e+03 3.635930e+03 7.892800e+03 3.431320e+03 6.045090e+02 3.901370e+03 1332496830.941667 2.629250e+05 2.243820e+05 4.627500e+03 3.635930e+03 7.892800e+03 3.431320e+03 6.045090e+02 3.901370e+03
1332496830950000 2.547080e+05 2.254480e+05 4.408250e+03 4.461040e+03 8.197170e+03 3.953750e+03 -4.453460e+01 3.154870e+03 1332496830.950000 2.547080e+05 2.254480e+05 4.408250e+03 4.461040e+03 8.197170e+03 3.953750e+03 -4.453460e+01 3.154870e+03
1332496830958333 2.537020e+05 2.246350e+05 5.825770e+03 2.577050e+03 9.590050e+03 4.569250e+03 1.460270e+03 2.785170e+03 1332496830.958333 2.537020e+05 2.246350e+05 5.825770e+03 2.577050e+03 9.590050e+03 4.569250e+03 1.460270e+03 2.785170e+03
1332496830966667 2.602060e+05 2.241400e+05 5.387980e+03 1.951160e+03 8.789510e+03 5.131660e+03 2.706380e+03 2.972480e+03 1332496830.966667 2.602060e+05 2.241400e+05 5.387980e+03 1.951160e+03 8.789510e+03 5.131660e+03 2.706380e+03 2.972480e+03
1332496830975000 2.612400e+05 2.247370e+05 3.860810e+03 3.418310e+03 7.414530e+03 5.284520e+03 2.271380e+03 3.183150e+03 1332496830.975000 2.612400e+05 2.247370e+05 3.860810e+03 3.418310e+03 7.414530e+03 5.284520e+03 2.271380e+03 3.183150e+03
1332496830983333 2.561400e+05 2.232520e+05 3.850010e+03 3.957140e+03 7.262650e+03 4.964640e+03 1.499510e+03 3.453130e+03 1332496830.983333 2.561400e+05 2.232520e+05 3.850010e+03 3.957140e+03 7.262650e+03 4.964640e+03 1.499510e+03 3.453130e+03
1332496830991667 2.561160e+05 2.213490e+05 5.594480e+03 2.054400e+03 8.835130e+03 3.662010e+03 1.485510e+03 3.613010e+03 1332496830.991667 2.561160e+05 2.213490e+05 5.594480e+03 2.054400e+03 8.835130e+03 3.662010e+03 1.485510e+03 3.613010e+03

View File

@ -1,28 +0,0 @@
# interval-start 1332496919900000
1332496919900000 2.523050e+05 2.254020e+05 4.779410e+03 3.638030e+03 8.138070e+03 4.334460e+03 1.083780e+03 3.743730e+03
1332496919908333 2.551190e+05 2.237870e+05 5.965640e+03 2.076350e+03 9.468790e+03 3.693880e+03 1.247860e+03 3.393680e+03
1332496919916667 2.616370e+05 2.247980e+05 4.848970e+03 2.315620e+03 9.323300e+03 4.225460e+03 1.805780e+03 2.593050e+03
1332496919925000 2.606460e+05 2.251300e+05 3.061360e+03 3.951840e+03 7.662910e+03 5.341410e+03 1.986520e+03 2.276780e+03
1332496919933333 2.559710e+05 2.235030e+05 4.096030e+03 3.296970e+03 7.827080e+03 5.452120e+03 2.492520e+03 2.929450e+03
1332496919941667 2.579260e+05 2.217080e+05 5.472320e+03 1.555700e+03 8.495760e+03 4.491140e+03 2.379780e+03 3.741710e+03
1332496919950000 2.610180e+05 2.242350e+05 4.669770e+03 1.876190e+03 8.366680e+03 3.677510e+03 9.021690e+02 3.549040e+03
1332496919958333 2.569150e+05 2.274650e+05 2.785070e+03 3.751930e+03 7.440320e+03 3.964860e+03 -3.227860e+02 2.460890e+03
1332496919966667 2.509510e+05 2.262000e+05 3.772710e+03 3.131950e+03 8.159860e+03 4.539860e+03 7.375190e+02 2.126750e+03
1332496919975000 2.556710e+05 2.223720e+05 5.826200e+03 8.715560e+02 9.120240e+03 4.545110e+03 2.804310e+03 2.721000e+03
1332496919983333 2.649730e+05 2.214860e+05 5.839130e+03 4.659180e+02 8.628300e+03 3.934870e+03 2.972490e+03 3.773730e+03
1332496919991667 2.652170e+05 2.233920e+05 3.718770e+03 2.834970e+03 7.209900e+03 3.460260e+03 1.324930e+03 4.075960e+03
# interval-end 1332496919991668
# interval-start 1332496920000000
1332496920000000 2.564370e+05 2.244300e+05 4.011610e+03 3.475340e+03 7.495890e+03 3.388940e+03 2.613970e+02 3.731260e+03
1332496920008333 2.539630e+05 2.241670e+05 5.621070e+03 1.548010e+03 9.165170e+03 3.522930e+03 1.058930e+03 2.996960e+03
1332496920016667 2.585080e+05 2.249300e+05 6.011400e+03 8.188660e+02 9.039950e+03 4.482440e+03 2.490390e+03 2.679340e+03
1332496920025000 2.596270e+05 2.260220e+05 4.474500e+03 2.423020e+03 7.414190e+03 5.071970e+03 2.439380e+03 2.962960e+03
1332496920033333 2.551870e+05 2.246320e+05 4.738570e+03 3.398040e+03 7.395120e+03 4.726450e+03 1.839030e+03 3.393530e+03
1332496920041667 2.571020e+05 2.216230e+05 6.144130e+03 1.441090e+03 8.756480e+03 3.495320e+03 1.869940e+03 3.752530e+03
1332496920050000 2.636530e+05 2.217700e+05 6.221770e+03 7.389620e+02 9.547600e+03 2.666820e+03 1.462660e+03 3.332570e+03
1332496920058333 2.636130e+05 2.252560e+05 4.477120e+03 2.437450e+03 8.510210e+03 3.855630e+03 9.594420e+02 2.387180e+03
1332496920066667 2.553500e+05 2.262640e+05 4.283720e+03 3.923940e+03 7.912470e+03 5.466520e+03 1.284990e+03 2.093720e+03
1332496920075000 2.527270e+05 2.246090e+05 5.851930e+03 2.491980e+03 8.540630e+03 5.623050e+03 2.339780e+03 3.007140e+03
1332496920083333 2.584750e+05 2.235780e+05 5.924870e+03 1.394480e+03 8.779620e+03 4.544180e+03 2.132030e+03 3.849760e+03
1332496920091667 2.615630e+05 2.246090e+05 4.336140e+03 2.455750e+03 8.055380e+03 3.469110e+03 6.278730e+02 3.664200e+03
# interval-end 1332496920100000

View File

@ -1,4 +1,4 @@
# comments are cool? what if they contain â†UNICODEâ†<C3A2> or invalid utf-8 like Ã( # comments are cool?
2.66568e+05 2.24029e+05 5.16140e+03 2.52517e+03 8.35084e+03 3.72470e+03 1.35534e+03 2.03900e+03 2.66568e+05 2.24029e+05 5.16140e+03 2.52517e+03 8.35084e+03 3.72470e+03 1.35534e+03 2.03900e+03
2.57914e+05 2.27183e+05 4.30368e+03 4.13080e+03 7.25535e+03 4.89047e+03 1.63859e+03 1.93496e+03 2.57914e+05 2.27183e+05 4.30368e+03 4.13080e+03 7.25535e+03 4.89047e+03 1.63859e+03 1.93496e+03
2.51717e+05 2.26047e+05 5.99445e+03 3.49363e+03 8.07250e+03 5.08267e+03 2.26917e+03 2.86231e+03 2.51717e+05 2.26047e+05 5.99445e+03 3.49363e+03 8.07250e+03 5.08267e+03 2.26917e+03 2.86231e+03

View File

@ -1,11 +1,11 @@
1332497040000000 2.56439e+05 2.24775e+05 2.92897e+03 4.66646e+03 7.58491e+03 3.57351e+03 -4.34171e+02 2.98819e+03 1332497040.000000 2.56439e+05 2.24775e+05 2.92897e+03 4.66646e+03 7.58491e+03 3.57351e+03 -4.34171e+02 2.98819e+03
1332497040010000 2.51903e+05 2.23202e+05 4.23696e+03 3.49363e+03 8.53493e+03 4.29416e+03 8.49573e+02 2.38189e+03 1332497040.010000 2.51903e+05 2.23202e+05 4.23696e+03 3.49363e+03 8.53493e+03 4.29416e+03 8.49573e+02 2.38189e+03
1332497040020000 2.57625e+05 2.20247e+05 5.47017e+03 1.35872e+03 9.18903e+03 4.56136e+03 2.65599e+03 2.60912e+03 1332497040.020000 2.57625e+05 2.20247e+05 5.47017e+03 1.35872e+03 9.18903e+03 4.56136e+03 2.65599e+03 2.60912e+03
1332497040030000 2.63375e+05 2.20706e+05 4.51842e+03 1.80758e+03 8.17208e+03 4.17463e+03 2.57884e+03 3.32848e+03 1332497040.030000 2.63375e+05 2.20706e+05 4.51842e+03 1.80758e+03 8.17208e+03 4.17463e+03 2.57884e+03 3.32848e+03
1332497040040000 2.59221e+05 2.22346e+05 2.98879e+03 3.66264e+03 6.87274e+03 3.94223e+03 1.25928e+03 3.51786e+03 1332497040.040000 2.59221e+05 2.22346e+05 2.98879e+03 3.66264e+03 6.87274e+03 3.94223e+03 1.25928e+03 3.51786e+03
1332497040050000 2.51918e+05 2.22281e+05 4.22677e+03 2.84764e+03 7.78323e+03 3.81659e+03 8.04944e+02 3.46314e+03 1332497040.050000 2.51918e+05 2.22281e+05 4.22677e+03 2.84764e+03 7.78323e+03 3.81659e+03 8.04944e+02 3.46314e+03
1332497040050000 2.54478e+05 2.21701e+05 5.61366e+03 1.02262e+03 9.26581e+03 3.50152e+03 1.29331e+03 3.07271e+03 1332497040.050000 2.54478e+05 2.21701e+05 5.61366e+03 1.02262e+03 9.26581e+03 3.50152e+03 1.29331e+03 3.07271e+03
1332497040060000 2.59568e+05 2.22945e+05 4.97190e+03 1.28250e+03 8.62081e+03 4.06316e+03 1.85717e+03 2.61990e+03 1332497040.060000 2.59568e+05 2.22945e+05 4.97190e+03 1.28250e+03 8.62081e+03 4.06316e+03 1.85717e+03 2.61990e+03
1332497040070000 2.57269e+05 2.23697e+05 3.60527e+03 3.05749e+03 7.22363e+03 4.90330e+03 1.93736e+03 2.35357e+03 1332497040.070000 2.57269e+05 2.23697e+05 3.60527e+03 3.05749e+03 7.22363e+03 4.90330e+03 1.93736e+03 2.35357e+03
1332497040080000 2.52274e+05 2.21438e+05 5.01228e+03 2.86309e+03 7.87115e+03 4.80448e+03 2.18291e+03 2.93397e+03 1332497040.080000 2.52274e+05 2.21438e+05 5.01228e+03 2.86309e+03 7.87115e+03 4.80448e+03 2.18291e+03 2.93397e+03
1332497040090000 2.56468e+05 2.19205e+05 6.29804e+03 8.09467e+02 9.12895e+03 3.52055e+03 2.16980e+03 3.88739e+03 1332497040.090000 2.56468e+05 2.19205e+05 6.29804e+03 8.09467e+02 9.12895e+03 3.52055e+03 2.16980e+03 3.88739e+03

File diff suppressed because it is too large Load Diff

View File

@ -1,8 +0,0 @@
-10000000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
-100000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
-100000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
-1000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
1 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
1000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
1000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03
1000000000 2.61246e+05 2.22735e+05 4.60340e+03 2.58221e+03 8.42804e+03 3.41890e+03 9.57898e+02 4.00585e+03

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Some files were not shown because too many files have changed in this diff Show More