Compare commits

276 Commits

nilmdb-1.4 ... 7538c6201b
| SHA1 | Author | Date | |
|---|---|---|---|
| 7538c6201b | |||
| 4d9a106ca1 | |||
| e90a79ddad | |||
| 7056c5b4ec | |||
| df4e7f0967 | |||
| b6bba16505 | |||
| d4003d0d34 | |||
| 759492298a | |||
| b5f6fcc253 | |||
| 905e325ded | |||
| 648b6f4b70 | |||
| 7f8a2c7027 | |||
| 276fbc652a | |||
| 10b34f5937 | |||
| 83daeb148a | |||
| d65f00e8b2 | |||
| 71dc01c9a7 | |||
| bcd21b3498 | |||
| a1dee0e6f2 | |||
| 99ac47cf0d | |||
| 4cdaef51c1 | |||
| 88466dcafe | |||
| 8dfb8da15c | |||
| 6cc1f6b7b2 | |||
| 8dc36c2d37 | |||
| 3738430103 | |||
| a41111b045 | |||
| 85f822e1c4 | |||
| 0222dfebf0 | |||
| 70914690c1 | |||
| 10400f2b07 | |||
| 56153ff7ad | |||
| 671f87b047 | |||
| 2f2faeeab7 | |||
| 2ed544bd30 | |||
| 6821b2a97b | |||
| b20bb92988 | |||
| 699de7b11f | |||
| ea67e45be9 | |||
| ca440a42bd | |||
| 4ff4b263b4 | |||
| 79e544c733 | |||
| 9acf99ff25 | |||
| 4958a5ab2e | |||
| f2d89e2da5 | |||
| 1952f245c0 | |||
| 7cbc0c11c3 | |||
| 9f2651c35e | |||
| 9126980ed4 | |||
| ea051c85b3 | |||
| d8294469cf | |||
| 96eadb0577 | |||
| fb524c649f | |||
| 19a34a07a4 | |||
| d8df6f515f | |||
| 90ee127c87 | |||
| 0b631b7dea | |||
| f587518adb | |||
| efbb2665fe | |||
| 544413018c | |||
| 322b0ec423 | |||
| f3833d9b20 | |||
| 735c8497af | |||
| 7252e40c2d | |||
| caa5604d81 | |||
| 6624e8dab6 | |||
| d907638858 | |||
| 39e66fe38c | |||
| ba915bb290 | |||
| 3f0b8e50a2 | |||
| f93edc469c | |||
| 087fb39475 | |||
| 8b4acf41d6 | |||
| 32a76ccf3f | |||
| 5f9367bdd3 | |||
| 5848d03507 | |||
| 36dc448f02 | |||
| 2764283f59 | |||
| 2d0c3f7868 | |||
| cadba9fbba | |||
| 2d200a86c9 | |||
| 640c1bc95e | |||
| b574fc86f4 | |||
| 02ee18c410 | |||
| d1e241a213 | |||
| c5c7f638e7 | |||
| a1218fd20b | |||
| c58a933d21 | |||
| 7874e1ebfa | |||
| 79b410a85b | |||
| 6645395924 | |||
| beb3eadd38 | |||
| edf4568e8f | |||
| a962258b2a | |||
| fa011559c1 | |||
| 349eec3942 | |||
| 99500f3a88 | |||
| 54eccb17aa | |||
| cc8ac74a37 | |||
| 3be904d158 | |||
| 5d9fc5500c | |||
| 57751f5b32 | |||
| 1c005518d8 | |||
| 3279f7ef2c | |||
| a2e124f444 | |||
| 6d673bd2be | |||
| 613a3185e3 | |||
| c83ee65cf7 | |||
| 113633459d | |||
| 41abf53085 | |||
| fef3e1d31e | |||
| 02db87eee6 | |||
| ad85c3dd29 | |||
| 0e6ccd687b | |||
| 85d4c419fd | |||
| 159278066c | |||
| b69358a185 | |||
| e82ef60e2e | |||
| 911d9bc284 | |||
| 752a9b36ae | |||
| 97d17de8ad | |||
| 5da7e6558e | |||
| 1928caa1d7 | |||
| 5db034432c | |||
| 55119a3e07 | |||
| a9eff10dbf | |||
| 0f5c1c0db6 | |||
| d17365ca37 | |||
| 8125d9c840 | |||
| ba55ad82f0 | |||
| 45c81d2019 | |||
| 78cfda32e3 | |||
| 3658d3876b | |||
| 022b50950f | |||
| e5efbadc8e | |||
| 74f633c9da | |||
| ab9a327130 | |||
| da72fc9777 | |||
| a01cb4132d | |||
| 7c3da2fe44 | |||
| f0e06dc436 | |||
| ddc0eb4264 | |||
| 0a22db3965 | |||
| 8bb8f068de | |||
| 416902097d | |||
| f5276e9fc8 | |||
| c47f28f93a | |||
| 63b5f99b90 | |||
| 7d7b89b52f | |||
| 8d249273c6 | |||
| abe431c663 | |||
| ccf1f695af | |||
| 06f7390c9e | |||
| 6de77a08f1 | |||
| 8db9771c20 | |||
| 04f815a24b | |||
| 6868f5f126 | |||
| ca0943ec19 | |||
| 68addb4e4a | |||
| 68c33b1f14 | |||
| 8dd8741100 | |||
| 8e6341ae5d | |||
| 422b1e2df2 | |||
| 0f745b3047 | |||
| 71cd7ed9b7 | |||
| a79d6104d5 | |||
| 8e8ec59e30 | |||
| b89b945a0f | |||
| bd7bdb2eb8 | |||
| 840cd2fd13 | |||
| bbd59c8b50 | |||
| 405c110fd7 | |||
| 274adcd856 | |||
| a1850c9c2c | |||
| 6cd28b67b1 | |||
| d6d215d53d | |||
| e02143ddb2 | |||
| e275384d03 | |||
| a6a67ec15c | |||
| fc43107307 | |||
| 90633413bb | |||
| c7c3aff0fb | |||
| e2347c954e | |||
| 222a5c6c53 | |||
| 1ca2c143e5 | |||
| b5df575c79 | |||
| 2768a5ad15 | |||
| a105543c38 | |||
| 309f38d0ed | |||
| 9a27b6ef6a | |||
| 99532cf9e0 | |||
| dfdd0e5c74 | |||
| 9a2699adfc | |||
| 9bbb95b18b | |||
| 6bbed322c5 | |||
| 2317894355 | |||
| 539c92226c | |||
| 77c766d85d | |||
| 49d04db1d6 | |||
| ea838d05ae | |||
| f2a48bdb2a | |||
| 6d14e0b8aa | |||
| b31b9327b9 | |||
| b98ff1331a | |||
| 00e6ba1124 | |||
| 01029230c9 | |||
| ecc4e5ef9d | |||
| 23f31c472b | |||
| a1e2746360 | |||
| 1c40d59a52 | |||
| bfb09a189f | |||
| 416a499866 | |||
| 637d193807 | |||
| b7fa5745ce | |||
| 0104c8edd9 | |||
| cf3b8e787d | |||
| 83d022016c | |||
| 43b740ecaa | |||
| 4ce059b920 | |||
| 99a4228285 | |||
| 230ec72609 | |||
| d36ece3767 | |||
| 231963538e | |||
| b4d6aad6de | |||
| e95142eabf | |||
| d21c3470bc | |||
| 7576883f49 | |||
| cc211542f8 | |||
| 8292dcf70b | |||
| b362fd37f6 | |||
| 41ec13ee17 | |||
| efa9aa9097 | |||
| d9afb48f45 | |||
| d1140e0f16 | |||
| 6091e44561 | |||
| e233ba790f | |||
| f0304b4c00 | |||
| 60594ca58e | |||
| c7f2df4abc | |||
| 5b7409f802 | |||
| 06038062a2 | |||
| ae9fe89759 | |||
| 04def60021 | |||
| 9ce0f69dff | |||
| 90c3be91c4 | |||
| ebccfb3531 | |||
| e006f1d02e | |||
| 5292319802 | |||
| 173121ca87 | |||
| 26bab031bd | |||
| b5fefffa09 | |||
| dccb3e370a | |||
| 95ca55aa7e | |||
| e01813f29d | |||
| 7f41e117a2 | |||
| dd5fc806e5 | |||
| f8ca8d31e6 | |||
| ed89d803f0 | |||
| 3d24092cd2 | |||
| 304bb43d85 | |||
| 59a79a30a5 | |||
| c0d450d39e | |||
| 6f14d609b2 | |||
| 77ef87456f | |||
| 32d6af935c | |||
| 6af3a6fc41 | |||
| f8a06fb3b7 | |||
| e790bb9e8a | |||
| 89be6f5931 | |||
| 4cdef3285d | |||
| bcd82c4d59 | |||
| caf63ab01f | |||
| 2d72891162 | |||
| cda2ac3e77 | |||
| 57d3d60f6a | |||
| d6b5befe76 | |||
.coveragerc

@@ -1,10 +1,11 @@
 # -*- conf -*-
 
 [run]
-# branch = True
+branch = True
 
 [report]
 exclude_lines =
 	pragma: no cover
 	if 0:
-omit = nilmdb/utils/datetime_tz*,nilmdb/scripts,nilmdb/_version.py
+omit = nilmdb/scripts,nilmdb/_version.py,nilmdb/fsck
+show_missing = True
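The `exclude_lines` patterns above tell coverage.py to leave matching lines
out of the coverage report.  As a minimal illustration (a hypothetical
function, not from the nilmdb source), a line tagged with the configured
pragma is skipped when coverage is computed:

    def unsupported_platform():  # pragma: no cover
        # Excluded from coverage per the .coveragerc settings above
        raise NotImplementedError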
.gitignore (7 changes, vendored)

@@ -4,6 +4,7 @@ tests/*testdb/
 db/
 
 # Compiled / cythonized files
+README.html
 docs/*.html
 build/
 *.pyc
@@ -15,10 +16,8 @@ nilmdb/server/rbtree.c
 # Setup junk
 dist/
 nilmdb.egg-info/
-
-# This gets generated as needed by setup.py
-MANIFEST.in
-MANIFEST
+venv/
+.eggs/
 
 # Misc
 timeit*out
.pylintrc (250 changes)

@@ -1,250 +0,0 @@
-# -*- conf -*-
-[MASTER]
-
-# Specify a configuration file.
-#rcfile=
-
-# Python code to execute, usually for sys.path manipulation such as
-# pygtk.require().
-#init-hook=
-
-# Profiled execution.
-profile=no
-
-# Add files or directories to the blacklist. They should be base names, not
-# paths.
-ignore=datetime_tz
-
-# Pickle collected data for later comparisons.
-persistent=no
-
-# List of plugins (as comma separated values of python modules names) to load,
-# usually to register additional checkers.
-load-plugins=
-
-
-[MESSAGES CONTROL]
-
-# Enable the message, report, category or checker with the given id(s). You can
-# either give multiple identifier separated by comma (,) or put this option
-# multiple time.
-#enable=
-
-# Disable the message, report, category or checker with the given id(s). You
-# can either give multiple identifier separated by comma (,) or put this option
-# multiple time (only on the command line, not in the configuration file where
-# it should appear only once).
-disable=C0111,R0903,R0201,R0914,R0912,W0142,W0703,W0702
-
-
-[REPORTS]
-
-# Set the output format. Available formats are text, parseable, colorized, msvs
-# (visual studio) and html
-output-format=parseable
-
-# Include message's id in output
-include-ids=yes
-
-# Put messages in a separate file for each module / package specified on the
-# command line instead of printing them on stdout. Reports (if any) will be
-# written in a file name "pylint_global.[txt|html]".
-files-output=no
-
-# Tells whether to display a full report or only the messages
-reports=yes
-
-# Python expression which should return a note less than 10 (10 is the highest
-# note). You have access to the variables errors warning, statement which
-# respectively contain the number of errors / warnings messages and the total
-# number of statements analyzed. This is used by the global evaluation report
-# (RP0004).
-evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
-
-# Add a comment according to your evaluation note. This is used by the global
-# evaluation report (RP0004).
-comment=no
-
-
-[SIMILARITIES]
-
-# Minimum lines number of a similarity.
-min-similarity-lines=4
-
-# Ignore comments when computing similarities.
-ignore-comments=yes
-
-# Ignore docstrings when computing similarities.
-ignore-docstrings=yes
-
-
-[TYPECHECK]
-
-# Tells whether missing members accessed in mixin class should be ignored. A
-# mixin class is detected if its name ends with "mixin" (case insensitive).
-ignore-mixin-members=yes
-
-# List of classes names for which member attributes should not be checked
-# (useful for classes with attributes dynamically set).
-ignored-classes=SQLObject
-
-# When zope mode is activated, add a predefined set of Zope acquired attributes
-# to generated-members.
-zope=no
-
-# List of members which are set dynamically and missed by pylint inference
-# system, and so shouldn't trigger E0201 when accessed. Python regular
-# expressions are accepted.
-generated-members=REQUEST,acl_users,aq_parent
-
-
-[FORMAT]
-
-# Maximum number of characters on a single line.
-max-line-length=80
-
-# Maximum number of lines in a module
-max-module-lines=1000
-
-# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
-# tab).
-indent-string='    '
-
-
-[MISCELLANEOUS]
-
-# List of note tags to take in consideration, separated by a comma.
-notes=FIXME,XXX,TODO
-
-
-[VARIABLES]
-
-# Tells whether we should check for unused import in __init__ files.
-init-import=no
-
-# A regular expression matching the beginning of the name of dummy variables
-# (i.e. not used).
-dummy-variables-rgx=_|dummy
-
-# List of additional names supposed to be defined in builtins. Remember that
-# you should avoid to define new builtins when possible.
-additional-builtins=
-
-
-[BASIC]
-
-# Required attributes for module, separated by a comma
-required-attributes=
-
-# List of builtins function names that should not be used, separated by a comma
-bad-functions=apply,input
-
-# Regular expression which should only match correct module names
-module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
-
-# Regular expression which should only match correct module level names
-const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__)|version)$
-
-# Regular expression which should only match correct class names
-class-rgx=[A-Z_][a-zA-Z0-9]+$
-
-# Regular expression which should only match correct function names
-function-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct method names
-method-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct instance attribute names
-attr-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct argument names
-argument-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct variable names
-variable-rgx=[a-z_][a-z0-9_]{0,30}$
-
-# Regular expression which should only match correct list comprehension /
-# generator expression variable names
-inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
-
-# Good variable names which should always be accepted, separated by a comma
-good-names=i,j,k,ex,Run,_
-
-# Bad variable names which should always be refused, separated by a comma
-bad-names=foo,bar,baz,toto,tutu,tata
-
-# Regular expression which should only match functions or classes name which do
-# not require a docstring
-no-docstring-rgx=__.*__
-
-
-[CLASSES]
-
-# List of interface methods to ignore, separated by a comma. This is used for
-# instance to not check methods defines in Zope's Interface base class.
-ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
-
-# List of method names used to declare (i.e. assign) instance attributes.
-defining-attr-methods=__init__,__new__,setUp
-
-# List of valid names for the first argument in a class method.
-valid-classmethod-first-arg=cls
-
-
-[DESIGN]
-
-# Maximum number of arguments for function / method
-max-args=5
-
-# Argument names that match this expression will be ignored. Default to name
-# with leading underscore
-ignored-argument-names=_.*
-
-# Maximum number of locals for function / method body
-max-locals=15
-
-# Maximum number of return / yield for function / method body
-max-returns=6
-
-# Maximum number of branch for function / method body
-max-branchs=12
-
-# Maximum number of statements in function / method body
-max-statements=50
-
-# Maximum number of parents for a class (see R0901).
-max-parents=7
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=7
-
-# Minimum number of public methods for a class (see R0903).
-min-public-methods=2
-
-# Maximum number of public methods for a class (see R0904).
-max-public-methods=20
-
-
-[IMPORTS]
-
-# Deprecated modules which should not be used, separated by a comma
-deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
-
-# Create a graph of every (i.e. internal and external) dependencies in the
-# given file (report RP0402 must not be disabled)
-import-graph=
-
-# Create a graph of external dependencies in the given file (report RP0402 must
-# not be disabled)
-ext-import-graph=
-
-# Create a graph of internal dependencies in the given file (report RP0402 must
-# not be disabled)
-int-import-graph=
-
-
-[EXCEPTIONS]
-
-# Exceptions that will emit a warning when being caught. Defaults to
-# "Exception"
-overgeneral-exceptions=Exception
MANIFEST.in (29 changes, new file)

@@ -0,0 +1,29 @@
+# Root
+include README.txt
+include setup.cfg
+include setup.py
+include versioneer.py
+include Makefile
+include .coveragerc
+include .pylintrc
+include requirements.txt
+
+# Cython files -- include .pyx source, but not the generated .c files
+# (Downstream systems must have cython installed in order to build)
+recursive-include nilmdb/server *.pyx *.pyxdep *.pxd
+exclude nilmdb/server/interval.c
+exclude nilmdb/server/rbtree.c
+
+# Version
+include nilmdb/_version.py
+
+# Tests
+recursive-include tests *.py
+recursive-include tests/data *
+include tests/test.order
+
+# Docs
+recursive-include docs Makefile *.md
+
+# Extras
+recursive-include extras *
Makefile (30 changes)

@@ -2,45 +2,49 @@
 all: test
 
 version:
-	python setup.py version
+	python3 setup.py version
 
 build:
-	python setup.py build_ext --inplace
+	python3 setup.py build_ext --inplace
 
 dist: sdist
 sdist:
-	python setup.py sdist
+	python3 setup.py sdist
 
 install:
-	python setup.py install
+	python3 setup.py install
 
 develop:
-	python setup.py develop
+	python3 setup.py develop
 
 docs:
 	make -C docs
 
+ctrl: flake
+flake:
+	flake8 nilmdb
 lint:
-	pylint --rcfile=.pylintrc nilmdb
+	pylint3 --rcfile=setup.cfg nilmdb
 
 test:
-ifeq ($(INSIDE_EMACS), t)
+ifneq ($(INSIDE_EMACS),)
 # Use the slightly more flexible script
-	python setup.py build_ext --inplace
-	python tests/runtests.py
+	python3 setup.py build_ext --inplace
+	python3 tests/runtests.py
 else
 # Let setup.py check dependencies, build stuff, and run the test
-	python setup.py nosetests
+	python3 setup.py nosetests
 endif
 
 clean::
-	find . -name '*pyc' | xargs rm -f
+	find . -name '*.pyc' -o -name '__pycache__' -print0 | xargs -0 rm -rf
 	rm -f .coverage
 	rm -rf tests/*testdb*
-	rm -rf nilmdb.egg-info/ build/ nilmdb/server/*.so MANIFEST.in
+	rm -rf nilmdb.egg-info/ build/ nilmdb/server/*.so
 	make -C docs clean
 
 gitclean::
	git clean -dXf
 
-.PHONY: all version build dist sdist install docs lint test clean gitclean
+.PHONY: all version build dist sdist install docs test
+.PHONY: ctrl lint flake clean gitclean
README.md (40 changes, new file)

@@ -0,0 +1,40 @@
+# nilmdb: Non-Intrusive Load Monitor Database
+by Jim Paris <jim@jtan.com>
+
+NilmDB requires Python 3.8 or newer.
+
+## Prerequisites:
+
+    # Runtime and build environments
+    sudo apt install python3 python3-dev python3-venv python3-pip
+
+    # Create a new Python virtual environment to isolate deps.
+    python3 -m venv ../venv
+    source ../venv/bin/activate   # run "deactivate" to leave
+
+    # Install all Python dependencies
+    pip3 install -r requirements.txt
+
+## Test:
+
+    python3 setup.py nosetests
+
+## Install:
+
+Install it into the virtual environment
+
+    python3 setup.py install
+
+If you want to instead install it system-wide, you will also need to
+install the requirements system-wide:
+
+    sudo pip3 install -r requirements.txt
+    sudo python3 setup.py install
+
+## Usage:
+
+    nilmdb-server --help
+    nilmdb-fsck --help
+    nilmtool --help
+
+See docs/wsgi.md for info on setting up a WSGI application in Apache.
README.txt (26 changes)

@@ -1,26 +0,0 @@
-nilmdb: Non-Intrusive Load Monitor Database
-by Jim Paris <jim@jtan.com>
-
-Prerequisites:
-
-  # Runtime and build environments
-  sudo apt-get install python2.7 python2.7-dev python-setuptools cython
-
-  # Base NilmDB dependencies
-  sudo apt-get install python-cherrypy3 python-decorator python-simplejson
-  sudo apt-get install python-requests python-dateutil python-tz python-psutil
-
-  # Tools for running tests
-  sudo apt-get install python-nose python-coverage
-
-Test:
-  python setup.py nosetests
-
-Install:
-
-  python setup.py install
-
-Usage:
-
-  nilmdb-server --help
-  nilmtool --help
@@ -389,3 +389,81 @@ Possible solutions:
     are always printed as int64 values, and a new format
     "@1234567890123456" is added to the parser for specifying them
     exactly.
+
+Binary interface
+----------------
+
+The ASCII interface is too slow for high-bandwidth processing, like
+sinefits, prep, etc.  A binary interface was added so that you can
+extract the raw binary out of the bulkdata storage.  This binary is
+a little-endian format, e.g. in C a uint16_6 stream would be:
+
+    #include <endian.h>
+    #include <stdint.h>
+    struct {
+        int64_t timestamp_le;
+        uint16_t data_le[6];
+    } __attribute__((packed));
+
+Remember to byteswap (with e.g. `letoh` in C)!
+
+This interface is used by the new `nilmdb.client.numpyclient.NumpyClient`
+class, which is a subclass of the normal `nilmcb.client.client.Client`
+and has all of the same functions.  It adds three new functions:
+
+- `stream_extract_numpy` to extract data as a Numpy array
+
+- `stream_insert_numpy` to insert data as a Numpy array
+
+- `stream_insert_numpy_context` is the context manager for
+  incrementally inserting data
+
+It is significantly faster!  It is about 20 times faster to decimate a
+stream with `nilm-decimate` when the filter code is using the new
+binary/numpy interface.
+
+
+WSGI interface & chunked requests
+---------------------------------
+
+mod_wsgi requires "WSGIChunkedRequest On" to handle
+"Transfer-encoding: Chunked" requests.  However, `/stream/insert`
+doesn't handle this correctly right now, because:
+
+- The `cherrypy.request.body.read()` call needs to be fixed for chunked requests
+
+- We don't want to just buffer endlessly in the server, and it will
+  require some thought on how to handle data in chunks (what to do about
+  interval endpoints).
+
+It is probably better to just keep the endpoint management on the client
+side, so leave "WSGIChunkedRequest off" for now.
+
+
+Unicode & character encoding
+----------------------------
+
+Stream data is passed back and forth as raw `bytes` objects in most
+places, including the `nilmdb.client` and command-line interfaces.
+This is done partially for performance reasons, and partially to
+support the binary insert/extract options, where character-set encoding
+would not apply.
+
+For the HTTP server, the raw bytes transferred over HTTP are interpreted
+as follows:
+- For `/stream/insert`, the client-provided `Content-Type` is ignored,
+  and the data is read as if it were `application/octet-stream`.
+- For `/stream/extract`, the returned data is `application/octet-stream`.
+- All other endpoints communicate via JSON, which is specified to always
+  be encoded as UTF-8.  This includes:
+    - `/version`
+    - `/dbinfo`
+    - `/stream/list`
+    - `/stream/create`
+    - `/stream/destroy`
+    - `/stream/rename`
+    - `/stream/get_metadata`
+    - `/stream/set_metadata`
+    - `/stream/update_metadata`
+    - `/stream/remove`
+    - `/stream/intervals`
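The NumpyClient functions listed in the section above can be exercised with
a short sketch like the following; the server URL, stream path, and keyword
arguments here are illustrative assumptions, not part of the diff:

    from nilmdb.client.numpyclient import NumpyClient

    client = NumpyClient("http://localhost/nilmdb")

    # Extract binary data as Numpy array blocks; timestamps are int64
    # and occupy the first column of each block.
    for block in client.stream_extract_numpy("/test/raw", start=None, end=None):
        print(block.shape)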
docs/wsgi.md (32 changes, new file)

@@ -0,0 +1,32 @@
+WSGI Application in Apache
+--------------------------
+
+Install `apache2` and `libapache2-mod-wsgi`
+
+We'll set up the database server at URL `http://myhost.com/nilmdb`.
+The database will be stored in `/home/nilm/db`, and the process will
+run as user `nilm`, group `nilm`.
+
+First, create a WSGI script `/home/nilm/nilmdb.wsgi` containing:
+
+    import nilmdb.server
+    application = nilmdb.server.wsgi_application("/home/nilm/db", "/nilmdb")
+
+The first parameter is the local filesystem path, and the second
+parameter is the path part of the URL.
+
+Then, set up Apache with a configuration like:
+
+    <VirtualHost>
+        WSGIScriptAlias /nilmdb /home/nilm/nilmdb.wsgi
+        WSGIDaemonProcess nilmdb-procgroup threads=32 user=nilm group=nilm
+        <Location /nilmdb>
+            WSGIProcessGroup nilmdb-procgroup
+            WSGIApplicationGroup nilmdb-appgroup
+
+            # Access control example:
+            Order deny,allow
+            Deny from all
+            Allow from 1.2.3.4
+        </Location>
+    </VirtualHost>
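For a quick local test of the same application object without Apache, the
standard-library wsgiref server can host it.  This is a sketch only; the
`./db` path and port are arbitrary, and URL-prefix handling may differ from
mod_wsgi:

    from wsgiref.simple_server import make_server
    import nilmdb.server

    # Same two parameters as above: filesystem path, then URL path part.
    application = nilmdb.server.wsgi_application("./db", "/nilmdb")
    make_server("127.0.0.1", 8080, application).serve_forever()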
extras/fix-oversize-files.py (50 changes, new file)

@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+
+import os
+import sys
+import pickle
+import argparse
+import fcntl
+import re
+from nilmdb.client.numpyclient import layout_to_dtype
+
+parser = argparse.ArgumentParser(
+    description = """
+Fix database corruption where binary writes caused too much data to be
+written to the file.  Truncates files to the correct length.  This was
+fixed by b98ff1331a515ad47fd3203615e835b529b039f9.
+""")
+parser.add_argument("path", action="store", help='Database root path')
+parser.add_argument("-y", "--yes", action="store_true", help='Fix them')
+args = parser.parse_args()
+
+lock = os.path.join(args.path, "data.lock")
+with open(lock, "w") as f:
+    fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+
+    fix = {}
+
+    for (path, dirs, files) in os.walk(args.path):
+        if "_format" in files:
+            with open(os.path.join(path, "_format")) as format:
+                fmt = pickle.load(format)
+                rowsize = layout_to_dtype(fmt["layout"]).itemsize
+                maxsize = rowsize * fmt["rows_per_file"]
+                fix[path] = maxsize
+                if maxsize < 128000000: # sanity check
+                    raise Exception("bad maxsize " + str(maxsize))
+
+    for fixpath in fix:
+        for (path, dirs, files) in os.walk(fixpath):
+            for fn in files:
+                if not re.match("^[0-9a-f]{4,}$", fn):
+                    continue
+                fn = os.path.join(path, fn)
+                size = os.path.getsize(fn)
+                maxsize = fix[fixpath]
+                if size > maxsize:
+                    diff = size - maxsize
+                    print(diff, "too big:", fn)
+                    if args.yes:
+                        with open(fn, "a+") as dbfile:
+                            dbfile.truncate(maxsize)
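Per the argparse definitions in the script above, running it with only the
database root path reports oversize files without modifying them, and adding
`-y` performs the truncation, e.g. `python3 extras/fix-oversize-files.py
/home/nilm/db -y` (the path is illustrative).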
@@ -17,4 +17,4 @@ _nilmtool_argcomplete() {
         unset COMPREPLY
     fi
 }
-complete -o nospace -o default -F _nilmtool_argcomplete nilmtool
+complete -o nospace -F _nilmtool_argcomplete nilmtool
nilmdb/__init__.py

@@ -1,10 +1,5 @@
 """Main NilmDB import"""
 
-# These aren't imported automatically, because loading the server
-# stuff isn't always necessary.
-#from nilmdb.server import NilmDB, Server
-#from nilmdb.client import Client
-
-from nilmdb._version import get_versions
+from ._version import get_versions
 __version__ = get_versions()['version']
 del get_versions
			|||||||
@@ -1,197 +1,520 @@
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
IN_LONG_VERSION_PY = True
 | 
					 | 
				
			||||||
# This file helps to compute a version number in source trees obtained from
 | 
					# This file helps to compute a version number in source trees obtained from
 | 
				
			||||||
# git-archive tarball (such as those provided by githubs download-from-tag
 | 
					# git-archive tarball (such as those provided by githubs download-from-tag
 | 
				
			||||||
# feature). Distribution tarballs (build by setup.py sdist) and build
 | 
					# feature). Distribution tarballs (built by setup.py sdist) and build
 | 
				
			||||||
# directories (produced by setup.py build) will contain a much shorter file
 | 
					# directories (produced by setup.py build) will contain a much shorter file
 | 
				
			||||||
# that just contains the computed version number.
 | 
					# that just contains the computed version number.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# This file is released into the public domain. Generated by
 | 
					# This file is released into the public domain. Generated by
 | 
				
			||||||
# versioneer-0.7+ (https://github.com/warner/python-versioneer)
 | 
					# versioneer-0.18 (https://github.com/warner/python-versioneer)
 | 
				
			||||||
 | 
					 | 
				
			||||||
# these strings will be replaced by git during git-archive
 | 
					 | 
				
			||||||
git_refnames = "$Format:%d$"
 | 
					 | 
				
			||||||
git_full = "$Format:%H$"
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					"""Git implementation of _version.py."""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import errno
 | 
				
			||||||
 | 
					import os
 | 
				
			||||||
 | 
					import re
 | 
				
			||||||
import subprocess
 | 
					import subprocess
 | 
				
			||||||
import sys
 | 
					import sys
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def run_command(args, cwd=None, verbose=False):
 | 
					
 | 
				
			||||||
 | 
					def get_keywords():
 | 
				
			||||||
 | 
					    """Get the keywords needed to look up the version information."""
 | 
				
			||||||
 | 
					    # these strings will be replaced by git during git-archive.
 | 
				
			||||||
 | 
					    # setup.py/versioneer.py will grep for the variable names, so they must
 | 
				
			||||||
 | 
					    # each be defined on a line of their own. _version.py will just call
 | 
				
			||||||
 | 
					    # get_keywords().
 | 
				
			||||||
 | 
					    git_refnames = "$Format:%d$"
 | 
				
			||||||
 | 
					    git_full = "$Format:%H$"
 | 
				
			||||||
 | 
					    git_date = "$Format:%ci$"
 | 
				
			||||||
 | 
					    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
 | 
				
			||||||
 | 
					    return keywords
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					class VersioneerConfig:
 | 
				
			||||||
 | 
					    """Container for Versioneer configuration parameters."""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def get_config():
 | 
				
			||||||
 | 
					    """Create, populate and return the VersioneerConfig() object."""
 | 
				
			||||||
 | 
					    # these strings are filled in when 'setup.py versioneer' creates
 | 
				
			||||||
 | 
					    # _version.py
 | 
				
			||||||
 | 
					    cfg = VersioneerConfig()
 | 
				
			||||||
 | 
					    cfg.VCS = "git"
 | 
				
			||||||
 | 
					    cfg.style = "pep440"
 | 
				
			||||||
 | 
					    cfg.tag_prefix = "nilmdb-"
 | 
				
			||||||
 | 
					    cfg.parentdir_prefix = "nilmdb-"
 | 
				
			||||||
 | 
					    cfg.versionfile_source = "nilmdb/_version.py"
 | 
				
			||||||
 | 
					    cfg.verbose = False
 | 
				
			||||||
 | 
					    return cfg
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					class NotThisMethod(Exception):
 | 
				
			||||||
 | 
					    """Exception raised if a method is not valid for the current scenario."""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					LONG_VERSION_PY = {}
 | 
				
			||||||
 | 
					HANDLERS = {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def register_vcs_handler(vcs, method):  # decorator
 | 
				
			||||||
 | 
					    """Decorator to mark a method as the handler for a particular VCS."""
 | 
				
			||||||
 | 
					    def decorate(f):
 | 
				
			||||||
 | 
					        """Store f in HANDLERS[vcs][method]."""
 | 
				
			||||||
 | 
					        if vcs not in HANDLERS:
 | 
				
			||||||
 | 
					            HANDLERS[vcs] = {}
 | 
				
			||||||
 | 
					        HANDLERS[vcs][method] = f
 | 
				
			||||||
 | 
					        return f
 | 
				
			||||||
 | 
					    return decorate
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
 | 
				
			||||||
 | 
					                env=None):
 | 
				
			||||||
 | 
					    """Call the given command(s)."""
 | 
				
			||||||
 | 
					    assert isinstance(commands, list)
 | 
				
			||||||
 | 
					    p = None
 | 
				
			||||||
 | 
					    for c in commands:
 | 
				
			||||||
        try:
 | 
					        try:
 | 
				
			||||||
 | 
					            dispcmd = str([c] + args)
 | 
				
			||||||
            # remember shell=False, so use git.cmd on windows, not just git
 | 
					            # remember shell=False, so use git.cmd on windows, not just git
 | 
				
			||||||
        p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd)
 | 
					            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
 | 
				
			||||||
 | 
					                                 stdout=subprocess.PIPE,
 | 
				
			||||||
 | 
					                                 stderr=(subprocess.PIPE if hide_stderr
 | 
				
			||||||
 | 
					                                         else None))
 | 
				
			||||||
 | 
					            break
 | 
				
			||||||
        except EnvironmentError:
 | 
					        except EnvironmentError:
 | 
				
			||||||
            e = sys.exc_info()[1]
 | 
					            e = sys.exc_info()[1]
 | 
				
			||||||
 | 
					            if e.errno == errno.ENOENT:
 | 
				
			||||||
 | 
					                continue
 | 
				
			||||||
            if verbose:
 | 
					            if verbose:
 | 
				
			||||||
            print("unable to run %s" % args[0])
 | 
					                print("unable to run %s" % dispcmd)
 | 
				
			||||||
                print(e)
 | 
					                print(e)
 | 
				
			||||||
        return None
 | 
					            return None, None
 | 
				
			||||||
 | 
					    else:
 | 
				
			||||||
 | 
					        if verbose:
 | 
				
			||||||
 | 
					            print("unable to find command, tried %s" % (commands,))
 | 
				
			||||||
 | 
					        return None, None
 | 
				
			||||||
    stdout = p.communicate()[0].strip()
 | 
					    stdout = p.communicate()[0].strip()
 | 
				
			||||||
    if sys.version >= '3':
 | 
					    if sys.version_info[0] >= 3:
 | 
				
			||||||
        stdout = stdout.decode()
 | 
					        stdout = stdout.decode()
 | 
				
			||||||
    if p.returncode != 0:
 | 
					    if p.returncode != 0:
 | 
				
			||||||
        if verbose:
 | 
					        if verbose:
 | 
				
			||||||
            print("unable to run %s (error)" % args[0])
 | 
					            print("unable to run %s (error)" % dispcmd)
 | 
				
			||||||
        return None
 | 
					            print("stdout was %s" % stdout)
 | 
				
			||||||
    return stdout
 | 
					        return None, p.returncode
 | 
				
			||||||
 | 
					    return stdout, p.returncode
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import sys
 | 
					def versions_from_parentdir(parentdir_prefix, root, verbose):
 | 
				
			||||||
import re
 | 
					    """Try to determine the version from the parent directory name.
 | 
				
			||||||
import os.path
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
def get_expanded_variables(versionfile_source):
 | 
					    Source tarballs conventionally unpack into a directory that includes both
 | 
				
			||||||
 | 
					    the project name and a version string. We will also support searching up
 | 
				
			||||||
 | 
					    two directory levels for an appropriately named parent directory
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    rootdirs = []
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    for i in range(3):
 | 
				
			||||||
 | 
					        dirname = os.path.basename(root)
 | 
				
			||||||
 | 
					        if dirname.startswith(parentdir_prefix):
 | 
				
			||||||
 | 
					            return {"version": dirname[len(parentdir_prefix):],
 | 
				
			||||||
 | 
					                    "full-revisionid": None,
 | 
				
			||||||
 | 
					                    "dirty": False, "error": None, "date": None}
 | 
				
			||||||
 | 
					        else:
 | 
				
			||||||
 | 
					            rootdirs.append(root)
 | 
				
			||||||
 | 
					            root = os.path.dirname(root)  # up a level
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if verbose:
 | 
				
			||||||
 | 
					        print("Tried directories %s but none started with prefix %s" %
 | 
				
			||||||
 | 
					              (str(rootdirs), parentdir_prefix))
 | 
				
			||||||
 | 
					    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					@register_vcs_handler("git", "get_keywords")
 | 
				
			||||||
 | 
					def git_get_keywords(versionfile_abs):
 | 
				
			||||||
 | 
					    """Extract version information from the given file."""
 | 
				
			||||||
    # the code embedded in _version.py can just fetch the value of these
 | 
					    # the code embedded in _version.py can just fetch the value of these
 | 
				
			||||||
    # variables. When used from setup.py, we don't want to import
 | 
					    # keywords. When used from setup.py, we don't want to import _version.py,
 | 
				
			||||||
    # _version.py, so we do it with a regexp instead. This function is not
 | 
					    # so we do it with a regexp instead. This function is not used from
 | 
				
			||||||
    # used from _version.py.
 | 
					    # _version.py.
 | 
				
			||||||
    variables = {}
 | 
					    keywords = {}
 | 
				
			||||||
    try:
 | 
					    try:
 | 
				
			||||||
        for line in open(versionfile_source,"r").readlines():
 | 
					        f = open(versionfile_abs, "r")
 | 
				
			||||||
 | 
					        for line in f.readlines():
 | 
				
			||||||
            if line.strip().startswith("git_refnames ="):
 | 
					            if line.strip().startswith("git_refnames ="):
 | 
				
			||||||
                mo = re.search(r'=\s*"(.*)"', line)
 | 
					                mo = re.search(r'=\s*"(.*)"', line)
 | 
				
			||||||
                if mo:
 | 
					                if mo:
 | 
				
			||||||
                    variables["refnames"] = mo.group(1)
 | 
					                    keywords["refnames"] = mo.group(1)
 | 
				
			||||||
            if line.strip().startswith("git_full ="):
 | 
					            if line.strip().startswith("git_full ="):
 | 
				
			||||||
                mo = re.search(r'=\s*"(.*)"', line)
 | 
					                mo = re.search(r'=\s*"(.*)"', line)
 | 
				
			||||||
                if mo:
 | 
					                if mo:
 | 
				
			||||||
                    variables["full"] = mo.group(1)
 | 
					                    keywords["full"] = mo.group(1)
 | 
				
			||||||
 | 
					            if line.strip().startswith("git_date ="):
 | 
				
			||||||
 | 
					                mo = re.search(r'=\s*"(.*)"', line)
 | 
				
			||||||
 | 
					                if mo:
 | 
				
			||||||
 | 
					                    keywords["date"] = mo.group(1)
 | 
				
			||||||
 | 
					        f.close()
 | 
				
			||||||
    except EnvironmentError:
 | 
					    except EnvironmentError:
 | 
				
			||||||
        pass
 | 
					        pass
 | 
				
			||||||
    return variables
 | 
					    return keywords
 | 
				
			||||||
 | 
					
 | 
				
			||||||
-def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
-    refnames = variables["refnames"].strip()
+@register_vcs_handler("git", "keywords")
+def git_versions_from_keywords(keywords, tag_prefix, verbose):
+    """Get version information from git keywords."""
+    if not keywords:
+        raise NotThisMethod("no keywords at all, weird")
+    date = keywords.get("date")
+    if date is not None:
+        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
+        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
+        # -like" string, which we must then edit to make compliant), because
+        # it's been around since git-1.5.3, and it's too difficult to
+        # discover which version we're using, or to work around using an
+        # older one.
+        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+    refnames = keywords["refnames"].strip()
     if refnames.startswith("$Format"):
         if verbose:
-            print("variables are unexpanded, not using")
-        return {}  # unexpanded, so not in an unpacked git-archive tarball
+            print("keywords are unexpanded, not using")
+        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
     refs = set([r.strip() for r in refnames.strip("()").split(",")])
-    for ref in list(refs):
-        if not re.search(r'\d', ref):
-            if verbose:
-                print("discarding '%s', no digits" % ref)
-            refs.discard(ref)
-            # Assume all version tags have a digit. git's %d expansion
-            # behaves like git log --decorate=short and strips out the
-            # refs/heads/ and refs/tags/ prefixes that would let us
-            # distinguish between branches and tags. By ignoring refnames
-            # without digits, we filter out many common branch names like
-            # "release" and "stabilization", as well as "HEAD" and "master".
+    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
+    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
+    TAG = "tag: "
+    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+    if not tags:
+        # Either we're using git < 1.8.3, or there really are no tags. We use
+        # a heuristic: assume all version tags have a digit. The old git %d
+        # expansion behaves like git log --decorate=short and strips out the
+        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
+        # between branches and tags. By ignoring refnames without digits, we
+        # filter out many common branch names like "release" and
+        # "stabilization", as well as "HEAD" and "master".
+        tags = set([r for r in refs if re.search(r'\d', r)])
+        if verbose:
+            print("discarding '%s', no digits" % ",".join(refs - tags))
     if verbose:
-        print("remaining refs: %s" % ",".join(sorted(refs)))
-    for ref in sorted(refs):
+        print("likely tags: %s" % ",".join(sorted(tags)))
+    for ref in sorted(tags):
         # sorting will prefer e.g. "2.0" over "2.0rc1"
         if ref.startswith(tag_prefix):
             r = ref[len(tag_prefix):]
             if verbose:
                 print("picking %s" % r)
-            return { "version": r,
-                     "full": variables["full"].strip() }
-    # no suitable tags, so we use the full revision id
+            return {"version": r,
+                    "full-revisionid": keywords["full"].strip(),
+                    "dirty": False, "error": None,
+                    "date": date}
+    # no suitable tags, so version is "0+unknown", but full hex is still there
     if verbose:
-        print("no suitable tags, using full revision id")
-    return { "version": variables["full"].strip(),
-             "full": variables["full"].strip() }
+        print("no suitable tags, using unknown + full revision id")
+    return {"version": "0+unknown",
+            "full-revisionid": keywords["full"].strip(),
+            "dirty": False, "error": "no suitable tags", "date": None}
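The two replace() calls above convert git's "%ci" output into an ISO-8601-compliant datestamp. A quick worked example (timestamp hypothetical):

date = "2015-10-21 13:05:00 -0700"
step1 = date.strip().replace(" ", "T", 1)   # "2015-10-21T13:05:00 -0700"
step2 = step1.replace(" ", "", 1)           # "2015-10-21T13:05:00-0700"
assert step2 == "2015-10-21T13:05:00-0700"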
-def versions_from_vcs(tag_prefix, versionfile_source, verbose=False):
-    # this runs 'git' from the root of the source tree. That either means
-    # someone ran a setup.py command (and this code is in versioneer.py, so
-    # IN_LONG_VERSION_PY=False, thus the containing directory is the root of
-    # the source tree), or someone ran a project-specific entry point (and
-    # this code is in _version.py, so IN_LONG_VERSION_PY=True, thus the
-    # containing directory is somewhere deeper in the source tree). This only
-    # gets called if the git-archive 'subst' variables were *not* expanded,
-    # and _version.py hasn't already been rewritten with a short version
-    # string, meaning we're inside a checked out source tree.
-
-    try:
-        here = os.path.abspath(__file__)
-    except NameError:
-        # some py2exe/bbfreeze/non-CPython implementations don't do __file__
-        return {}  # not always correct
-
-    # versionfile_source is the relative path from the top of the source tree
-    # (where the .git directory might live) to this file. Invert this to find
-    # the root from __file__.
-    root = here
-    if IN_LONG_VERSION_PY:
-        for i in range(len(versionfile_source.split("/"))):
-            root = os.path.dirname(root)
-    else:
-        root = os.path.dirname(here)
-    if not os.path.exists(os.path.join(root, ".git")):
-        if verbose:
-            print("no .git in %s" % root)
-        return {}
-
-    GIT = "git"
-    if sys.platform == "win32":
-        GIT = "git.cmd"
-    stdout = run_command([GIT, "describe", "--tags", "--dirty", "--always"],
-                         cwd=root)
-    if stdout is None:
-        return {}
-    if not stdout.startswith(tag_prefix):
-        if verbose:
-            print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
-        return {}
-    tag = stdout[len(tag_prefix):]
-    stdout = run_command([GIT, "rev-parse", "HEAD"], cwd=root)
-    if stdout is None:
-        return {}
-    full = stdout.strip()
-    if tag.endswith("-dirty"):
-        full += "-dirty"
-    return {"version": tag, "full": full}
+@register_vcs_handler("git", "pieces_from_vcs")
+def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+    """Get version from 'git describe' in the root of the source tree.
+
+    This only gets called if the git-archive 'subst' keywords were *not*
+    expanded, and _version.py hasn't already been rewritten with a short
+    version string, meaning we're inside a checked out source tree.
+    """
+    GITS = ["git"]
+    if sys.platform == "win32":
+        GITS = ["git.cmd", "git.exe"]
+
+    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
+                          hide_stderr=True)
+    if rc != 0:
+        if verbose:
+            print("Directory %s not under git control" % root)
+        raise NotThisMethod("'git rev-parse --git-dir' returned error")
+
+    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
+    # if there isn't one, this yields HEX[-dirty] (no NUM)
+    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
+                                          "--always", "--long",
+                                          "--match", "%s*" % tag_prefix],
+                                   cwd=root)
+    # --long was added in git-1.5.5
+    if describe_out is None:
+        raise NotThisMethod("'git describe' failed")
+    describe_out = describe_out.strip()
+    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+    if full_out is None:
+        raise NotThisMethod("'git rev-parse' failed")
+    full_out = full_out.strip()
+
+    pieces = {}
+    pieces["long"] = full_out
+    pieces["short"] = full_out[:7]  # maybe improved later
+    pieces["error"] = None
+
+    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
+    # TAG might have hyphens.
+    git_describe = describe_out
+
+    # look for -dirty suffix
+    dirty = git_describe.endswith("-dirty")
+    pieces["dirty"] = dirty
+    if dirty:
+        git_describe = git_describe[:git_describe.rindex("-dirty")]
+
+    # now we have TAG-NUM-gHEX or HEX
+
+    if "-" in git_describe:
+        # TAG-NUM-gHEX
+        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
+        if not mo:
+            # unparseable. Maybe git-describe is misbehaving?
+            pieces["error"] = ("unable to parse git-describe output: '%s'"
+                               % describe_out)
+            return pieces
+
+        # tag
+        full_tag = mo.group(1)
+        if not full_tag.startswith(tag_prefix):
+            if verbose:
+                fmt = "tag '%s' doesn't start with prefix '%s'"
+                print(fmt % (full_tag, tag_prefix))
+            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
+                               % (full_tag, tag_prefix))
+            return pieces
+        pieces["closest-tag"] = full_tag[len(tag_prefix):]
+
+        # distance: number of commits since tag
+        pieces["distance"] = int(mo.group(2))
+
+        # commit: short hex revision ID
+        pieces["short"] = mo.group(3)
+
+    else:
+        # HEX: no tags
+        pieces["closest-tag"] = None
+        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
+                                    cwd=root)
+        pieces["distance"] = int(count_out)  # total number of commits
+
+    # commit date: see ISO-8601 comment in git_versions_from_keywords()
+    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
+                       cwd=root)[0].strip()
+    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
+
+    return pieces
-
-
-def versions_from_parentdir(parentdir_prefix, versionfile_source, verbose=False):
-    if IN_LONG_VERSION_PY:
-        # We're running from _version.py. If it's from a source tree
-        # (execute-in-place), we can work upwards to find the root of the
-        # tree, and then check the parent directory for a version string. If
-        # it's in an installed application, there's no hope.
-        try:
-            here = os.path.abspath(__file__)
-        except NameError:
-            # py2exe/bbfreeze/non-CPython don't have __file__
-            return {}  # without __file__, we have no hope
-        # versionfile_source is the relative path from the top of the source
-        # tree to _version.py. Invert this to find the root from __file__.
-        root = here
-        for i in range(len(versionfile_source.split("/"))):
-            root = os.path.dirname(root)
-    else:
-        # we're running from versioneer.py, which means we're running from
-        # the setup.py in a source tree. sys.argv[0] is setup.py in the root.
-        here = os.path.abspath(sys.argv[0])
-        root = os.path.dirname(here)
-
-    # Source tarballs conventionally unpack into a directory that includes
-    # both the project name and a version string.
-    dirname = os.path.basename(root)
-    if not dirname.startswith(parentdir_prefix):
-        if verbose:
-            print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
-                  (root, dirname, parentdir_prefix))
-        return None
-    return {"version": dirname[len(parentdir_prefix):], "full": ""}
-
-tag_prefix = "nilmdb-"
-parentdir_prefix = "nilmdb-"
-versionfile_source = "nilmdb/_version.py"
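A worked example of the describe-string parsing above, using the same regexp and -dirty handling (tag and hash are hypothetical):

import re

describe = "nilmdb-1.9.7-2-g1076c97-dirty"   # hypothetical 'git describe' output
dirty = describe.endswith("-dirty")
if dirty:
    describe = describe[:describe.rindex("-dirty")]
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', describe)
assert mo.group(1) == "nilmdb-1.9.7"   # full tag, prefix still attached
assert int(mo.group(2)) == 2           # commits since the tag
assert mo.group(3) == "1076c97"        # short hex revision id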
-def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
-    variables = { "refnames": git_refnames, "full": git_full }
-    ver = versions_from_expanded_variables(variables, tag_prefix, verbose)
-    if not ver:
-        ver = versions_from_vcs(tag_prefix, versionfile_source, verbose)
-    if not ver:
-        ver = versions_from_parentdir(parentdir_prefix, versionfile_source,
-                                      verbose)
-    if not ver:
-        ver = default
-    return ver
+def plus_or_dot(pieces):
+    """Return a + if we don't already have one, else return a ."""
+    if "+" in pieces.get("closest-tag", ""):
+        return "."
+    return "+"
+def render_pep440(pieces):
+    """Build up version string, with post-release "local version identifier".
+
+    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
+    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
+
+    Exceptions:
+    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += plus_or_dot(pieces)
+            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
+                                          pieces["short"])
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
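To make the pep440 rules concrete, here is a small self-contained mirror of render_pep440() with hand-checked expectations (the tag, distance, and hash values are made up):

def pep440(closest_tag, distance, short, dirty):
    # Mirrors render_pep440() above, with pieces unpacked into arguments.
    if closest_tag:
        rendered = closest_tag
        if distance or dirty:
            rendered += "." if "+" in closest_tag else "+"
            rendered += "%d.g%s" % (distance, short)
            if dirty:
                rendered += ".dirty"
    else:
        rendered = "0+untagged.%d.g%s" % (distance, short)
        if dirty:
            rendered += ".dirty"
    return rendered

assert pep440("1.9", 0, "1076c97", False) == "1.9"
assert pep440("1.9", 2, "1076c97", False) == "1.9+2.g1076c97"
assert pep440("1.9", 2, "1076c97", True) == "1.9+2.g1076c97.dirty"
assert pep440(None, 14, "1076c97", False) == "0+untagged.14.g1076c97"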
+
+
+def render_pep440_pre(pieces):
+    """TAG[.post.devDISTANCE] -- No -dirty.
+
+    Exceptions:
+    1: no tags. 0.post.devDISTANCE
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += ".post.dev%d" % pieces["distance"]
+    else:
+        # exception #1
+        rendered = "0.post.dev%d" % pieces["distance"]
+    return rendered
+
+
+def render_pep440_post(pieces):
+    """TAG[.postDISTANCE[.dev0]+gHEX] .
+
+    The ".dev0" means dirty. Note that .dev0 sorts backwards
+    (a dirty tree will appear "older" than the corresponding clean one),
+    but you shouldn't be releasing software with -dirty anyways.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "g%s" % pieces["short"]
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+        rendered += "+g%s" % pieces["short"]
+    return rendered
+
+
+def render_pep440_old(pieces):
+    """TAG[.postDISTANCE[.dev0]] .
+
+    The ".dev0" means dirty.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+    return rendered
+
+
+def render_git_describe(pieces):
+    """TAG[-DISTANCE-gHEX][-dirty].
+
+    Like 'git describe --tags --dirty --always'.
+
+    Exceptions:
+    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render_git_describe_long(pieces):
+    """TAG-DISTANCE-gHEX[-dirty].
+
+    Like 'git describe --tags --dirty --always --long'.
+    The distance/hash is unconditional.
+
+    Exceptions:
+    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render(pieces, style):
+    """Render the given version pieces into the requested style."""
+    if pieces["error"]:
+        return {"version": "unknown",
+                "full-revisionid": pieces.get("long"),
+                "dirty": None,
+                "error": pieces["error"],
+                "date": None}
+
+    if not style or style == "default":
+        style = "pep440"  # the default
+
+    if style == "pep440":
+        rendered = render_pep440(pieces)
+    elif style == "pep440-pre":
+        rendered = render_pep440_pre(pieces)
+    elif style == "pep440-post":
+        rendered = render_pep440_post(pieces)
+    elif style == "pep440-old":
+        rendered = render_pep440_old(pieces)
+    elif style == "git-describe":
+        rendered = render_git_describe(pieces)
+    elif style == "git-describe-long":
+        rendered = render_git_describe_long(pieces)
+    else:
+        raise ValueError("unknown style '%s'" % style)
+
+    return {"version": rendered, "full-revisionid": pieces["long"],
+            "dirty": pieces["dirty"], "error": None,
+            "date": pieces.get("date")}
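As a summary of the render styles, one hypothetical pieces dict and the string each style produces, worked by hand from the functions above:

pieces = {"closest-tag": "1.9", "distance": 2, "short": "1076c97",
          "long": "1076c97abc1076c97abc1076c97abc1076c97abc",
          "dirty": True, "error": None,
          "date": "2015-10-21T13:05:00-0700"}

# style               render(pieces, style)["version"]
# "pep440"            "1.9+2.g1076c97.dirty"
# "pep440-pre"        "1.9.post.dev2"
# "pep440-post"       "1.9.post2.dev0+g1076c97"
# "pep440-old"        "1.9.post2.dev0"
# "git-describe"      "1.9-2-g1076c97-dirty"
# "git-describe-long" "1.9-2-g1076c97-dirty"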
+
+
+def get_versions():
+    """Get version information or return default if unable to do so."""
+    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
+    # __file__, we can work backwards from there to the root. Some
+    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
+    # case we can only use expanded keywords.
+
+    cfg = get_config()
+    verbose = cfg.verbose
+
+    try:
+        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
+                                          verbose)
+    except NotThisMethod:
+        pass
+
+    try:
+        root = os.path.realpath(__file__)
+        # versionfile_source is the relative path from the top of the source
+        # tree (where the .git directory might live) to this file. Invert
+        # this to find the root from __file__.
+        for i in cfg.versionfile_source.split('/'):
+            root = os.path.dirname(root)
+    except NameError:
+        return {"version": "0+unknown", "full-revisionid": None,
+                "dirty": None,
+                "error": "unable to find root of source tree",
+                "date": None}
+
+    try:
+        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
+        return render(pieces, cfg.style)
+    except NotThisMethod:
+        pass
+
+    try:
+        if cfg.parentdir_prefix:
+            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
+    except NotThisMethod:
+        pass
+
+    return {"version": "0+unknown", "full-revisionid": None,
+            "dirty": None,
+            "error": "unable to compute version", "date": None}
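After this change, every path through get_versions() yields the same five-key dict instead of the old two-key {"version", "full"} shape. A sketch of a successful result (values hypothetical):

result = {
    "version": "1.9+2.g1076c97",
    "full-revisionid": "1076c97abc1076c97abc1076c97abc1076c97abc",
    "dirty": False,
    "error": None,
    "date": "2015-10-21T13:05:00-0700",
}
# On failure, "version" is "0+unknown" and "error" explains why.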
nilmdb/client/client.py
@@ -2,24 +2,24 @@
 """Class for performing HTTP client requests via libcurl"""
 
+import json
+import contextlib
+
 import nilmdb.utils
 import nilmdb.client.httpclient
 from nilmdb.client.errors import ClientError
-
-import time
-import simplejson as json
-import contextlib
 
 from nilmdb.utils.time import timestamp_to_string, string_to_timestamp
 
 
 def extract_timestamp(line):
     """Extract just the timestamp from a line of data text"""
     return string_to_timestamp(line.split()[0])
 
-class Client(object):
+
+class Client():
     """Main client interface to the Nilm database."""
 
-    def __init__(self, url, post_json = False):
+    def __init__(self, url, post_json=False):
         """Initialize client with given URL.  If post_json is true,
         POST requests are sent with Content-Type 'application/json'
         instead of the default 'x-www-form-urlencoded'."""
@@ -38,7 +38,7 @@ class Client(object):
         if self.post_json:
             # If we're posting as JSON, we don't need to encode it further here
             return data
-        return json.dumps(data, separators=(',',':'))
+        return json.dumps(data, separators=(',', ':'))
 
     def close(self):
         """Close the connection; safe to call multiple times"""
@@ -57,7 +57,12 @@ class Client(object):
         as a dictionary."""
         return self.http.get("dbinfo")
 
-    def stream_list(self, path = None, layout = None, extended = False):
+    def stream_list(self, path=None, layout=None, extended=False):
+        """Return a sorted list of [path, layout] lists.  If 'path' or
+        'layout' are specified, only return streams that match those
+        exact values.  If 'extended' is True, the returned lists have
+        extended info, e.g.: [path, layout, extent_min, extent_max,
+        total_rows, total_seconds]."""
         params = {}
         if path is not None:
             params["path"] = path
@@ -65,10 +70,12 @@ class Client(object):
             params["layout"] = layout
         if extended:
             params["extended"] = 1
-        return self.http.get("stream/list", params)
+        streams = self.http.get("stream/list", params)
+        return nilmdb.utils.sort.sort_human(streams, key=lambda s: s[0])
 
-    def stream_get_metadata(self, path, keys = None):
-        params = { "path": path }
+    def stream_get_metadata(self, path, keys=None):
+        """Get stream metadata"""
+        params = {"path": path}
         if keys is not None:
             params["key"] = keys
         return self.http.get("stream/get_metadata", params)
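stream_list() now sorts on the client side with nilmdb.utils.sort.sort_human. Its exact implementation isn't shown in this diff; the sketch below is a generic version of that kind of sort key, to show why it matters for stream paths with embedded numbers:

import re

def human_sort_key(s):
    # Split "/test/10" into ["/test/", 10, ""] so numeric runs compare
    # as integers rather than character-by-character.
    return [int(part) if part.isdigit() else part
            for part in re.split(r'(\d+)', s)]

paths = ["/test/10", "/test/2", "/test/1"]
assert sorted(paths, key=human_sort_key) == ["/test/1", "/test/2", "/test/10"]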
@@ -92,22 +99,28 @@ class Client(object):
 
     def stream_create(self, path, layout):
         """Create a new stream"""
-        params = { "path": path,
-                   "layout" : layout }
+        params = {
+            "path": path,
+            "layout": layout
+        }
         return self.http.post("stream/create", params)
 
     def stream_destroy(self, path):
-        """Delete stream and its contents"""
-        params = { "path": path }
+        """Delete stream.  Fails if any data is still present."""
+        params = {
+            "path": path
+        }
         return self.http.post("stream/destroy", params)
 
     def stream_rename(self, oldpath, newpath):
         """Rename a stream."""
-        params = { "oldpath": oldpath,
-                   "newpath": newpath }
+        params = {
+            "oldpath": oldpath,
+            "newpath": newpath
+        }
         return self.http.post("stream/rename", params)
 
-    def stream_remove(self, path, start = None, end = None):
+    def stream_remove(self, path, start=None, end=None):
         """Remove data from the specified time range"""
         params = {
             "path": path
@@ -116,48 +129,71 @@ class Client(object):
             params["start"] = timestamp_to_string(start)
         if end is not None:
             params["end"] = timestamp_to_string(end)
-        return self.http.post("stream/remove", params)
+        total = 0
+        for count in self.http.post_gen("stream/remove", params):
+            total += int(count)
+        return total
 
     @contextlib.contextmanager
-    def stream_insert_context(self, path, start = None, end = None):
+    def stream_insert_context(self, path, start=None, end=None):
         """Return a context manager that allows data to be efficiently
-        inserted into a stream in a piecewise manner.  Data is be provided
-        as single lines, and is aggregated and sent to the server in larger
-        chunks as necessary.  Data lines must match the database layout for
-        the given path, and end with a newline.
+        inserted into a stream in a piecewise manner.  Data is
+        provided as ASCII lines, and is aggregated and sent to the
+        server in larger or smaller chunks as necessary.  Data lines
+        must match the database layout for the given path, and end
+        with a newline.
 
         Example:
           with client.stream_insert_context('/path', start, end) as ctx:
-            ctx.insert('1234567890.0 1 2 3 4\\n')
-            ctx.insert('1234567891.0 1 2 3 4\\n')
+            ctx.insert('1234567890000000 1 2 3 4\\n')
+            ctx.insert('1234567891000000 1 2 3 4\\n')
 
         For more details, see help for nilmdb.client.client.StreamInserter
 
         This may make multiple requests to the server, if the data is
         large enough or enough time has passed between insertions.
         """
-        ctx = StreamInserter(self.http, path, start, end)
+        ctx = StreamInserter(self, path, start, end)
         yield ctx
         ctx.finalize()
+        ctx.destroy()
 
-    def stream_insert(self, path, data, start = None, end = None):
+    def stream_insert(self, path, data, start=None, end=None):
         """Insert rows of data into a stream.  data should be a string
         or iterable that provides ASCII data that matches the database
-        layout for path.  See stream_insert_context for details on the
-        'start' and 'end' parameters."""
+        layout for path.  Data is passed through stream_insert_context,
+        so it will be broken into reasonably-sized chunks and
+        start/end will be deduced if missing."""
         with self.stream_insert_context(path, start, end) as ctx:
-            if isinstance(data, basestring):
+            if isinstance(data, bytes):
                 ctx.insert(data)
             else:
                 for chunk in data:
                     ctx.insert(chunk)
         return ctx.last_response
 
-    def stream_intervals(self, path, start = None, end = None, diffpath = None):
+    def stream_insert_block(self, path, data, start, end, binary=False):
+        """Insert a single fixed block of data into the stream.  It is
+        sent directly to the server in one block with no further
+        processing.
+
+        If 'binary' is True, provide raw binary data in little-endian
+        format matching the path layout, including an int64 timestamp.
+        Otherwise, provide ASCII data matching the layout."""
+        params = {
+            "path": path,
+            "start": timestamp_to_string(start),
+            "end": timestamp_to_string(end),
+        }
+        if binary:
+            params["binary"] = 1
+        return self.http.put("stream/insert", data, params)
+
+    def stream_intervals(self, path, start=None, end=None, diffpath=None):
         """
         Return a generator that yields each stream interval.
 
-        If diffpath is not None, yields only interval ranges that are
+        If 'diffpath' is not None, yields only interval ranges that are
         present in 'path' but not in 'diffpath'.
         """
         params = {
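A sketch of typical client-side usage of the insert API above. The URL, path, and layout are hypothetical, and timestamps are in nilmdb's microsecond units:

import nilmdb.client

client = nilmdb.client.Client("http://localhost:12380/")  # hypothetical URL
client.stream_create("/demo/raw", "float32_2")            # hypothetical path/layout

# Each line is "<timestamp> <col1> <col2>\n"; the context manager batches
# lines into larger requests and finalizes the interval on exit.
with client.stream_insert_context("/demo/raw") as ctx:
    for i in range(10):
        ts = 1234567890000000 + i * 1000000
        ctx.insert(b"%d 1.0 2.0\n" % ts)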
@@ -171,14 +207,23 @@ class Client(object):
             params["end"] = timestamp_to_string(end)
         return self.http.get_gen("stream/intervals", params)
 
-    def stream_extract(self, path, start = None, end = None, count = False):
+    def stream_extract(self, path, start=None, end=None,
+                       count=False, markup=False, binary=False):
         """
         Extract data from a stream.  Returns a generator that yields
         lines of ASCII-formatted data that matches the database
         layout for the given path.
 
-        Specify count = True to return a count of matching data points
+        If 'count' is True, return a count of matching data points
         rather than the actual data.  The output format is unchanged.
+
+        If 'markup' is True, include comments in the returned data
+        that indicate interval starts and ends.
+
+        If 'binary' is True, return chunks of raw binary data, rather
+        than lines of ASCII-formatted data.  Raw binary data is
+        little-endian and matches the database types (including an
+        int64 timestamp).
         """
         params = {
             "path": path,
@@ -189,17 +234,22 @@ class Client(object):
             params["end"] = timestamp_to_string(end)
         if count:
             params["count"] = 1
-        return self.http.get_gen("stream/extract", params)
+        if markup:
+            params["markup"] = 1
+        if binary:
+            params["binary"] = 1
+        return self.http.get_gen("stream/extract", params, binary=binary)
 
-    def stream_count(self, path, start = None, end = None):
+    def stream_count(self, path, start=None, end=None):
         """
         Return the number of rows of data in the stream that satisfy
         the given timestamps.
         """
-        counts = list(self.stream_extract(path, start, end, count = True))
+        counts = list(self.stream_extract(path, start, end, count=True))
         return int(counts[0])
 
-class StreamInserter(object):
+
+class StreamInserter():
     """Object returned by stream_insert_context() that manages
     the insertion of rows of data into a particular path.
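Per the docstring above, binary extraction yields little-endian rows: an int64 timestamp followed by the layout's values. A hedged sketch of unpacking such a chunk for a hypothetical float32_2 layout:

import struct

# One row of a hypothetical 'float32_2' stream: int64 timestamp (µs)
# plus two little-endian float32 values.
row_fmt = "<qff"
row_size = struct.calcsize(row_fmt)   # 16 bytes

def parse_rows(chunk):
    for off in range(0, len(chunk) - len(chunk) % row_size, row_size):
        yield struct.unpack_from(row_fmt, chunk, off)

sample = struct.pack(row_fmt, 1234567890000000, 1.0, 2.0)
assert list(parse_rows(sample)) == [(1234567890000000, 1.0, 2.0)]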
@@ -238,13 +288,13 @@ class StreamInserter(object):
     _max_data = 2 * 1024 * 1024
     _max_data_after_send = 64 * 1024
 
-    def __init__(self, http, path, start = None, end = None):
-        """'http' is the httpclient object.  'path' is the database
+    def __init__(self, client, path, start, end):
+        """'client' is the client object.  'path' is the database
         path to insert to.  'start' and 'end' are used for the first
-        contiguous interval."""
+        contiguous interval and may be None."""
         self.last_response = None
 
-        self._http = http
+        self._client = client
         self._path = path
 
         # Start and end for the overall contiguous interval we're
@@ -257,6 +307,15 @@ class StreamInserter(object):
         self._block_data = []
         self._block_len = 0
 
+        self.destroyed = False
+
+    def destroy(self):
+        """Ensure this object can't be used again without raising
+        an error"""
+        def error(*args, **kwargs):
+            raise Exception("don't reuse this context object")
+        self._send_block = self.insert = self.finalize = self.send = error
+
     def insert(self, data):
         """Insert a chunk of ASCII formatted data in string form.  The
         overall data must consist of lines terminated by '\\n'."""
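destroy() works by rebinding the instance's public methods to a closure that raises. A tiny standalone sketch of the same reuse-guard pattern:

class OneShot:
    """Minimal sketch of the guard used by destroy() above."""
    def fire(self):
        print("fired")

    def destroy(self):
        def error(*args, **kwargs):
            raise Exception("don't reuse this context object")
        # Rebind on the *instance*, shadowing the class attribute.
        self.fire = error

shot = OneShot()
shot.fire()        # prints "fired"
shot.destroy()
try:
    shot.fire()    # now raises
except Exception as e:
    assert "reuse" in str(e)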
@@ -278,8 +337,8 @@ class StreamInserter(object):
 
         # Send the block once we have enough data
         if self._block_len >= maxdata:
-            self._send_block(final = False)
-            if self._block_len >= self._max_data_after_send: # pragma: no cover
+            self._send_block(final=False)
+            if self._block_len >= self._max_data_after_send:
                 raise ValueError("too much data left over after trying"
                                  " to send intermediate block; is it"
                                  " missing newlines or malformed?")
@@ -305,7 +364,12 @@ class StreamInserter(object):
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
        If more data is inserted after a finalize(), it will become
 | 
					        If more data is inserted after a finalize(), it will become
 | 
				
			||||||
        part of a new interval and there may be a gap left in-between."""
 | 
					        part of a new interval and there may be a gap left in-between."""
 | 
				
			||||||
        self._send_block(final = True)
 | 
					        self._send_block(final=True)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    def send(self):
 | 
				
			||||||
 | 
					        """Send any data that we might have buffered up.  Does not affect
 | 
				
			||||||
 | 
					        any other treatment of timestamps or endpoints."""
 | 
				
			||||||
 | 
					        self._send_block(final=False)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def _get_first_noncomment(self, block):
 | 
nilmdb/client/client.py

     def _get_first_noncomment(self, block):
         """Return the (start, end) indices of the first full line in
@@ -313,10 +377,10 @@ class StreamInserter(object):
         there isn't one."""
         start = 0
         while True:
-            end = block.find('\n', start)
+            end = block.find(b'\n', start)
             if end < 0:
                 raise IndexError
-            if block[start] != '#':
+            if block[start] != b'#'[0]:
                 return (start, (end + 1))
             start = end + 1
 
@@ -324,22 +388,22 @@ class StreamInserter(object):
         """Return the (start, end) indices of the last full line in
         block[:length] that isn't a comment, or raise IndexError if
         there isn't one."""
-        end = block.rfind('\n')
+        end = block.rfind(b'\n')
         if end <= 0:
             raise IndexError
         while True:
-            start = block.rfind('\n', 0, end)
-            if block[start + 1] != '#':
+            start = block.rfind(b'\n', 0, end)
+            if block[start + 1] != b'#'[0]:
                 return ((start + 1), end)
             if start == -1:
                 raise IndexError
             end = start
 
-    def _send_block(self, final = False):
+    def _send_block(self, final=False):
         """Send data currently in the block.  The data sent will
         consist of full lines only, so some might be left over."""
         # Build the full string to send
-        block = "".join(self._block_data)
+        block = b"".join(self._block_data)
 
         start_ts = self._interval_start
         if start_ts is None:
@@ -356,7 +420,7 @@ class StreamInserter(object):
             # or the timestamp of the last line plus epsilon.
             end_ts = self._interval_end
             try:
-                if block[-1] != '\n':
+                if block[-1] != b'\n'[0]:
                     raise ValueError("final block didn't end with a newline")
                 if end_ts is None:
                     (spos, epos) = self._get_last_noncomment(block)
@@ -390,7 +454,7 @@ class StreamInserter(object):
                 # the server complain so that the error is the same
                 # as if we hadn't done this chunking.
                 end_ts = self._interval_end
-            self._block_data = [ block[spos:] ]
+            self._block_data = [block[spos:]]
             self._block_len = (epos - spos)
             block = block[:spos]
 
@@ -398,7 +462,7 @@ class StreamInserter(object):
             self._interval_start = end_ts
 
         # Double check endpoints
-        if start_ts is None or end_ts is None:
+        if (start_ts is None or end_ts is None) or (start_ts == end_ts):
             # If the block has no non-comment lines, it's OK
             try:
                 self._get_first_noncomment(block)
@@ -407,9 +471,7 @@ class StreamInserter(object):
             raise ClientError("have data to send, but no start/end times")
 
         # Send it
-        params = { "path": self._path,
-                   "start": timestamp_to_string(start_ts),
-                   "end": timestamp_to_string(end_ts) }
-        self.last_response = self._http.put("stream/insert", block, params)
+        self.last_response = self._client.stream_insert_block(
+            self._path, block, start_ts, end_ts, binary=False)
 
         return
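A note on the `b'#'[0]` and `b'\n'[0]` comparisons introduced above: in Python 3, indexing a `bytes` object yields an `int`, so comparing `block[start]` against a one-character string (or a one-byte bytes object) is always unequal. A minimal sketch of the pitfall this port avoids:

```python
# Python 3: indexing bytes yields an integer byte value, not bytes.
block = b"# comment\n1234 5\n"
print(block[0])             # 35, the byte value of '#'
print(block[0] == '#')      # False: int never equals str
print(block[0] == b'#')     # False: int never equals bytes
print(block[0] == b'#'[0])  # True: int compared to int
```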
nilmdb/client/errors.py
@@ -1,33 +1,41 @@
 """HTTP client errors"""
 
-from nilmdb.utils.printf import *
+from nilmdb.utils.printf import sprintf
 
 
 class Error(Exception):
     """Base exception for both ClientError and ServerError responses"""
     def __init__(self,
-                 status = "Unspecified error",
-                 message = None,
-                 url = None,
-                 traceback = None):
-        Exception.__init__(self, status)
+                 status="Unspecified error",
+                 message=None,
+                 url=None,
+                 traceback=None):
+        super().__init__(status)
         self.status = status     # e.g. "400 Bad Request"
         self.message = message   # textual message from the server
         self.url = url           # URL we were requesting
         self.traceback = traceback  # server traceback, if available
 
     def _format_error(self, show_url):
         s = sprintf("[%s]", self.status)
         if self.message:
             s += sprintf(" %s", self.message)
-        if show_url and self.url: # pragma: no cover
+        if show_url and self.url:
             s += sprintf(" (%s)", self.url)
-        if self.traceback: # pragma: no cover
+        if self.traceback:
             s += sprintf("\nServer traceback:\n%s", self.traceback)
         return s
 
     def __str__(self):
-        return self._format_error(show_url = False)
-    def __repr__(self): # pragma: no cover
-        return self._format_error(show_url = True)
+        return self._format_error(show_url=False)
+
+    def __repr__(self):
+        return self._format_error(show_url=True)
 
 
 class ClientError(Error):
     pass
 
 
 class ServerError(Error):
     pass
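To see what the reworked `_format_error` produces, here is a hedged sketch; the status, message, and URL values are made up for illustration. Per the code above, the URL only appears in `repr()`:

```python
from nilmdb.client.errors import ClientError

e = ClientError(status="400 Bad Request",
                message="no such stream",
                url="http://localhost/nilmdb/stream/list")
print(str(e))   # [400 Bad Request] no such stream
print(repr(e))  # [400 Bad Request] no such stream (http://localhost/nilmdb/stream/list)
```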
nilmdb/client/httpclient.py
@@ -1,26 +1,25 @@
 """HTTP client library"""
 
-import nilmdb.utils
-from nilmdb.client.errors import ClientError, ServerError, Error
-
-import simplejson as json
-import urlparse
+import json
+import urllib.parse
+
 import requests
 
-class HTTPClient(object):
+from nilmdb.client.errors import ClientError, ServerError, Error
+
+
+class HTTPClient():
     """Class to manage and perform HTTP requests from the client"""
-    def __init__(self, baseurl = "", post_json = False):
+    def __init__(self, baseurl="", post_json=False, verify_ssl=True):
         """If baseurl is supplied, all other functions that take
         a URL can be given a relative URL instead."""
         # Verify / clean up URL
-        reparsed = urlparse.urlparse(baseurl).geturl()
+        reparsed = urllib.parse.urlparse(baseurl).geturl()
         if '://' not in reparsed:
-            reparsed = urlparse.urlparse("http://" + baseurl).geturl()
-        self.baseurl = reparsed
+            reparsed = urllib.parse.urlparse("http://" + baseurl).geturl()
+        self.baseurl = reparsed.rstrip('/') + '/'
 
-        # Build Requests session object, enable SSL verification
-        self.session = requests.Session()
-        self.session.verify = True
+        # Note whether we want SSL verification
+        self.verify_ssl = verify_ssl
 
         # Saved response, so that tests can verify a few things.
         self._last_response = {}
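The new trailing-slash normalization of `baseurl` matters because `urllib.parse.urljoin` treats a base URL without a trailing slash as ending in a "file" component and replaces it. A quick illustration with a hypothetical server path:

```python
import urllib.parse

print(urllib.parse.urljoin("http://localhost/nilmdb", "stream/list"))
# http://localhost/stream/list   -- the /nilmdb component is dropped
print(urllib.parse.urljoin("http://localhost/nilmdb/", "stream/list"))
# http://localhost/nilmdb/stream/list
```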
@@ -33,44 +32,64 @@ class HTTPClient(object):
         # Default variables for exception.  We use the entire body as
         # the default message, in case we can't extract it from a JSON
         # response.
-        args = { "url" : url,
-                 "status" : str(code),
-                 "message" : body,
-                 "traceback" : None }
+        args = {
+            "url": url,
+            "status": str(code),
+            "message": body,
+            "traceback": None
+        }
         try:
             # Fill with server-provided data if we can
             jsonerror = json.loads(body)
             args["status"] = jsonerror["status"]
             args["message"] = jsonerror["message"]
             args["traceback"] = jsonerror["traceback"]
-        except Exception: # pragma: no cover
+        except Exception:
             pass
-        if code >= 400 and code <= 499:
+        if 400 <= code <= 499:
             raise ClientError(**args)
-        else: # pragma: no cover
-            if code >= 500 and code <= 599:
+        else:
+            if 500 <= code <= 599:
                 if args["message"] is None:
-                    args["message"] = ("(no message; try disabling " +
-                                       "response.stream option in " +
+                    args["message"] = ("(no message; try disabling "
+                                       "response.stream option in "
                                        "nilmdb.server for better debugging)")
                 raise ServerError(**args)
             else:
                 raise Error(**args)
 
     def close(self):
-        self.session.close()
+        pass
 
     def _do_req(self, method, url, query_data, body_data, stream, headers):
-        url = urlparse.urljoin(self.baseurl, url)
+        url = urllib.parse.urljoin(self.baseurl, url)
         try:
-            response = self.session.request(method, url,
-                                            params = query_data,
-                                            data = body_data,
-                                            stream = stream,
-                                            headers = headers)
+            # Create a new session, ensure we send "Connection: close",
+            # and explicitly close connection after the transfer.
+            # This is to avoid HTTP/1.1 persistent connections
+            # (keepalive), because they have fundamental race
+            # conditions when there are delays between requests:
+            # a new request may be sent at the same instant that the
+            # server decides to timeout the connection.
+            session = requests.Session()
+            if headers is None:
+                headers = {}
+            headers["Connection"] = "close"
+            response = session.request(method, url,
+                                       params=query_data,
+                                       data=body_data,
+                                       stream=stream,
+                                       headers=headers,
+                                       verify=self.verify_ssl)
+
+            # Close the connection.  If it's a generator (stream =
+            # True), the requests library shouldn't actually close the
+            # HTTP connection until all data has been read from the
+            # response.
+            session.close()
         except requests.RequestException as e:
-            raise ServerError(status = "502 Error", url = url,
-                              message = str(e.message))
+            raise ServerError(status="502 Error", url=url,
+                              message=str(e))
         if response.status_code != 200:
             self._handle_error(url, response.status_code, response.content)
         self._last_response = response
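The long comment in `_do_req` explains the move away from a shared keepalive session: with HTTP/1.1 persistent connections, a request can be written at the same instant the server times out the idle socket, failing spuriously. A standalone sketch of the per-request pattern used here (URL hypothetical):

```python
import requests

def fetch_once(url):
    # One short-lived session per request; "Connection: close" asks the
    # server not to hold the socket open, sidestepping the keepalive race.
    session = requests.Session()
    try:
        return session.get(url, headers={"Connection": "close"}).text
    finally:
        session.close()

# fetch_once("http://localhost/nilmdb/version")
```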
@@ -81,53 +100,90 @@ class HTTPClient(object):
             return (response, False)
 
     # Normal versions that return data directly
-    def _req(self, method, url, query = None, body = None, headers = None):
+    def _req(self, method, url, query=None, body=None, headers=None):
         """
         Make a request and return the body data as a string or parsed
         JSON object, or raise an error if it contained an error.
         """
         (response, isjson) = self._do_req(method, url, query, body,
-                                          stream = False, headers = headers)
+                                          stream=False, headers=headers)
         if isjson:
             return json.loads(response.content)
-        return response.content
+        return response.text
 
-    def get(self, url, params = None):
+    def get(self, url, params=None):
         """Simple GET (parameters in URL)"""
         return self._req("GET", url, params, None)
 
-    def post(self, url, params = None):
+    def post(self, url, params=None):
         """Simple POST (parameters in body)"""
         if self.post_json:
             return self._req("POST", url, None,
                              json.dumps(params),
-                             { 'Content-type': 'application/json' })
+                             {'Content-type': 'application/json'})
         else:
             return self._req("POST", url, None, params)
 
-    def put(self, url, data, params = None):
+    def put(self, url, data, params=None,
+            content_type="application/octet-stream"):
         """Simple PUT (parameters in URL, data in body)"""
-        return self._req("PUT", url, params, data)
+        h = {'Content-type': content_type}
+        return self._req("PUT", url, query=params, body=data, headers=h)
 
     # Generator versions that return data one line at a time.
-    def _req_gen(self, method, url, query = None, body = None, headers = None):
+    def _req_gen(self, method, url, query=None, body=None,
+                 headers=None, binary=False):
         """
         Make a request and return a generator that gives back strings
         or JSON decoded lines of the body data, or raise an error if
         it contained an eror.
         """
         (response, isjson) = self._do_req(method, url, query, body,
-                                          stream = True, headers = headers)
-        if isjson:
-            for line in response.iter_lines():
+                                          stream=True, headers=headers)
+
+        # Like the iter_lines function in Requests, but only splits on
+        # the specified line ending.
+        def lines(source, ending):
+            pending = None
+            for chunk in source:
+                if pending is not None:
+                    chunk = pending + chunk
+                tmp = chunk.split(ending)
+                lines = tmp[:-1]
+                if chunk.endswith(ending):
+                    pending = None
+                else:
+                    pending = tmp[-1]
+                for line in lines:
+                    yield line
+            if pending is not None:
+                yield pending
+
+        # Yield the chunks or lines as requested
+        if binary:
+            for chunk in response.iter_content(chunk_size=65536):
+                yield chunk
+        elif isjson:
+            for line in lines(response.iter_content(chunk_size=1),
+                              ending=b'\r\n'):
                 yield json.loads(line)
         else:
-            for line in response.iter_lines():
+            for line in lines(response.iter_content(chunk_size=65536),
+                              ending=b'\n'):
                 yield line
 
-    def get_gen(self, url, params = None):
+    def get_gen(self, url, params=None, binary=False):
         """Simple GET (parameters in URL) returning a generator"""
-        return self._req_gen("GET", url, params)
+        return self._req_gen("GET", url, params, binary=binary)
+
+    def post_gen(self, url, params=None):
+        """Simple POST (parameters in body) returning a generator"""
+        if self.post_json:
+            return self._req_gen("POST", url, None,
+                                 json.dumps(params),
+                                 {'Content-type': 'application/json'})
+        else:
+            return self._req_gen("POST", url, None, params)
 
     # Not much use for a POST or PUT generator, since they don't
     # return much data.
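The `lines()` helper above differs from Requests' `iter_lines()` in that it splits only on the exact delimiter given, never on a bare `\r` or `\n` inside a line. The same technique in isolation:

```python
def lines(source, ending):
    # Reassemble an iterable of byte chunks into lines that end with
    # exactly `ending`, buffering partial lines across chunk boundaries.
    pending = None
    for chunk in source:
        if pending is not None:
            chunk = pending + chunk
        tmp = chunk.split(ending)
        pending = None if chunk.endswith(ending) else tmp[-1]
        for line in tmp[:-1]:
            yield line
    if pending is not None:
        yield pending

chunks = [b'{"a": 1}\r', b'\n{"b": 2}\r\n']
print(list(lines(iter(chunks), b'\r\n')))
# [b'{"a": 1}', b'{"b": 2}']
```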
							
								
								
									
nilmdb/client/numpyclient.py (new file, 263 lines)
@@ -0,0 +1,263 @@
+# -*- coding: utf-8 -*-
+
+"""Provide a NumpyClient class that is based on normal Client, but has
+additional methods for extracting and inserting data via Numpy arrays."""
+
+import contextlib
+
+import numpy
+
+import nilmdb.utils
+import nilmdb.client.client
+import nilmdb.client.httpclient
+from nilmdb.client.errors import ClientError
+
+
+def layout_to_dtype(layout):
+    ltype = layout.split('_')[0]
+    lcount = int(layout.split('_')[1])
+    if ltype.startswith('int'):
+        atype = '<i' + str(int(ltype[3:]) // 8)
+    elif ltype.startswith('uint'):
+        atype = '<u' + str(int(ltype[4:]) // 8)
+    elif ltype.startswith('float'):
+        atype = '<f' + str(int(ltype[5:]) // 8)
+    else:
+        raise ValueError("bad layout")
+    if lcount == 1:
+        dtype = [('timestamp', '<i8'), ('data', atype)]
+    else:
+        dtype = [('timestamp', '<i8'), ('data', atype, lcount)]
+    return numpy.dtype(dtype)
+
+
+class NumpyClient(nilmdb.client.client.Client):
+    """Subclass of nilmdb.client.Client that adds additional methods for
+    extracting and inserting data via Numpy arrays."""
+
+    def _get_dtype(self, path, layout):
+        if layout is None:
+            streams = self.stream_list(path)
+            if len(streams) != 1:
+                raise ClientError("can't get layout for path: " + path)
+            layout = streams[0][1]
+        return layout_to_dtype(layout)
+
+    def stream_extract_numpy(self, path, start=None, end=None,
+                             layout=None, maxrows=100000,
+                             structured=False):
+        """
+        Extract data from a stream.  Returns a generator that yields
+        Numpy arrays of up to 'maxrows' of data each.
+
+        If 'layout' is None, it is read using stream_info.
+
+        If 'structured' is False, all data is converted to float64
+        and returned in a flat 2D array.  Otherwise, data is returned
+        as a structured dtype in a 1D array.
+        """
+        dtype = self._get_dtype(path, layout)
+
+        def to_numpy(data):
+            a = numpy.frombuffer(data, dtype)
+            if structured:
+                return a
+            return numpy.c_[a['timestamp'], a['data']]
+
+        chunks = []
+        total_len = 0
+        maxsize = dtype.itemsize * maxrows
+        for data in self.stream_extract(path, start, end, binary=True):
+            # Add this block of binary data
+            chunks.append(data)
+            total_len += len(data)
+
+            # See if we have enough to make the requested Numpy array
+            while total_len >= maxsize:
+                assembled = b"".join(chunks)
+                total_len -= maxsize
+                chunks = [assembled[maxsize:]]
+                block = assembled[:maxsize]
+                yield to_numpy(block)
+
+        if total_len:
+            yield to_numpy(b"".join(chunks))
+
+    @contextlib.contextmanager
+    def stream_insert_numpy_context(self, path, start=None, end=None,
+                                    layout=None):
+        """Return a context manager that allows data to be efficiently
+        inserted into a stream in a piecewise manner.  Data is
+        provided as Numpy arrays, and is aggregated and sent to the
+        server in larger or smaller chunks as necessary.  Data format
+        must match the database layout for the given path.
+
+        For more details, see help for
+        nilmdb.client.numpyclient.StreamInserterNumpy
+
+        If 'layout' is not None, use it as the layout rather than
+        querying the database.
+        """
+        dtype = self._get_dtype(path, layout)
+        ctx = StreamInserterNumpy(self, path, start, end, dtype)
+        yield ctx
+        ctx.finalize()
+        ctx.destroy()
+
+    def stream_insert_numpy(self, path, data, start=None, end=None,
+                            layout=None):
+        """Insert data into a stream.  data should be a Numpy array
+        which will be passed through stream_insert_numpy_context to
+        break it into chunks etc.  See the help for that function
+        for details."""
+        with self.stream_insert_numpy_context(path, start, end, layout) as ctx:
+            if isinstance(data, numpy.ndarray):
+                ctx.insert(data)
+            else:
+                for chunk in data:
+                    ctx.insert(chunk)
+        return ctx.last_response
+
+
+class StreamInserterNumpy(nilmdb.client.client.StreamInserter):
+    """Object returned by stream_insert_numpy_context() that manages
+    the insertion of rows of data into a particular path.
+
+    See help for nilmdb.client.client.StreamInserter for details.
+    The only difference is that, instead of ASCII formatted data,
+    this context manager can take Numpy arrays, which are either
+    structured (1D with complex dtype) or flat (2D with simple dtype).
+    """
+
+    # Soft limit of how many bytes to send per HTTP request.
+    _max_data = 2 * 1024 * 1024
+
+    def __init__(self, client, path, start, end, dtype):
+        """
+        'client' is the client object.  'path' is the database path
+        to insert to.  'start' and 'end' are used for the first
+        contiguous interval and may be None.  'dtype' is the Numpy
+        dtype for this stream.
+        """
+        super(StreamInserterNumpy, self).__init__(client, path, start, end)
+        self._dtype = dtype
+
+        # Max rows to send at once
+        self._max_rows = self._max_data // self._dtype.itemsize
+
+        # List of the current arrays we're building up to send
+        self._block_arrays = []
+        self._block_rows = 0
+
+    def insert(self, array):
+        """Insert Numpy data, which must match the layout type."""
+        if not isinstance(array, numpy.ndarray):
+            array = numpy.array(array)
+        if array.ndim == 1:
+            # Already a structured array; just verify the type
+            if array.dtype != self._dtype:
+                raise ValueError("wrong dtype for 1D (structured) array")
+        elif array.ndim == 2:
+            # Convert to structured array
+            sarray = numpy.zeros(array.shape[0], dtype=self._dtype)
+            try:
+                sarray['timestamp'] = array[:, 0]
+                # Need the squeeze in case sarray['data'] is 1 dimensional
+                sarray['data'] = numpy.squeeze(array[:, 1:])
+            except (IndexError, ValueError):
+                raise ValueError("wrong number of fields for this data type")
+            array = sarray
+        else:
+            raise ValueError("wrong number of dimensions in array")
+
+        length = len(array)
+        maxrows = self._max_rows
+
+        if length == 0:
+            return
+        if length > maxrows:
+            # This is more than twice what we wanted to send, so split
+            # it up.  This is a bit inefficient, but the user really
+            # shouldn't be providing this much data at once.
+            for cut in range(0, length, maxrows):
+                self.insert(array[cut:(cut + maxrows)])
+            return
+
+        # Add this array to our list
+        self._block_arrays.append(array)
+        self._block_rows += length
+
+        # Send if it's too long
+        if self._block_rows >= maxrows:
+            self._send_block(final=False)
+
+    def _send_block(self, final=False):
+        """Send the data current stored up.  One row might be left
+        over if we need its timestamp saved."""
+
+        # Build the full array to send
+        if self._block_rows == 0:
+            array = numpy.zeros(0, dtype=self._dtype)
+        else:
+            array = numpy.hstack(self._block_arrays)
+
+        # Get starting timestamp
+        start_ts = self._interval_start
+        if start_ts is None:
+            # Pull start from the first row
+            try:
+                start_ts = array['timestamp'][0]
+            except IndexError:
+                pass  # no timestamp is OK, if we have no data
+
+        # Get ending timestamp
+        if final:
+            # For a final block, the timestamp is either the
+            # user-provided end, or the timestamp of the last line
+            # plus epsilon.
+            end_ts = self._interval_end
+            if end_ts is None:
+                try:
+                    end_ts = array['timestamp'][-1]
+                    end_ts += nilmdb.utils.time.epsilon
+                except IndexError:
+                    pass  # no timestamp is OK, if we have no data
+            self._block_arrays = []
+            self._block_rows = 0
+
+            # Next block is completely fresh
+            self._interval_start = None
+            self._interval_end = None
+        else:
+            # An intermediate block.  We need to save the last row
+            # for the next block, and use its timestamp as the ending
+            # timestamp for this one.
+            if len(array) < 2:
+                # Not enough data to send an intermediate block
+                return
+            end_ts = array['timestamp'][-1]
+            if self._interval_end is not None and end_ts > self._interval_end:
+                # User gave us bad endpoints; send it anyway, and let
+                # the server complain so that the error is the same
+                # as if we hadn't done this chunking.
+                end_ts = self._interval_end
+            self._block_arrays = [array[-1:]]
+            self._block_rows = 1
+            array = array[:-1]
+
+            # Next block continues where this one ended
+            self._interval_start = end_ts
+
+        # If we have no endpoints, or equal endpoints, it's OK as long
+        # as there's no data to send
+        if (start_ts is None or end_ts is None) or (start_ts == end_ts):
+            if not array:
+                return
+            raise ClientError("have data to send, but invalid start/end times")
+
+        # Send it
+        data = array.tostring()
+        self.last_response = self._client.stream_insert_block(
+            self._path, data, start_ts, end_ts, binary=True)
+
+        return
nilmdb/cmdline/cmdline.py
@@ -1,42 +1,48 @@
 """Command line client functionality"""
 
-import nilmdb.client
-
-from nilmdb.utils.printf import *
-from nilmdb.utils import datetime_tz
-import nilmdb.utils.time
-
-import sys
 import os
+import sys
+import signal
 import argparse
 from argparse import ArgumentDefaultsHelpFormatter as def_form
 
-try: # pragma: no cover
-    import argcomplete
-except ImportError: # pragma: no cover
-    argcomplete = None
+import nilmdb.client
+from nilmdb.utils.printf import fprintf, sprintf
+import nilmdb.utils.time
+
+import argcomplete
+import datetime_tz
 
 # Valid subcommands.  Defined in separate files just to break
 # things up -- they're still called with Cmdline as self.
-subcommands = [ "help", "info", "create", "list", "metadata",
-                "insert", "extract", "remove", "destroy",
-                "intervals", "rename" ]
+subcommands = ["help", "info", "create", "rename", "list", "intervals",
+               "metadata", "insert", "extract", "remove", "destroy"]
 
 # Import the subcommand modules
 subcmd_mods = {}
 for cmd in subcommands:
-    subcmd_mods[cmd] = __import__("nilmdb.cmdline." + cmd, fromlist = [ cmd ])
+    subcmd_mods[cmd] = __import__("nilmdb.cmdline." + cmd, fromlist=[cmd])
 
 
 class JimArgumentParser(argparse.ArgumentParser):
+    def parse_args(self, args=None, namespace=None):
+        # Look for --version anywhere and change it to just "nilmtool
+        # --version".  This makes "nilmtool cmd --version" work, which
+        # is needed by help2man.
+        if "--version" in (args or sys.argv[1:]):
+            args = ["--version"]
+        return argparse.ArgumentParser.parse_args(self, args, namespace)
+
     def error(self, message):
         self.print_usage(sys.stderr)
         self.exit(2, sprintf("error: %s\n", message))
 
-class Complete(object): # pragma: no cover
+
+class Complete():
     # Completion helpers, for using argcomplete (see
     # extras/nilmtool-bash-completion.sh)
     def escape(self, s):
-        quote_chars = [ "\\", "\"", "'", " " ]
+        quote_chars = ["\\", "\"", "'", " "]
         for char in quote_chars:
             s = s.replace(char, "\\" + char)
         return s
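The new `parse_args` override makes `nilmtool <cmd> --version` equivalent to `nilmtool --version`, which help2man needs when probing each subcommand. The same trick in isolation (parser name hypothetical):

```python
import sys
import argparse

class VersionAnywhereParser(argparse.ArgumentParser):
    def parse_args(self, args=None, namespace=None):
        # Collapse any argv mentioning --version to just that flag.
        if "--version" in (args or sys.argv[1:]):
            args = ["--version"]
        return super().parse_args(args, namespace)

parser = VersionAnywhereParser(prog="demo")
parser.add_argument("--version", action="version", version="1.0")
parser.parse_args(["subcmd", "--version"])  # prints "1.0" and exits
```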
@@ -49,18 +55,18 @@ class Complete(object): # pragma: no cover
 
     def path(self, prefix, parsed_args, **kwargs):
         client = nilmdb.client.Client(parsed_args.url)
-        return ( self.escape(s[0])
+        return (self.escape(s[0])
                 for s in client.stream_list()
-                 if s[0].startswith(prefix) )
+                if s[0].startswith(prefix))
 
     def layout(self, prefix, parsed_args, **kwargs):
-        types = [ "int8", "int16", "int32", "int64",
+        types = ["int8", "int16", "int32", "int64",
                  "uint8", "uint16", "uint32", "uint64",
-                  "float32", "float64" ]
+                 "float32", "float64"]
         layouts = []
-        for i in range(1,10):
+        for i in range(1, 10):
             layouts.extend([(t + "_" + str(i)) for t in types])
-        return ( l for l in layouts if l.startswith(prefix) )
+        return (lay for lay in layouts if lay.startswith(prefix))
 
     def meta_key(self, prefix, parsed_args, **kwargs):
         return (kv.split('=')[0] for kv
@@ -71,19 +77,23 @@ class Complete(object): # pragma: no cover
         path = parsed_args.path
         if not path:
             return []
-        return ( self.escape(k + '=' + v)
-                 for (k,v) in client.stream_get_metadata(path).iteritems()
-                 if k.startswith(prefix) )
+        results = []
+        for (k, v) in client.stream_get_metadata(path).items():
+            kv = self.escape(k + '=' + v)
+            if kv.startswith(prefix):
+                results.append(kv)
+        return results
 
 
-class Cmdline(object):
-    def __init__(self, argv = None):
+class Cmdline():
+
+    def __init__(self, argv=None):
         self.argv = argv or sys.argv[1:]
         self.client = None
-        self.def_url = os.environ.get("NILMDB_URL", "http://localhost:12380")
+        self.def_url = os.environ.get("NILMDB_URL", "http://localhost/nilmdb/")
         self.subcmd = {}
         self.complete = Complete()
+        self.complete_output_stream = None  # overridden by test suite
 
     def arg_time(self, toparse):
         """Parse a time string argument"""
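The `meta_value` completer above is rewritten from a generator expression to an explicit loop while porting off `dict.iteritems()`, which no longer exists in Python 3. A tiny illustration with made-up metadata:

```python
meta = {"scale_factor": "2.5", "source": "prep"}
# Python 2 only:  meta.iteritems()  -> AttributeError on Python 3
# Python 3: items() returns a view, iterated the same way:
for (k, v) in meta.items():
    print(k + '=' + v)
```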
@@ -95,14 +105,14 @@ class Cmdline(object):
 
     # Set up the parser
     def parser_setup(self):
-        self.parser = JimArgumentParser(add_help = False,
-                                        formatter_class = def_form)
+        self.parser = JimArgumentParser(add_help=False,
+                                        formatter_class=def_form)
 
         group = self.parser.add_argument_group("General options")
         group.add_argument("-h", "--help", action='help',
                            help='show this help message and exit')
-        group.add_argument("-V", "--version", action="version",
-                           version = nilmdb.__version__)
+        group.add_argument("-v", "--version", action="version",
+                           version=nilmdb.__version__)
 
         group = self.parser.add_argument_group("Server")
         group.add_argument("-u", "--url", action="store",
@@ -111,7 +121,7 @@ class Cmdline(object):
                            ).completer = self.complete.url
 
         sub = self.parser.add_subparsers(
-            title="Commands", dest="command",
+            title="Commands", dest="command", required=True,
            description="Use 'help command' or 'command --help' for more "
             "details on a particular command.")
 
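`required=True` restores the old behavior: in Python 3, argparse subcommands are optional by default, so a bare `nilmtool` would otherwise parse successfully with no command selected. Sketch:

```python
import argparse

parser = argparse.ArgumentParser(prog="demo")
sub = parser.add_subparsers(title="Commands", dest="command", required=True)
sub.add_parser("info")
parser.parse_args([])  # exits: "the following arguments are required: command"
```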
@@ -126,14 +136,18 @@ class Cmdline(object):
         sys.exit(-1)
 
     def run(self):
+        # Set SIGPIPE to its default handler -- we don't need Python
+        # to catch it for us.
+        signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+
         # Clear cached timezone, so that we can pick up timezone changes
         # while running this from the test suite.
         datetime_tz._localtz = None
 
         # Run parser
         self.parser_setup()
-        if argcomplete: # pragma: no cover
-            argcomplete.autocomplete(self.parser)
+        argcomplete.autocomplete(self.parser, exit_method=sys.exit,
+                                 output_stream=self.complete_output_stream)
         self.args = self.parser.parse_args(self.argv)
 
         # Run arg verify handler if there is one
@@ -146,7 +160,7 @@ class Cmdline(object):
         # unless the particular command requests that we don't.
         if "no_test_connect" not in self.args:
             try:
-                server_version = self.client.version()
+                self.client.version()
             except nilmdb.client.Error as e:
                 self.die("error connecting to server: %s", str(e))
 
nilmdb/cmdline/create.py
@@ -1,11 +1,11 @@
-from nilmdb.utils.printf import *
+from argparse import RawDescriptionHelpFormatter as raw_form
 
 import nilmdb.client
 
-from argparse import RawDescriptionHelpFormatter as raw_form
+
 def setup(self, sub):
     cmd = sub.add_parser("create", help="Create a new stream",
-                         formatter_class = raw_form,
+                         formatter_class=raw_form,
                          description="""
 Create a new empty stream at the specified path and with the specified
 layout type.
@@ -19,7 +19,7 @@ Layout types are of the format: type_count
   For example, 'float32_8' means the data for this stream has 8 columns of
   32-bit floating point values.
 """)
-    cmd.set_defaults(handler = cmd_create)
+    cmd.set_defaults(handler=cmd_create)
     group = cmd.add_argument_group("Required arguments")
     group.add_argument("path",
                        help="Path (in database) of new stream, e.g. /foo/bar",
@@ -29,6 +29,7 @@ Layout types are of the format: type_count
                        ).completer = self.complete.layout
     return cmd
 
+
 def cmd_create(self):
     """Create new stream"""
     try:
nilmdb/cmdline/destroy.py
@@ -1,26 +1,52 @@
-from nilmdb.utils.printf import *
-import nilmdb.client
+import fnmatch
 
 from argparse import ArgumentDefaultsHelpFormatter as def_form
 
+from nilmdb.utils.printf import printf
+import nilmdb.client
+
+
 def setup(self, sub):
     cmd = sub.add_parser("destroy", help="Delete a stream and all data",
-                         formatter_class = def_form,
+                         formatter_class=def_form,
                          description="""
-                         Destroy the stream at the specified path.  All
-                         data and metadata related to the stream is
-                         permanently deleted.
+                         Destroy the stream at the specified path.
+                         The stream must be empty.  All metadata
+                         related to the stream is permanently deleted.
+
+                         Wildcards and multiple paths are supported.
                          """)
-    cmd.set_defaults(handler = cmd_destroy)
+    cmd.set_defaults(handler=cmd_destroy)
+    group = cmd.add_argument_group("Options")
+    group.add_argument("-R", "--remove", action="store_true",
+                       help="Remove all data before destroying stream")
+    group.add_argument("-q", "--quiet", action="store_true",
+                       help="Don't display names when destroying "
+                       "multiple paths")
     group = cmd.add_argument_group("Required arguments")
-    group.add_argument("path",
-                       help="Path of the stream to delete, e.g. /foo/bar",
+    group.add_argument("path", nargs='+',
+                       help="Path of the stream to delete, e.g. /foo/bar/*",
                        ).completer = self.complete.path
     return cmd
 
+
 def cmd_destroy(self):
     """Destroy stream"""
-    try:
-        self.client.stream_destroy(self.args.path)
-    except nilmdb.client.ClientError as e:
-        self.die("error destroying stream: %s", str(e))
+    streams = [s[0] for s in self.client.stream_list()]
+    paths = []
+    for path in self.args.path:
+        new = fnmatch.filter(streams, path)
+        if not new:
+            self.die("error: no stream matched path: %s", path)
+        paths.extend(new)
+
+    for path in paths:
+        if not self.args.quiet and len(paths) > 1:
+            printf("Destroying %s\n", path)
+        try:
+            if self.args.remove:
+                self.client.stream_remove(path)
+            self.client.stream_destroy(path)
+        except nilmdb.client.ClientError as e:
+            self.die("error destroying stream: %s", str(e))
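The new cmd_destroy expands shell-style wildcards on the client side, since the server only knows literal stream paths. A standalone sketch of that expansion logic (the stream names here are made up):

    import fnmatch

    streams = ["/test/raw", "/test/prep", "/other/raw"]   # from stream_list()
    patterns = ["/test/*", "/missing/*"]                  # from args.path

    paths = []
    for pat in patterns:
        new = fnmatch.filter(streams, pat)  # literal names also match themselves
        if not new:
            raise SystemExit("error: no stream matched path: %s" % pat)
        paths.extend(new)
    # paths == ["/test/raw", "/test/prep"] before the second pattern dies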
nilmdb/cmdline/extract.py
@@ -1,14 +1,16 @@
-from __future__ import print_function
-from nilmdb.utils.printf import *
+import sys
+
+from nilmdb.utils.printf import printf
 import nilmdb.client
 
+
 def setup(self, sub):
     cmd = sub.add_parser("extract", help="Extract data",
                          description="""
                          Extract data from a stream.
                          """)
-    cmd.set_defaults(verify = cmd_extract_verify,
-                     handler = cmd_extract)
+    cmd.set_defaults(verify=cmd_extract_verify,
+                     handler=cmd_extract)
 
     group = cmd.add_argument_group("Data selection")
     group.add_argument("path",
@@ -24,22 +26,32 @@ def setup(self, sub):
                        ).completer = self.complete.time
 
     group = cmd.add_argument_group("Output format")
+    group.add_argument("-B", "--binary", action="store_true",
+                       help="Raw binary output")
     group.add_argument("-b", "--bare", action="store_true",
                        help="Exclude timestamps from output lines")
     group.add_argument("-a", "--annotate", action="store_true",
                        help="Include comments with some information "
                        "about the stream")
+    group.add_argument("-m", "--markup", action="store_true",
+                       help="Include comments with interval starts and ends")
    group.add_argument("-T", "--timestamp-raw", action="store_true",
                        help="Show raw timestamps in annotated information")
     group.add_argument("-c", "--count", action="store_true",
                        help="Just output a count of matched data points")
     return cmd
 
+
 def cmd_extract_verify(self):
-    if self.args.start is not None and self.args.end is not None:
-        if self.args.start > self.args.end:
-            self.parser.error("start is after end")
+    if self.args.start > self.args.end:
+        self.parser.error("start is after end")
+
+    if self.args.binary:
+        if (self.args.bare or self.args.annotate or self.args.markup or
+                self.args.timestamp_raw or self.args.count):
+            self.parser.error("--binary cannot be combined with other options")
+
 
 def cmd_extract(self):
     streams = self.client.stream_list(self.args.path)
     if len(streams) != 1:
@@ -58,15 +70,23 @@ def cmd_extract(self):
         printf("# end: %s\n", time_string(self.args.end))
 
     printed = False
+    if self.args.binary:
+        printer = sys.stdout.buffer.write
+    else:
+        printer = lambda x: print(x.decode('utf-8'))
+    bare = self.args.bare
+    count = self.args.count
     for dataline in self.client.stream_extract(self.args.path,
                                                self.args.start,
                                                self.args.end,
-                                               self.args.count):
-        if self.args.bare and not self.args.count:
+                                               self.args.count,
+                                               self.args.markup,
+                                               self.args.binary):
+        if bare and not count:
             # Strip timestamp (first element).  Doesn't make sense
             # if we are only returning a count.
-            dataline = ' '.join(dataline.split(' ')[1:])
-        print(dataline)
+            dataline = b' '.join(dataline.split(b' ')[1:])
+        printer(dataline)
         printed = True
     if not printed:
         if self.args.annotate:
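The printer selection above is the crux of this file's Python 3 port: stream_extract now yields bytes, and --binary must bypass text encoding entirely. A self-contained sketch of both paths:

    import sys

    binary = False   # what --binary would set
    if binary:
        printer = sys.stdout.buffer.write       # raw bytes, no text layer
    else:
        printer = lambda x: print(x.decode('utf-8'))

    dataline = b"1234567890123456 1.0 2.0 3.0"
    # --bare strips the timestamp, i.e. the first space-separated field;
    # note the byte-string separator, since dataline is bytes
    dataline = b' '.join(dataline.split(b' ')[1:])
    printer(dataline)    # prints "1.0 2.0 3.0"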
nilmdb/cmdline/help.py
@@ -1,7 +1,5 @@
-from nilmdb.utils.printf import *
-
 import argparse
-import sys
+
 
 def setup(self, sub):
     cmd = sub.add_parser("help", help="Show detailed help for a command",
@@ -9,14 +7,15 @@ def setup(self, sub):
                          Show help for a command. 'help command' is
                          the same as 'command --help'.
                          """)
-    cmd.set_defaults(handler = cmd_help)
-    cmd.set_defaults(no_test_connect = True)
+    cmd.set_defaults(handler=cmd_help)
+    cmd.set_defaults(no_test_connect=True)
     cmd.add_argument("command", nargs="?",
                      help="Command to get help about")
     cmd.add_argument("rest", nargs=argparse.REMAINDER,
                      help=argparse.SUPPRESS)
     return cmd
 
+
 def cmd_help(self):
     if self.args.command in self.subcmd:
         self.subcmd[self.args.command].print_help()
nilmdb/cmdline/info.py
@@ -1,19 +1,21 @@
+from argparse import ArgumentDefaultsHelpFormatter as def_form
+
 import nilmdb.client
-from nilmdb.utils.printf import *
+from nilmdb.utils.printf import printf
 from nilmdb.utils import human_size
 
-from argparse import ArgumentDefaultsHelpFormatter as def_form
 
 def setup(self, sub):
     cmd = sub.add_parser("info", help="Server information",
-                         formatter_class = def_form,
+                         formatter_class=def_form,
                          description="""
                          List information about the server, like
                          version.
                          """)
-    cmd.set_defaults(handler = cmd_info)
+    cmd.set_defaults(handler=cmd_info)
     return cmd
 
+
 def cmd_info(self):
     """Print info about the server"""
     printf("Client version: %s\n", nilmdb.__version__)
@@ -21,5 +23,8 @@ def cmd_info(self):
     printf("Server URL: %s\n", self.client.geturl())
     dbinfo = self.client.dbinfo()
     printf("Server database path: %s\n", dbinfo["path"])
-    printf("Server database size: %s\n", human_size(dbinfo["size"]))
-    printf("Server database free space: %s\n", human_size(dbinfo["free"]))
+    for (desc, field) in [("used by NilmDB", "size"),
+                          ("used by other", "other"),
+                          ("reserved", "reserved"),
+                          ("free", "free")]:
+        printf("Server disk space %s: %s\n", desc, human_size(dbinfo[field]))
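The rewritten cmd_info drives its report from a table instead of one printf per field, which keeps new disk-space categories one list entry away. A sketch with stand-in numbers and plain print in place of nilmdb's printf/human_size helpers:

    dbinfo = {"size": 2 * 10**9, "other": 5 * 10**8,
              "reserved": 10**8, "free": 7 * 10**9}   # made-up values
    for (desc, field) in [("used by NilmDB", "size"),
                          ("used by other", "other"),
                          ("reserved", "reserved"),
                          ("free", "free")]:
        print("Server disk space %s: %d bytes" % (desc, dbinfo[field]))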
nilmdb/cmdline/insert.py
@@ -1,17 +1,18 @@
-from nilmdb.utils.printf import *
+import sys
+
+from nilmdb.utils.printf import printf
 import nilmdb.client
 import nilmdb.utils.timestamper as timestamper
 import nilmdb.utils.time
 
-import sys
 
 def setup(self, sub):
     cmd = sub.add_parser("insert", help="Insert data",
                          description="""
                          Insert data into a stream.
                          """)
-    cmd.set_defaults(verify = cmd_insert_verify,
-                     handler = cmd_insert)
+    cmd.set_defaults(verify=cmd_insert_verify,
+                     handler=cmd_insert)
     cmd.add_argument("-q", "--quiet", action='store_true',
                      help='suppress unnecessary messages')
 
@@ -61,21 +62,24 @@ def setup(self, sub):
     group.add_argument("path",
                        help="Path of stream, e.g. /foo/bar",
                        ).completer = self.complete.path
-    group.add_argument("file", nargs = '?', default='-',
+    group.add_argument("file", nargs='?', default='-',
                        help="File to insert (default: - (stdin))")
     return cmd
 
+
 def cmd_insert_verify(self):
     if self.args.timestamp:
         if not self.args.rate:
             self.die("error: --rate is needed, but was not specified")
         if not self.args.filename and self.args.start is None:
-            self.die("error: need --start or --filename when adding timestamps")
+            self.die("error: need --start or --filename "
+                     "when adding timestamps")
     else:
         if self.args.start is None or self.args.end is None:
             self.die("error: when not adding timestamps, --start and "
                      "--end are required")
 
+
 def cmd_insert(self):
     # Find requested stream
     streams = self.client.stream_list(self.args.path)
@@ -87,7 +91,7 @@ def cmd_insert(self):
     try:
         filename = arg.file
         if filename == '-':
-            infile = sys.stdin
+            infile = sys.stdin.buffer
         else:
            try:
                 infile = open(filename, "rb")
@@ -104,7 +108,7 @@ def cmd_insert(self):
         if arg.timestamp:
             data = timestamper.TimestamperRate(infile, arg.start, arg.rate)
         else:
-            data = iter(lambda: infile.read(1048576), '')
+            data = iter(lambda: infile.read(1048576), b'')
 
         # Print info
         if not arg.quiet:
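The b'' sentinel change above matters because two-argument iter() compares the callable's return value against the sentinel: a binary file returns b'' at EOF, so a str '' sentinel would never match and the loop would never terminate. A self-contained sketch of the chunked-read idiom:

    import io

    infile = io.BytesIO(b"x" * 3000000)   # stand-in for sys.stdin.buffer
    total = 0
    for chunk in iter(lambda: infile.read(1048576), b''):
        total += len(chunk)               # 1048576 + 1048576 + 902848
    print(total)                          # 3000000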
nilmdb/cmdline/intervals.py
@@ -1,13 +1,13 @@
-from nilmdb.utils.printf import *
-import nilmdb.utils.time
-
-import fnmatch
-import argparse
 from argparse import ArgumentDefaultsHelpFormatter as def_form
 
+from nilmdb.utils.printf import printf
+import nilmdb.utils.time
+from nilmdb.utils.interval import Interval
+
+
 def setup(self, sub):
     cmd = sub.add_parser("intervals", help="List intervals",
-                         formatter_class = def_form,
+                         formatter_class=def_form,
                          description="""
                          List intervals in a stream, similar to
                          'list --detail path'.
@@ -16,8 +16,8 @@ def setup(self, sub):
                          interval ranges that are present in 'path'
                          and not present in 'diffpath' are printed.
                          """)
-    cmd.set_defaults(verify = cmd_intervals_verify,
-                     handler = cmd_intervals)
+    cmd.set_defaults(verify=cmd_intervals_verify,
+                     handler=cmd_intervals)
 
     group = cmd.add_argument_group("Stream selection")
     group.add_argument("path", metavar="PATH",
@@ -42,14 +42,18 @@ def setup(self, sub):
     group = cmd.add_argument_group("Misc options")
     group.add_argument("-T", "--timestamp-raw", action="store_true",
                        help="Show raw timestamps when printing times")
+    group.add_argument("-o", "--optimize", action="store_true",
+                       help="Optimize (merge adjacent) intervals")
+
     return cmd
 
+
 def cmd_intervals_verify(self):
     if self.args.start is not None and self.args.end is not None:
         if self.args.start >= self.args.end:
             self.parser.error("start must precede end")
 
+
 def cmd_intervals(self):
     """List intervals in a stream"""
     if self.args.timestamp_raw:
@@ -58,9 +62,15 @@ def cmd_intervals(self):
         time_string = nilmdb.utils.time.timestamp_to_human
 
     try:
-        for (start, end) in self.client.stream_intervals(
-                self.args.path, self.args.start, self.args.end, self.args.diff):
-            printf("[ %s -> %s ]\n", time_string(start), time_string(end))
+        intervals = (Interval(start, end) for (start, end) in
+                     self.client.stream_intervals(self.args.path,
+                                                  self.args.start,
+                                                  self.args.end,
+                                                  self.args.diff))
+        if self.args.optimize:
+            intervals = nilmdb.utils.interval.optimize(intervals)
+        for i in intervals:
+            printf("[ %s -> %s ]\n", time_string(i.start), time_string(i.end))
+
     except nilmdb.client.ClientError as e:
         self.die("error listing intervals: %s", str(e))
-
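The rewritten cmd_intervals builds a lazy generator pipeline, so --optimize slots in as an extra stage without buffering the whole interval list. A sketch using the same nilmdb helpers the diff names (requires a nilmdb install; the interval data is made up):

    import nilmdb.utils.interval
    from nilmdb.utils.interval import Interval

    raw = [(0, 10), (10, 20), (30, 40)]        # as from stream_intervals()
    intervals = (Interval(s, e) for (s, e) in raw)
    # --optimize merges adjacent intervals: [0,20) and [30,40) remain
    intervals = nilmdb.utils.interval.optimize(intervals)
    for i in intervals:
        print(i.start, i.end)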
nilmdb/cmdline/list.py
@@ -1,31 +1,25 @@
-from nilmdb.utils.printf import *
-import nilmdb.utils.time
-
-import fnmatch
-import argparse
+import fnmatch
 from argparse import ArgumentDefaultsHelpFormatter as def_form
 
+from nilmdb.utils.printf import printf
+import nilmdb.utils.time
+
 
 def setup(self, sub):
     cmd = sub.add_parser("list", help="List streams",
-                         formatter_class = def_form,
+                         formatter_class=def_form,
                          description="""
                          List streams available in the database,
-                         optionally filtering by layout or path.  Wildcards
-                         are accepted.
+                         optionally filtering by path.  Wildcards
+                         are accepted; non-matching paths or wildcards
+                         are ignored.
                          """)
-    cmd.set_defaults(verify = cmd_list_verify,
-                     handler = cmd_list)
+    cmd.set_defaults(verify=cmd_list_verify,
+                     handler=cmd_list)
 
     group = cmd.add_argument_group("Stream filtering")
-    group.add_argument("-p", "--path", metavar="PATH", default="*",
-                       help="Match only this path (-p can be omitted)",
-                       ).completer = self.complete.path
-    group.add_argument("path_positional", default="*",
-                       nargs="?", help=argparse.SUPPRESS,
-                       ).completer = self.complete.path
-    group.add_argument("-l", "--layout", default="*",
-                       help="Match only this stream layout",
-                       ).completer = self.complete.layout
+    group.add_argument("path", metavar="PATH", default=["*"], nargs='*',
+                       ).completer = self.complete.path
 
     group = cmd.add_argument_group("Interval info")
     group.add_argument("-E", "--ext", action="store_true",
@@ -49,44 +43,46 @@ def setup(self, sub):
     group = cmd.add_argument_group("Misc options")
     group.add_argument("-T", "--timestamp-raw", action="store_true",
                        help="Show raw timestamps when printing times")
+    group.add_argument("-l", "--layout", action="store_true",
+                       help="Show layout type next to path name")
+    group.add_argument("-n", "--no-decim", action="store_true",
+                       help="Skip paths containing \"~decim-\"")
+
     return cmd
 
-def cmd_list_verify(self):
-    # A hidden "path_positional" argument lets the user leave off the
-    # "-p" when specifying the path.  Handle it here.
-    got_opt = self.args.path != "*"
-    got_pos = self.args.path_positional != "*"
-    if got_pos:
-        if got_opt:
-            self.parser.error("too many paths specified")
-        else:
-            self.args.path = self.args.path_positional
 
+def cmd_list_verify(self):
     if self.args.start is not None and self.args.end is not None:
         if self.args.start >= self.args.end:
             self.parser.error("start must precede end")
 
     if self.args.start is not None or self.args.end is not None:
         if not self.args.detail:
-            self.parser.error("--start and --end only make sense with --detail")
+            self.parser.error("--start and --end only make sense "
+                              "with --detail")
+
 
 def cmd_list(self):
     """List available streams"""
-    streams = self.client.stream_list(extended = True)
+    streams = self.client.stream_list(extended=True)
 
     if self.args.timestamp_raw:
         time_string = nilmdb.utils.time.timestamp_to_string
     else:
         time_string = nilmdb.utils.time.timestamp_to_human
 
+    for argpath in self.args.path:
         for stream in streams:
             (path, layout, int_min, int_max, rows, time) = stream[:6]
-            if not (fnmatch.fnmatch(path, self.args.path) and
-                    fnmatch.fnmatch(layout, self.args.layout)):
+            if not fnmatch.fnmatch(path, argpath):
+                continue
+            if self.args.no_decim and "~decim-" in path:
                 continue
 
+            if self.args.layout:
                 printf("%s %s\n", path, layout)
+            else:
+                printf("%s\n", path)
+
             if self.args.ext:
                 if int_min is None or int_max is None:
@@ -102,7 +98,8 @@ def cmd_list(self):
                 printed = False
                 for (start, end) in self.client.stream_intervals(
                         path, self.args.start, self.args.end):
-                printf("  [ %s -> %s ]\n", time_string(start), time_string(end))
+                    printf("  [ %s -> %s ]\n",
+                           time_string(start), time_string(end))
                     printed = True
                 if not printed:
                     printf("  (no intervals)\n")
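Unlike destroy and remove above, the new list command silently ignores patterns that match nothing, per its updated description. A sketch of its filtering loop (stream names are made up):

    import fnmatch

    streams = ["/test/raw", "/test/raw~decim-16"]
    no_decim = True                             # what -n / --no-decim sets
    for argpath in ["/test/*", "/nothing/*"]:   # second pattern prints nothing
        for path in streams:
            if not fnmatch.fnmatch(path, argpath):
                continue
            if no_decim and "~decim-" in path:
                continue
            print(path)                         # only /test/raw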
nilmdb/cmdline/metadata.py
@@ -1,7 +1,8 @@
-from nilmdb.utils.printf import *
+from nilmdb.utils.printf import printf
 import nilmdb
 import nilmdb.client
 
+
 def setup(self, sub):
     cmd = sub.add_parser("metadata", help="Get or set stream metadata",
                          description="""
@@ -9,8 +10,9 @@ def setup(self, sub):
                          a stream.
                          """,
                          usage="%(prog)s path [-g [key ...] | "
-                         "-s key=value [...] | -u key=value [...]]")
-    cmd.set_defaults(handler = cmd_metadata)
+                         "-s key=value [...] | -u key=value [...]] | "
+                         "-d [key ...]")
+    cmd.set_defaults(handler=cmd_metadata)
 
     group = cmd.add_argument_group("Required arguments")
     group.add_argument("path",
@@ -30,8 +32,12 @@ def setup(self, sub):
                      help="Update metadata using provided "
                      "key=value pairs",
                      ).completer = self.complete.meta_keyval
+    exc.add_argument("-d", "--delete", nargs="*", metavar="key",
+                     help="Delete metadata for specified keys (default all)",
+                     ).completer = self.complete.meta_key
     return cmd
 
+
 def cmd_metadata(self):
     """Manipulate metadata"""
     if self.args.set is not None or self.args.update is not None:
@@ -56,15 +62,29 @@ def cmd_metadata(self):
             handler(self.args.path, data)
         except nilmdb.client.ClientError as e:
             self.die("error setting/updating metadata: %s", str(e))
+    elif self.args.delete is not None:
+        # Delete (by setting values to empty strings)
+        keys = None
+        if self.args.delete:
+            keys = list(self.args.delete)
+        try:
+            data = self.client.stream_get_metadata(self.args.path, keys)
+            for key in data:
+                data[key] = ""
+            self.client.stream_update_metadata(self.args.path, data)
+        except nilmdb.client.ClientError as e:
+            self.die("error deleting metadata: %s", str(e))
     else:
         # Get (or unspecified)
-        keys = self.args.get or None
+        keys = None
+        if self.args.get:
+            keys = list(self.args.get)
         try:
             data = self.client.stream_get_metadata(self.args.path, keys)
         except nilmdb.client.ClientError as e:
             self.die("error getting metadata: %s", str(e))
         for key, value in sorted(data.items()):
-            # Omit nonexistant keys
+            # Print nonexistant keys as having empty value
             if value is None:
                 value = ""
             printf("%s=%s\n", key, value)
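The new -d handler reuses the existing update API: per the comment in the diff, setting a key's value to an empty string deletes it, so delete is get-then-blank-then-update. A sketch with stand-in data instead of a live client:

    # data as stream_get_metadata() might return it (values are made up)
    data = {"calibration": "1.23", "notes": "test run"}
    for key in data:
        data[key] = ""            # blank every requested key
    # stream_update_metadata(path, data) would now remove those keys
    print(data)                   # {'calibration': '', 'notes': ''}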
nilmdb/cmdline/remove.py
@@ -1,17 +1,22 @@
-from nilmdb.utils.printf import *
+import fnmatch
+
+from nilmdb.utils.printf import printf
 import nilmdb.client
 
+
 def setup(self, sub):
     cmd = sub.add_parser("remove", help="Remove data",
                          description="""
                          Remove all data from a specified time range within a
-                         stream.
+                         stream.  If multiple streams or wildcards are
+                         provided, the same time range is removed from all
+                         streams.
                          """)
-    cmd.set_defaults(handler = cmd_remove)
+    cmd.set_defaults(handler=cmd_remove)
 
     group = cmd.add_argument_group("Data selection")
-    group.add_argument("path",
-                       help="Path of stream, e.g. /foo/bar",
+    group.add_argument("path", nargs='+',
+                       help="Path of stream, e.g. /foo/bar/*",
                        ).completer = self.complete.path
     group.add_argument("-s", "--start", required=True,
                        metavar="TIME", type=self.arg_time,
@@ -23,18 +28,32 @@ def setup(self, sub):
                        ).completer = self.complete.time
 
     group = cmd.add_argument_group("Output format")
+    group.add_argument("-q", "--quiet", action="store_true",
+                       help="Don't display names when removing "
+                       "from multiple paths")
     group.add_argument("-c", "--count", action="store_true",
                        help="Output number of data points removed")
     return cmd
 
+
 def cmd_remove(self):
+    streams = [s[0] for s in self.client.stream_list()]
+    paths = []
+    for path in self.args.path:
+        new = fnmatch.filter(streams, path)
+        if not new:
+            self.die("error: no stream matched path: %s", path)
+        paths.extend(new)
+
     try:
-        count = self.client.stream_remove(self.args.path,
-                                          self.args.start, self.args.end)
+        for path in paths:
+            if not self.args.quiet and len(paths) > 1:
+                printf("Removing from %s\n", path)
+            count = self.client.stream_remove(path,
+                                              self.args.start, self.args.end)
+            if self.args.count:
+                printf("%d\n", count)
     except nilmdb.client.ClientError as e:
         self.die("error removing data: %s", str(e))
 
-    if self.args.count:
-        printf("%d\n", count)
-
     return 0
nilmdb/cmdline/rename.py
@@ -1,18 +1,18 @@
-from nilmdb.utils.printf import *
+from argparse import ArgumentDefaultsHelpFormatter as def_form
+
 import nilmdb.client
 
-from argparse import ArgumentDefaultsHelpFormatter as def_form
 
 def setup(self, sub):
     cmd = sub.add_parser("rename", help="Rename a stream",
-                         formatter_class = def_form,
+                         formatter_class=def_form,
                          description="""
                          Rename a stream.
 
                          Only the stream's path is renamed; no
                          metadata is changed.
                          """)
-    cmd.set_defaults(handler = cmd_rename)
+    cmd.set_defaults(handler=cmd_rename)
     group = cmd.add_argument_group("Required arguments")
     group.add_argument("oldpath",
                        help="Old path, e.g. /foo/old",
@@ -23,6 +23,7 @@ def setup(self, sub):
 
     return cmd
 
+
 def cmd_rename(self):
     """Rename a stream"""
     try:
nilmdb/fsck/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
+"""nilmdb.fsck"""
+
+from nilmdb.fsck.fsck import Fsck
nilmdb/fsck/fsck.py (new file, 610 lines)
@@ -0,0 +1,610 @@
					# -*- coding: utf-8 -*-
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					"""Check database consistency, with some ability to fix problems.
 | 
				
			||||||
 | 
					This should be able to fix cases where a database gets corrupted due
 | 
				
			||||||
 | 
					to unexpected system shutdown, and detect other cases that may cause
 | 
				
			||||||
 | 
					NilmDB to return errors when trying to manipulate the database."""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import nilmdb.utils
 | 
				
			||||||
 | 
					import nilmdb.server
 | 
				
			||||||
 | 
					import nilmdb.client.numpyclient
 | 
				
			||||||
 | 
					from nilmdb.utils.interval import IntervalError
 | 
				
			||||||
 | 
					from nilmdb.server.interval import Interval, IntervalSet
 | 
				
			||||||
 | 
					from nilmdb.utils.printf import printf, fprintf, sprintf
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					from collections import defaultdict
 | 
				
			||||||
 | 
					import sqlite3
 | 
				
			||||||
 | 
					import os
 | 
				
			||||||
 | 
					import sys
 | 
				
			||||||
 | 
					import progressbar
 | 
				
			||||||
 | 
					import re
 | 
				
			||||||
 | 
					import shutil
 | 
				
			||||||
 | 
					import pickle
 | 
				
			||||||
 | 
					import numpy
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					class FsckError(Exception):
 | 
				
			||||||
 | 
					    def __init__(self, msg="", *args):
 | 
				
			||||||
 | 
					        if args:
 | 
				
			||||||
 | 
					            msg = sprintf(msg, *args)
 | 
				
			||||||
 | 
					        Exception.__init__(self, msg)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					class FixableFsckError(FsckError):
 | 
				
			||||||
 | 
					    def __init__(self, msg=""):
 | 
				
			||||||
 | 
					        FsckError.__init__(self, f'{msg}\nThis may be fixable with "--fix".')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					class RetryFsck(FsckError):
 | 
				
			||||||
 | 
					    pass
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					class FsckFormatError(FsckError):
 | 
				
			||||||
 | 
					    pass
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def log(format, *args):
 | 
				
			||||||
 | 
					    printf(format, *args)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def err(format, *args):
 | 
				
			||||||
 | 
					    fprintf(sys.stderr, format, *args)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Decorator that retries a function if it returns a specific value
 | 
				
			||||||
 | 
					def retry_if_raised(exc, message=None, max_retries=1000):
 | 
				
			||||||
 | 
					    def f1(func):
 | 
				
			||||||
 | 
					        def f2(*args, **kwargs):
 | 
				
			||||||
 | 
					            for n in range(max_retries):
 | 
				
			||||||
 | 
					                try:
 | 
				
			||||||
 | 
					                    return func(*args, **kwargs)
 | 
				
			||||||
 | 
					                except exc:
 | 
				
			||||||
 | 
					                    if message:
 | 
				
			||||||
 | 
					                        log(f"{message} ({n+1})\n\n")
 | 
				
			||||||
 | 
					            raise Exception("Max number of retries (%d) exceeded; giving up" %
 | 
				
			||||||
 | 
					                            max_retries)
 | 
				
			||||||
 | 
					        return f2
 | 
				
			||||||
 | 
					    return f1
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
class Progress(object):
    def __init__(self, maxval):
        if maxval == 0:
            maxval = 1
        self.bar = progressbar.ProgressBar(
            maxval=maxval,
            widgets=[progressbar.Percentage(), ' ',
                     progressbar.Bar(), ' ',
                     progressbar.ETA()])
        self.bar.term_width = self.bar.term_width or 75

    def __enter__(self):
        self.bar.start()
        self.last_update = 0
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is None:
            self.bar.finish()
        else:
            printf("\n")

    def update(self, val):
        self.bar.update(val)


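# Usage sketch (not part of the original module): Progress is a thin
# context-manager wrapper around progressbar.ProgressBar, used by the
# checks below.
#
#     with Progress(1000) as pbar:
#         for i in range(1000):
#             pbar.update(i)

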
class Fsck(object):
    def __init__(self, path, fix=False):
        self.basepath = path
        self.sqlpath = os.path.join(path, "data.sql")
        self.bulkpath = os.path.join(path, "data")
        self.bulklock = os.path.join(path, "data.lock")
        self.fix = fix

    ### Main checks

    @retry_if_raised(RetryFsck, "Something was fixed: restarting fsck")
    def check(self, skip_data=False):
        self.bulk = None
        self.sql = None
        try:
            self.check_paths()
            self.check_sql()
            self.check_streams()
            self.check_intervals()
            if skip_data:
                log("skipped data check\n")
            else:
                self.check_data()
        finally:
            if self.bulk:
                self.bulk.close()
            if self.sql:  # pragma: no cover
                # (coverage doesn't handle finally clauses correctly;
                # both branches here are tested)
                self.sql.commit()
                self.sql.close()
        log("ok\n")

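# Usage sketch (not part of the original module): the whole checker is
# driven by constructing Fsck and calling check(), exactly as the
# nilmdb-fsck script further below does:
#
#     Fsck("/path/to/db", fix=True).check(skip_data=True)
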
    ### Check basic path structure

    def check_paths(self):
        log("checking paths\n")
        if self.bulk:
            self.bulk.close()
        if not os.path.isfile(self.sqlpath):
            raise FsckError("SQL database missing (%s)", self.sqlpath)
        if not os.path.isdir(self.bulkpath):
            raise FsckError("Bulk data directory missing (%s)", self.bulkpath)
        with open(self.bulklock, "w") as lockfile:
            if not nilmdb.utils.lock.exclusive_lock(lockfile):
                raise FsckError('Database already locked by another process\n'
                                'Make sure all other processes that might be '
                                'using the database are stopped.\n'
                                'Restarting apache will cause it to unlock '
                                'the db until a request is received.')
            # unlocked immediately
        self.bulk = nilmdb.server.bulkdata.BulkData(self.basepath)

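# Hedged sketch (an assumption, not the actual nilmdb.utils.lock source):
# the exclusive_lock() call above behaves like a non-blocking advisory
# lock, roughly equivalent to:
#
#     import fcntl
#
#     def exclusive_lock(f):
#         try:
#             fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
#             return True
#         except OSError:
#             return False
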
    ### Check SQL database health

    def check_sql(self):
        log("checking sqlite database\n")

        self.sql = sqlite3.connect(self.sqlpath)
        with self.sql:
            cur = self.sql.cursor()
            ver = cur.execute("PRAGMA user_version").fetchone()[0]
            good = max(nilmdb.server.nilmdb._sql_schema_updates.keys())
            if ver != good:
                raise FsckError("database version %d too old, should be %d",
                                ver, good)
            self.stream_path = {}
            self.stream_layout = {}
            log("  loading paths\n")
            result = cur.execute("SELECT id, path, layout FROM streams")
            for r in result:
                if r[0] in self.stream_path:
                    raise FsckError("duplicated ID %d in stream IDs", r[0])
                self.stream_path[r[0]] = r[1]
                self.stream_layout[r[0]] = r[2]

            log("  loading intervals\n")
            self.stream_interval = defaultdict(list)
            result = cur.execute("SELECT stream_id, start_time, end_time, "
                                 "start_pos, end_pos FROM ranges "
                                 "ORDER BY start_time")
            for r in result:
                if r[0] not in self.stream_path:
                    raise FsckError("interval ID %d not in streams", r[0])
                self.stream_interval[r[0]].append((r[1], r[2], r[3], r[4]))

            log("  loading metadata\n")
            self.stream_meta = defaultdict(dict)
            result = cur.execute("SELECT stream_id, key, value FROM metadata")
            for r in result:
                if r[0] not in self.stream_path:
                    raise FsckError("metadata ID %d not in streams", r[0])
                if r[1] in self.stream_meta[r[0]]:
                    raise FsckError(
                        "duplicate metadata key '%s' for stream %d",
                        r[1], r[0])
                self.stream_meta[r[0]][r[1]] = r[2]

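# Illustration (not part of the original module): PRAGMA user_version is a
# free integer slot in the SQLite file header, which the check above uses
# as the schema version.  For example:
#
#     import sqlite3
#     db = sqlite3.connect(":memory:")
#     db.execute("PRAGMA user_version = 3")
#     assert db.execute("PRAGMA user_version").fetchone()[0] == 3
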
    ### Check streams and basic interval overlap

    def check_streams(self):
        ids = list(self.stream_path.keys())
        log("checking %s streams\n", "{:,d}".format(len(ids)))
        with Progress(len(ids)) as pbar:
            for i, sid in enumerate(ids):
                pbar.update(i)
                path = self.stream_path[sid]

                # unique path, valid layout
                if list(self.stream_path.values()).count(path) != 1:
                    raise FsckError("duplicated path %s", path)
                layout = self.stream_layout[sid].split('_')[0]
                if layout not in ('int8', 'int16', 'int32', 'int64',
                                  'uint8', 'uint16', 'uint32', 'uint64',
                                  'float32', 'float64'):
                    raise FsckError("bad layout %s for %s", layout, path)
                count = int(self.stream_layout[sid].split('_')[1])
                if count < 1 or count > 1024:
                    raise FsckError("bad count %d for %s", count, path)

                # must exist in bulkdata
                bulk = self.bulkpath + path
                bulk = bulk.encode('utf-8')
                if not os.path.isdir(bulk):
                    raise FsckError("%s: missing bulkdata dir", path)
                if not nilmdb.server.bulkdata.Table.exists(bulk):
                    raise FsckError("%s: bad bulkdata table", path)

                # Intervals must not overlap.  Abuse IntervalSet to check
                # for overlaps in file positions, too.
                timeiset = IntervalSet()
                posiset = IntervalSet()
                for (stime, etime, spos, epos) in self.stream_interval[sid]:
                    new = Interval(stime, etime)
                    try:
                        timeiset += new
                    except IntervalError:
                        raise FsckError("%s: overlap in intervals:\n"
                                        "set: %s\nnew: %s",
                                        path, str(timeiset), str(new))
                    if spos != epos:
                        new = Interval(spos, epos)
                        try:
                            posiset += new
                        except IntervalError:
                            self.fix_row_overlap(sid, path, posiset, new)

                try:
                    # Check bulkdata
                    self.check_bulkdata(sid, path, bulk)

                    # Check that we can open bulkdata
                    tab = nilmdb.server.bulkdata.Table(bulk)
                except FsckFormatError:
                    # If there are no files except _format, try deleting
                    # the entire stream; this may remove metadata, but
                    # it's probably unimportant.
                    files = list(os.listdir(bulk))
                    if len(files) > 1:
                        raise FsckFormatError(f"{path}: can't load _format, "
                                              f"but data is also present")

                    # Since the stream was empty, just remove it
                    self.fix_remove_stream(sid, path, bulk,
                                           "empty, with corrupted format file")
                except FsckError as e:
                    raise e
                except Exception as e:  # pragma: no cover
                    # No coverage because this is an unknown/unexpected error
                    raise FsckError("%s: can't open bulkdata: %s",
                                    path, str(e))
                tab.close()


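# Illustration (not part of the original module): IntervalSet raises
# IntervalError when a new interval overlaps one already in the set, which
# is what the loop above relies on.  Assumed semantics:
#
#     iset = IntervalSet()
#     iset += Interval(0, 10)
#     iset += Interval(10, 20)   # adjacent is fine
#     iset += Interval(5, 15)    # overlaps -> raises IntervalError

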
    def fix_row_overlap(self, sid, path, existing, new):
        # If the file rows (spos, epos) overlap in the interval table,
        # and the overlapping ranges look like this:
        #    A --------- C
        #           B -------- D
        # then we can try changing the first interval to go from
        # A to B instead.
        msg = (f"{path}: overlap in file offsets:\n"
               f"existing ranges: {existing}\n"
               f"overlapping interval: {new}")
        if not self.fix:
            raise FixableFsckError(msg)
        err(f"\n{msg}\nSeeing if we can truncate one of them...\n")

        # See if there's exactly one interval that overlaps the
        # conflicting one in the right way
        match = None
        for intv in self.stream_interval[sid]:
            (stime, etime, spos, epos) = intv
            if spos < new.start and epos > new.start:
                if match:
                    err(f"no, more than one interval matched:\n"
                        f"{intv}\n{match}\n")
                    raise FsckError(f"{path}: unfixable overlap")
                match = intv
        if match is None:
            err("no intervals overlapped in the right way\n")
            raise FsckError(f"{path}: unfixable overlap")

        # Truncate the file position
        err(f"truncating {match}\n")
        with self.sql:
            cur = self.sql.cursor()
            cur.execute("UPDATE ranges SET end_pos=? "
                        "WHERE stream_id=? AND start_time=? AND "
                        "end_time=? AND start_pos=? AND end_pos=?",
                        (new.start, sid, *match))
            if cur.rowcount != 1:  # pragma: no cover (shouldn't fail)
                raise FsckError("failed to fix SQL database")
        raise RetryFsck

    ### Check that bulkdata is good enough to be opened

    @retry_if_raised(RetryFsck)
    def check_bulkdata(self, sid, path, bulk):
        try:
            with open(os.path.join(bulk, b"_format"), "rb") as f:
                fmt = pickle.load(f)
        except Exception as e:
            raise FsckFormatError(f"{path}: can't load _format file ({e})")

        if fmt["version"] != 3:
            raise FsckFormatError("%s: bad or unsupported bulkdata version %d",
                                  path, fmt["version"])
        rows_per_file = int(fmt["rows_per_file"])
        if rows_per_file < 1:
            raise FsckFormatError(f"{path}: bad rows_per_file {rows_per_file}")
        files_per_dir = int(fmt["files_per_dir"])
        if files_per_dir < 1:
            raise FsckFormatError(f"{path}: bad files_per_dir {files_per_dir}")
        layout = fmt["layout"]
        if layout != self.stream_layout[sid]:
            raise FsckFormatError("%s: layout mismatch %s != %s", path,
                                  layout, self.stream_layout[sid])

        # Every file should have a size that's a multiple of the row size
        rkt = nilmdb.server.rocket.Rocket(layout, None)
        row_size = rkt.binary_size
        rkt.close()

        # Find all directories
        regex = re.compile(b"^[0-9a-f]{4,}$")
        subdirs = sorted(filter(regex.search, os.listdir(bulk)),
                         key=lambda x: int(x, 16), reverse=True)
        for subdir in subdirs:
            # Find all files in that dir
            subpath = os.path.join(bulk, subdir)
            files = list(filter(regex.search, os.listdir(subpath)))
            if not files:
                self.fix_empty_subdir(subpath)

            # Verify that their size is a multiple of the row size
            for filename in files:
                filepath = os.path.join(subpath, filename)
                offset = os.path.getsize(filepath)
                if offset % row_size:
                    self.fix_bad_filesize(path, filepath, offset, row_size)

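# Illustration (not part of the original module): based on the checks
# above, a valid _format file is a pickled dict shaped like this (the
# numeric values are examples, not requirements beyond being >= 1):
#
#     fmt = {
#         "version": 3,
#         "rows_per_file": 4194304,
#         "files_per_dir": 32768,
#         "layout": "float32_8",   # must match the stream's layout
#     }
#     with open("_format", "wb") as f:
#         pickle.dump(fmt, f, 2)
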
    def fix_empty_subdir(self, subpath):
        msg = sprintf("bulkdata path %s is missing data files", subpath)
        if not self.fix:
            raise FixableFsckError(msg)
        # Try to fix it by just deleting whatever is present,
        # as long as it's only ".removed" files.
        err("\n%s\n", msg)
        for fn in os.listdir(subpath):
            if not fn.endswith(b".removed"):
                raise FsckError("can't fix automatically: please manually "
                                "remove the file '%s' and try again",
                                os.path.join(subpath, fn).decode(
                                    'utf-8', errors='backslashreplace'))
        # Remove the whole thing
        err("Removing empty subpath\n")
        shutil.rmtree(subpath)
        raise RetryFsck

    def fix_bad_filesize(self, path, filepath, offset, row_size):
        extra = offset % row_size
        msg = sprintf("%s: size of file %s (%d) is not a multiple" +
                      " of row size (%d): %d extra bytes present",
                      path, filepath, offset, row_size, extra)
        if not self.fix:
            raise FixableFsckError(msg)
        # Try to fix it by just truncating the file
        err("\n%s\n", msg)
        newsize = offset - extra
        err("Truncating file to %d bytes and retrying\n", newsize)
        with open(filepath, "r+b") as f:
            f.truncate(newsize)
            raise RetryFsck

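# Worked example (not part of the original module): with row_size = 24 and
# a file of offset = 1000 bytes, extra = 1000 % 24 = 16, so the file is
# truncated to newsize = 1000 - 16 = 984 bytes, i.e. exactly 41 rows.
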
    def fix_remove_stream(self, sid, path, bulk, reason):
        msg = f"stream {path} is corrupted: {reason}"
        if not self.fix:
            raise FixableFsckError(msg)
        # Remove the stream from disk and the database
        err(f"\n{msg}\n")
        err(f"Removing stream {path} from disk and database\n")
        shutil.rmtree(bulk)
        with self.sql:
            cur = self.sql.cursor()
            cur.execute("DELETE FROM streams WHERE id=?",
                        (sid,))
            if cur.rowcount != 1:  # pragma: no cover (shouldn't fail)
                raise FsckError("failed to remove stream")
            cur.execute("DELETE FROM ranges WHERE stream_id=?", (sid,))
            cur.execute("DELETE FROM metadata WHERE stream_id=?", (sid,))
        raise RetryFsck

    ### Check interval endpoints

    def check_intervals(self):
        total_ints = sum(len(x) for x in list(self.stream_interval.values()))
        log("checking %s intervals\n", "{:,d}".format(total_ints))
        done = 0
        with Progress(total_ints) as pbar:
            for sid in self.stream_interval:
                bulk = self.bulkpath + self.stream_path[sid]
                bulk = bulk.encode('utf-8')
                # Open the table outside the try block, so that a failed
                # open doesn't hit the finally clause with "tab" unbound.
                tab = nilmdb.server.bulkdata.Table(bulk)
                try:
                    def update(x):
                        pbar.update(done + x)

                    ints = self.stream_interval[sid]
                    done += self.check_table_intervals(sid, ints, tab, update)
                finally:
                    tab.close()

    def check_table_intervals(self, sid, ints, tab, update):
        # Look in the table to make sure we can pick out the interval's
        # endpoints
        path = self.stream_path[sid]  # noqa: F841 unused
        tab.file_open.cache_remove_all()
        for (i, intv) in enumerate(ints):
            update(i)
            (stime, etime, spos, epos) = intv
            if spos == epos and spos >= 0 and spos <= tab.nrows:
                continue
            try:
                srow = tab[spos]    # noqa: F841 unused
                erow = tab[epos-1]  # noqa: F841 unused
            except Exception as e:
                self.fix_bad_interval(sid, intv, tab, str(e))

        return len(ints)

    def fix_bad_interval(self, sid, intv, tab, msg):
        path = self.stream_path[sid]
        msg = sprintf("%s: interval %s error accessing rows: %s",
                      path, str(intv), str(msg))
        if not self.fix:
            raise FixableFsckError(msg)
        err("\n%s\n", msg)

        (stime, etime, spos, epos) = intv
        # If it's just that the end pos is more than the number of rows
        # in the table, lower end pos and truncate interval time too.
        if spos < tab.nrows and epos >= tab.nrows:
            err("end position is past end of rows, but it can be truncated\n")
            err("old end: time %d, pos %d\n", etime, epos)
            new_epos = tab.nrows
            new_etime = tab[new_epos-1] + 1
            err("new end: time %d, pos %d\n", new_etime, new_epos)
            if stime < new_etime:
                # Change it in SQL
                with self.sql:
                    cur = self.sql.cursor()
                    cur.execute("UPDATE ranges SET end_time=?, end_pos=? "
                                "WHERE stream_id=? AND start_time=? AND "
                                "end_time=? AND start_pos=? AND end_pos=?",
                                (new_etime, new_epos, sid, stime, etime,
                                 spos, epos))
                    if cur.rowcount != 1:  # pragma: no cover (shouldn't fail)
                        raise FsckError("failed to fix SQL database")
                raise RetryFsck
            err("actually it can't be truncated; times are bad too\n")

        # Otherwise, the only hope is to delete the interval entirely.
        err("*** Deleting the entire interval from SQL.\n")
        err("This may leave stale data on disk.  To fix that, copy all\n")
        err("data from this stream to a new stream using nilm-copy, then\n")
        err("remove all data from and destroy %s.\n", path)
        with self.sql:
            cur = self.sql.cursor()
            cur.execute("DELETE FROM ranges WHERE "
                        "stream_id=? AND start_time=? AND "
                        "end_time=? AND start_pos=? AND end_pos=?",
                        (sid, stime, etime, spos, epos))
            if cur.rowcount != 1:  # pragma: no cover (shouldn't fail)
                raise FsckError("failed to remove interval")
        raise RetryFsck

    ### Check data in each interval

    def check_data(self):
        total_rows = sum(sum((y[3] - y[2]) for y in x)
                         for x in list(self.stream_interval.values()))
        log("checking %s rows of data\n", "{:,d}".format(total_rows))
        done = 0
        with Progress(total_rows) as pbar:
            for sid in self.stream_interval:
                bulk = self.bulkpath + self.stream_path[sid]
                bulk = bulk.encode('utf-8')
                # As in check_intervals, open the table before the try
                # block so a failed open doesn't leave "tab" unbound.
                tab = nilmdb.server.bulkdata.Table(bulk)
                try:
                    def update(x):
                        pbar.update(done + x)

                    ints = self.stream_interval[sid]
                    done += self.check_table_data(sid, ints, tab, update)
                finally:
                    tab.close()

    def check_table_data(self, sid, ints, tab, update):
        # Pull out all of the interval's data and verify that it's
        # monotonic.
        maxrows = getattr(self, 'maxrows_override', 100000)
        path = self.stream_path[sid]
        layout = self.stream_layout[sid]
        dtype = nilmdb.client.numpyclient.layout_to_dtype(layout)
        tab.file_open.cache_remove_all()
        done = 0
        for intv in ints:
            last_ts = None
            (stime, etime, spos, epos) = intv

            # Break interval into maxrows-sized chunks
            next_start = spos
            while next_start < epos:
                start = next_start
                stop = min(start + maxrows, epos)
                count = stop - start
                next_start = stop

                # Get raw data, convert to NumPy array
                try:
                    raw = tab.get_data(start, stop, binary=True)
                    data = numpy.frombuffer(raw, dtype)
                except Exception as e:  # pragma: no cover
                    # No coverage because it's hard to trigger this -- earlier
                    # checks check the ranges, so this would probably be a real
                    # disk error, malloc failure, etc.
                    raise FsckError(
                        "%s: failed to grab rows %d through %d: %s",
                        path, start, stop, repr(e))

                ts = data['timestamp']

                # Verify that all timestamps are in range.
                match = (ts < stime) | (ts >= etime)
                if match.any():
                    row = numpy.argmax(match)
                    if ts[row] != 0:
                        raise FsckError("%s: data timestamp %d at row %d "
                                        "outside interval range [%d,%d)",
                                        path, ts[row], row + start,
                                        stime, etime)

                    # Timestamp is zero and out of the expected range;
                    # assume file ends with zeroed data and just truncate it.
                    self.fix_table_by_truncating(
                        path, tab, row + start,
                        "data timestamp is out of range, and zero")

                # Verify that timestamps are monotonic
                match = numpy.diff(ts) <= 0
                if match.any():
                    row = numpy.argmax(match)
                    if ts[row+1] != 0:
                        raise FsckError("%s: non-monotonic timestamp (%d -> %d)"
                                        " at row %d", path, ts[row], ts[row+1],
                                        row + start)

                    # Timestamp is zero and non-monotonic;
                    # assume file ends with zeroed data and just truncate it.
                    self.fix_table_by_truncating(
                        path, tab, row + start + 1,
                        "data timestamp is non-monotonic, and zero")

                first_ts = ts[0]
                if last_ts is not None and first_ts <= last_ts:
                    raise FsckError("%s: first interval timestamp %d is not "
                                    "greater than the previous last interval "
                                    "timestamp %d, at row %d",
                                    path, first_ts, last_ts, start)
                last_ts = ts[-1]

                # The previous errors are fixable, by removing the
                # offending intervals, or changing the data
                # timestamps.  But these are probably unlikely errors,
                # so it's not worth implementing that yet.

                # Done
                done += count
                update(done)
        return done

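# Illustration (not part of the original module): how the monotonicity
# test above finds the first bad row.  numpy.diff(ts) <= 0 is a boolean
# array of per-step violations, and numpy.argmax returns the first True.
#
#     import numpy
#     ts = numpy.array([10, 20, 30, 0, 0])
#     match = numpy.diff(ts) <= 0      # [False, False, True, True]
#     row = numpy.argmax(match)        # 2: ts[3] fails to increase
#     # ts[row+1] == 0, so the file would be truncated at that row.
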
    def fix_table_by_truncating(self, path, tab, row, reason):
        # Simple fix for bad data: truncate the table at the given row.
        # On retry, fix_bad_interval will correct the database and timestamps
        # to account for this truncation.
        msg = f"{path}: bad data in table, starting at row {row}: {reason}"
        if not self.fix:
            raise FixableFsckError(msg)
        err(f"\n{msg}\nWill try truncating table\n")
        (subdir, fname, offs, count) = tab._offset_from_row(row)
        tab._remove_or_truncate_file(subdir, fname, offs)
        raise RetryFsck

nilmdb/scripts/nilmdb_fsck.py (new executable file, 27 lines)
@@ -0,0 +1,27 @@
#!/usr/bin/env python3

import nilmdb.fsck
import argparse


def main():
    """Main entry point for the 'nilmdb-fsck' command line script"""

    parser = argparse.ArgumentParser(
        description='Check database consistency',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("-v", "--version", action="version",
                        version=nilmdb.__version__)
    parser.add_argument("-f", "--fix", action="store_true",
                        default=False, help='Fix errors when possible '
                        '(which may involve removing data)')
    parser.add_argument("-n", "--no-data", action="store_true",
                        default=False, help='Skip the slow full-data check')
    parser.add_argument('database', help='Database directory')
    args = parser.parse_args()

    nilmdb.fsck.Fsck(args.database, args.fix).check(skip_data=args.no_data)


if __name__ == "__main__":
    main()
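
Example invocations of the script above (the database path is
hypothetical; the flags come from the argparse setup):

    nilmdb-fsck --fix /home/nilmdb/db        # check and repair
    nilmdb-fsck --no-data /home/nilmdb/db    # skip the slow full-data check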
nilmdb/scripts/nilmdb_server.py
@@ -1,38 +1,43 @@
-#!/usr/bin/python
+#!/usr/bin/env python3
+
+import os
+import sys
+import socket
+import argparse
+
+import cherrypy
+
 import nilmdb.server
-import argparse
-import os
-import socket
 
 def main():
     """Main entry point for the 'nilmdb-server' command line script"""
 
     parser = argparse.ArgumentParser(
-        description = 'Run the NilmDB server',
-        formatter_class = argparse.ArgumentDefaultsHelpFormatter)
+        description='Run the NilmDB server',
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
 
-    parser.add_argument("-V", "--version", action="version",
-                        version = nilmdb.__version__)
+    parser.add_argument("-v", "--version", action="version",
+                        version=nilmdb.__version__)
 
     group = parser.add_argument_group("Standard options")
     group.add_argument('-a', '--address',
-                       help = 'Only listen on the given address',
-                       default = '0.0.0.0')
+                       help='Only listen on the given address',
+                       default='0.0.0.0')
-    group.add_argument('-p', '--port', help = 'Listen on the given port',
-                       type = int, default = 12380)
+    group.add_argument('-p', '--port', help='Listen on the given port',
+                       type=int, default=12380)
-    group.add_argument('-d', '--database', help = 'Database directory',
-                       default = os.path.join(os.getcwd(), "db"))
+    group.add_argument('-d', '--database', help='Database directory',
+                       default="./db")
-    group.add_argument('-q', '--quiet', help = 'Silence output',
-                       action = 'store_true')
+    group.add_argument('-q', '--quiet', help='Silence output',
+                       action='store_true')
     group.add_argument('-t', '--traceback',
-                       help = 'Provide tracebacks in client errors',
-                       action = 'store_true', default = False)
+                       help='Provide tracebacks in client errors',
+                       action='store_true', default=False)
 
     group = parser.add_argument_group("Debug options")
-    group.add_argument('-y', '--yappi', help = 'Run under yappi profiler and '
+    group.add_argument('-y', '--yappi', help='Run under yappi profiler and '
                        'invoke interactive shell afterwards',
-                       action = 'store_true')
+                       action='store_true')
 
     args = parser.parse_args()
 
@@ -41,47 +46,54 @@ def main():
     db = nilmdb.utils.serializer_proxy(nilmdb.server.NilmDB)(args.database)
 
     # Configure the server
-    if args.quiet:
-        embedded = True
-    else:
-        embedded = False
+    if not args.quiet:
+        cherrypy._cpconfig.environments['embedded']['log.screen'] = True
+
     server = nilmdb.server.Server(db,
-                                  host = args.address,
-                                  port = args.port,
-                                  embedded = embedded,
-                                  force_traceback = args.traceback)
+                                  host=args.address,
+                                  port=args.port,
+                                  force_traceback=args.traceback)
 
     # Print info
     if not args.quiet:
-        print "Version: %s" % nilmdb.__version__
-        print "Database: %s" % (os.path.realpath(args.database))
+        print("Version: %s" % nilmdb.__version__)
+        print("Database: %s" % (os.path.realpath(args.database)))
         if args.address == '0.0.0.0' or args.address == '::':
            host = socket.getfqdn()
        else:
            host = args.address
-        print "Server URL: http://%s:%d/" % ( host, args.port)
-        print "----"
+        print("Server URL: http://%s:%d/" % (host, args.port))
+        print("----")
 
     # Run it
+    try:
-    if args.yappi:
-        print "Running in yappi"
-        try:
-            import yappi
-            yappi.start()
-            server.start(blocking = True)
-        finally:
-            yappi.stop()
-            yappi.print_stats(sort_type = yappi.SORTTYPE_TTOT, limit = 50)
-            from IPython import embed
-            embed(header = "Use the yappi object to explore further, "
-                  "quit to exit")
-    else:
-        server.start(blocking = True)
-
-    # Clean up
-    if not args.quiet:
-        print "Closing database"
-    db.close()
+        if args.yappi:
+            print("Running in yappi")
+            try:
+                import yappi
+                yappi.start()
+                server.start(blocking=True)
+            finally:
+                yappi.stop()
+                stats = yappi.get_func_stats()
+                stats.sort("ttot")
+                stats.print_all()
+                try:
+                    from IPython import embed
+                    embed(header="Use the `yappi` or `stats` object to "
+                          "explore further, `quit` to exit")
+                except ModuleNotFoundError:
+                    print("\nInstall ipython to explore further")
+        else:
+            server.start(blocking=True)
+    except nilmdb.server.serverutil.CherryPyExit:
+        print("Exiting due to CherryPy error", file=sys.stderr)
+        raise
+    finally:
+        if not args.quiet:
+            print("Closing database")
+        db.close()
 
 
 if __name__ == "__main__":
     main()

nilmdb/scripts/nilmtool.py
@@ -1,10 +1,12 @@
-#!/usr/bin/python
+#!/usr/bin/env python3
+
 import nilmdb.cmdline
 
+
 def main():
     """Main entry point for the 'nilmtool' command line script"""
     nilmdb.cmdline.Cmdline().run()
 
+
 if __name__ == "__main__":
     main()

nilmdb/server/__init__.py
@@ -1,21 +1,9 @@
 """nilmdb.server"""
 
-from __future__ import absolute_import
-
-# Try to set up pyximport to automatically rebuild Cython modules.  If
-# this doesn't work, it's OK, as long as the modules were built externally.
-# (e.g. python setup.py build_ext --inplace)
-try: # pragma: no cover
-    import Cython
-    import distutils.version
-    if (distutils.version.LooseVersion(Cython.__version__) <
-        distutils.version.LooseVersion("0.17")): # pragma: no cover
-        raise ImportError("Cython version too old")
-    import pyximport
-    pyximport.install(inplace = True, build_in_temp = False)
-except (ImportError, TypeError): # pragma: no cover
-    pass
+# Set up pyximport to automatically rebuild Cython modules if needed.
+import pyximport
+pyximport.install(inplace=True, build_in_temp=False)
 
 from nilmdb.server.nilmdb import NilmDB
-from nilmdb.server.server import Server
+from nilmdb.server.server import Server, wsgi_application
 from nilmdb.server.errors import NilmDBError, StreamError, OverlapError

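The newly exported wsgi_application makes it possible to serve NilmDB from
a WSGI container instead of the standalone CherryPy server.  A hedged
sketch (the two arguments are assumed to be the database path and the URL
base path, and "/home/nilmdb/db" is a hypothetical location):

    # wsgi.py -- minimal WSGI entry point, e.g. for uwsgi or mod_wsgi
    import nilmdb.server
    application = nilmdb.server.wsgi_application("/home/nilmdb/db", "/nilmdb")
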
@@ -1,81 +1,110 @@
 | 
				
			|||||||
# Fixed record size bulk data storage
 | 
					# Fixed record size bulk data storage
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# Need absolute_import so that "import nilmdb" won't pull in
 | 
					 | 
				
			||||||
# nilmdb.py, but will pull the parent nilmdb module instead.
 | 
					 | 
				
			||||||
from __future__ import absolute_import
 | 
					 | 
				
			||||||
from __future__ import division
 | 
					 | 
				
			||||||
from nilmdb.utils.printf import *
 | 
					 | 
				
			||||||
from nilmdb.utils.time import timestamp_to_string as timestamp_to_string
 | 
					 | 
				
			||||||
import nilmdb.utils
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
import os
 | 
					import os
 | 
				
			||||||
import cPickle as pickle
 | 
					 | 
				
			||||||
import re
 | 
					import re
 | 
				
			||||||
import sys
 | 
					import sys
 | 
				
			||||||
 | 
					import pickle
 | 
				
			||||||
import tempfile
 | 
					import tempfile
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					from nilmdb.utils.printf import sprintf
 | 
				
			||||||
 | 
					from nilmdb.utils.time import timestamp_to_string
 | 
				
			||||||
 | 
					import nilmdb.utils
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import nilmdb.utils.lock
 | 
				
			||||||
from . import rocket
 | 
					from . import rocket
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# Up to 256 open file descriptors at any given time.
 | 
					# Up to 256 open file descriptors at any given time.
 | 
				
			||||||
 # These variables are global so they can be used in the decorator arguments.
-table_cache_size = 16
-fd_cache_size = 16
+table_cache_size = 32
+fd_cache_size = 8
 
-@nilmdb.utils.must_close(wrap_verify = False)
-class BulkData(object):
+@nilmdb.utils.must_close(wrap_verify=False)
+class BulkData():
     def __init__(self, basepath, **kwargs):
-        self.basepath = basepath
-        self.root = os.path.join(self.basepath, "data")
+        if isinstance(basepath, str):
+            self.basepath = self._encode_filename(basepath)
+        else:
+            self.basepath = basepath
+        self.root = os.path.join(self.basepath, b"data")
+        self.lock = self.root + b".lock"
+        self.lockfile = None
 
         # Tuneables
-        if "file_size" in kwargs:
+        if "file_size" in kwargs and kwargs["file_size"] is not None:
             self.file_size = kwargs["file_size"]
         else:
             # Default to approximately 128 MiB per file
             self.file_size = 128 * 1024 * 1024
 
-        if "files_per_dir" in kwargs:
+        if "files_per_dir" in kwargs and kwargs["files_per_dir"] is not None:
             self.files_per_dir = kwargs["files_per_dir"]
         else:
             # 32768 files per dir should work even on FAT32
             self.files_per_dir = 32768
 
+        if "initial_nrows" in kwargs and kwargs["initial_nrows"] is not None:
+            self.initial_nrows = kwargs["initial_nrows"]
+        else:
+            # First row is 0
+            self.initial_nrows = 0
+
         # Make root path
         if not os.path.isdir(self.root):
             os.mkdir(self.root)
 
+        # Create the lock
+        self.lockfile = open(self.lock, "w")
+        if not nilmdb.utils.lock.exclusive_lock(self.lockfile):
+            raise IOError('database at "' +
+                          self._decode_filename(self.basepath) +
+                          '" is already locked by another process')
 
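The lock acquired above comes from nilmdb.utils.lock.exclusive_lock(), which returns a falsy value when another process already holds the database. A minimal sketch of such a helper, assuming a POSIX fcntl-based implementation (the function names and return convention are taken from the calls above; the body is illustrative, not the actual module):

    import fcntl

    def exclusive_lock(f):
        # Try to take an exclusive, non-blocking lock on the open file
        # object 'f'.  Returns True on success, False if another
        # process already holds the lock.
        try:
            fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
        except OSError:
            return False
        return True

    def exclusive_unlock(f):
        # Release the lock; any error propagates to the caller.
        fcntl.flock(f.fileno(), fcntl.LOCK_UN)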
     def close(self):
         self.getnode.cache_remove_all()
+        if self.lockfile:
+            nilmdb.utils.lock.exclusive_unlock(self.lockfile)
+            self.lockfile.close()
+            try:
+                os.unlink(self.lock)
+            except OSError:
+                pass
+            self.lockfile = None
 
     def _encode_filename(self, path):
-        # Encode all paths to UTF-8, regardless of sys.getfilesystemencoding(),
-        # because we want to be able to represent all code points and the user
-        # will never be directly exposed to filenames.  We can then do path
-        # manipulations on the UTF-8 directly.
-        if isinstance(path, unicode):
-            return path.encode('utf-8')
-        return path
+        # Translate unicode strings to raw bytes, if needed.  We
+        # always manipulate paths internally as bytes.
+        return path.encode('utf-8')
+
+    def _decode_filename(self, path):
+        # Translate raw bytes to unicode strings, escaping if needed
+        return path.decode('utf-8', errors='backslashreplace')
 
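The encode/decode pair keeps every internal path as UTF-8 bytes, and errors='backslashreplace' guarantees decoding can never raise, even for byte sequences that are not valid UTF-8. A quick illustration:

    path = "/stream/élan".encode('utf-8')   # internal form: bytes
    print(path)                             # b'/stream/\xc3\xa9lan'
    print(path.decode('utf-8', errors='backslashreplace'))  # '/stream/élan'

    # Invalid UTF-8 still decodes, with escapes, instead of raising:
    bad = b'/stream/\xff'
    print(bad.decode('utf-8', errors='backslashreplace'))   # '/stream/\\xff'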
     def _create_check_ospath(self, ospath):
-        if ospath[-1] == '/':
+        if ospath[-1:] == b'/':
             raise ValueError("invalid path; should not end with a /")
         if Table.exists(ospath):
             raise ValueError("stream already exists at this path")
         if os.path.isdir(ospath):
-            raise ValueError("subdirs of this path already exist")
+            # Look for any files in subdirectories.  Fully empty subdirectories
+            # are OK; they might be there during a rename
+            for (root, dirs, files) in os.walk(ospath):
+                if files:
+                    raise ValueError(
+                        "non-empty subdirs of this path already exist")
 
     def _create_parents(self, unicodepath):
         """Verify the path name, and create parent directories if they
         don't exist.  Returns a list of elements that got created."""
         path = self._encode_filename(unicodepath)
 
-        if path[0] != '/':
+        if path[0:1] != b'/':
             raise ValueError("paths must start with / ")
-        [ group, node ] = path.rsplit("/", 1)
-        if group == '':
+        [group, node] = path.rsplit(b"/", 1)
+        if group == b'':
             raise ValueError("invalid path; path must contain at least one "
                              "folder")
-        if node == '':
+        if node == b'':
             raise ValueError("invalid path; should not end with a /")
         if not Table.valid_path(path):
             raise ValueError("path name is invalid or contains reserved words")
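The switch from path[0] != '/' to path[0:1] != b'/' (and ospath[-1] to ospath[-1:]) matters because indexing a bytes object in Python 3 returns an int, while slicing returns bytes:

    p = b"/foo/bar"
    p[0]      # 47 (an int) -- comparing this to b'/' is always False
    p[0:1]    # b'/' -- a one-byte bytes object, comparable to b'/'
    p[-1:]    # b'r'; also safe on empty bytes, where p[-1] would raise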
@@ -86,7 +115,7 @@ class BulkData(object):
         # os.path.join)
 
         # Make directories leading up to this one
-        elements = path.lstrip('/').split('/')
+        elements = path.lstrip(b'/').split(b'/')
         made_dirs = []
         try:
             # Make parent elements
@@ -97,15 +126,11 @@ class BulkData(object):
                 if not os.path.isdir(ospath):
                     os.mkdir(ospath)
                     made_dirs.append(ospath)
-        except Exception as e:
-            # Try to remove paths that we created; ignore errors
-            exc_info = sys.exc_info()
-            for ospath in reversed(made_dirs): # pragma: no cover (hard to hit)
-                try:
-                    os.rmdir(ospath)
-                except OSError:
-                    pass
-            raise exc_info[1], None, exc_info[2]
+        except Exception:
+            # Remove paths that we created
+            for ospath in reversed(made_dirs):
+                os.rmdir(ospath)
+            raise
 
         return elements
 
@@ -134,13 +159,13 @@ class BulkData(object):
 
             # Open and cache it
             self.getnode(unicodepath)
-        except:
+        except Exception:
             exc_info = sys.exc_info()
             try:
                 os.rmdir(ospath)
             except OSError:
                 pass
-            raise exc_info[1], None, exc_info[2]
+            raise exc_info[1].with_traceback(exc_info[2])
 
         # Success
         return
@@ -148,8 +173,8 @@ class BulkData(object):
     def _remove_leaves(self, unicodepath):
         """Remove empty directories starting at the leaves of unicodepath"""
         path = self._encode_filename(unicodepath)
-        elements = path.lstrip('/').split('/')
-        for i in reversed(range(len(elements))):
+        elements = path.lstrip(b'/').split(b'/')
+        for i in reversed(list(range(len(elements)))):
             ospath = os.path.join(self.root, *elements[0:i+1])
             try:
                 os.rmdir(ospath)
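The raise exc_info[1], None, exc_info[2] form is Python 2 syntax; under Python 3 the same effect comes from with_traceback(), or a bare raise when the except block is simple enough. A minimal comparison (risky_operation and cleanup are hypothetical placeholders):

    import sys

    try:
        risky_operation()        # hypothetical callable
    except Exception:
        exc_info = sys.exc_info()
        cleanup()                # hypothetical; may itself raise
        # Python 2:  raise exc_info[0], exc_info[1], exc_info[2]
        # Python 3:
        raise exc_info[1].with_traceback(exc_info[2])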
@@ -163,22 +188,27 @@ class BulkData(object):
         newpath = self._encode_filename(newunicodepath)
 
         # Get OS paths
-        oldelements = oldpath.lstrip('/').split('/')
+        oldelements = oldpath.lstrip(b'/').split(b'/')
         oldospath = os.path.join(self.root, *oldelements)
-        newelements = newpath.lstrip('/').split('/')
+        newelements = newpath.lstrip(b'/').split(b'/')
         newospath = os.path.join(self.root, *newelements)
 
         # Basic checks
         if oldospath == newospath:
             raise ValueError("old and new paths are the same")
-        self._create_check_ospath(newospath)
+
+        # Remove Table object at old path from cache
+        self.getnode.cache_remove(self, oldunicodepath)
 
         # Move the table to a temporary location
-        tmpdir = tempfile.mkdtemp(prefix = "rename-", dir = self.root)
-        tmppath = os.path.join(tmpdir, "table")
+        tmpdir = tempfile.mkdtemp(prefix=b"rename-", dir=self.root)
+        tmppath = os.path.join(tmpdir, b"table")
         os.rename(oldospath, tmppath)
 
         try:
+            # Check destination path
+            self._create_check_ospath(newospath)
+
             # Create parent dirs for new location
             self._create_parents(newunicodepath)
 
@@ -200,7 +230,7 @@ class BulkData(object):
         path = self._encode_filename(unicodepath)
 
         # Get OS path
-        elements = path.lstrip('/').split('/')
+        elements = path.lstrip(b'/').split(b'/')
         ospath = os.path.join(self.root, *elements)
 
         # Remove Table object from cache
@@ -209,7 +239,7 @@ class BulkData(object):
         # Remove the contents of the target directory
         if not Table.exists(ospath):
             raise ValueError("nothing at that path")
-        for (root, dirs, files) in os.walk(ospath, topdown = False):
+        for (root, dirs, files) in os.walk(ospath, topdown=False):
             for name in files:
                 os.remove(os.path.join(root, name))
             for name in dirs:
@@ -219,18 +249,19 @@ class BulkData(object):
         self._remove_leaves(unicodepath)
 
     # Cache open tables
-    @nilmdb.utils.lru_cache(size = table_cache_size,
-                            onremove = lambda x: x.close())
+    @nilmdb.utils.lru_cache(size=table_cache_size,
+                            onremove=lambda x: x.close())
     def getnode(self, unicodepath):
         """Return a Table object corresponding to the given database
         path, which must exist."""
         path = self._encode_filename(unicodepath)
-        elements = path.lstrip('/').split('/')
+        elements = path.lstrip(b'/').split(b'/')
         ospath = os.path.join(self.root, *elements)
-        return Table(ospath)
+        return Table(ospath, self.initial_nrows)
 
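Unlike functools.lru_cache, the nilmdb.utils.lru_cache decorator used here takes an onremove callback and (as the calls elsewhere in this diff show) exposes cache_remove()/cache_remove_all() helpers, so evicted Table objects and file mappings get closed instead of leaked. A rough sketch of the eviction idea only, not the actual decorator:

    from collections import OrderedDict

    def lru_cache(size, onremove=None):
        def decorator(func):
            cache = OrderedDict()
            def wrapper(*args):
                if args in cache:
                    cache.move_to_end(args)   # mark most recently used
                    return cache[args]
                value = func(*args)
                cache[args] = value
                if len(cache) > size:
                    _, evicted = cache.popitem(last=False)
                    if onremove:
                        onremove(evicted)     # e.g. close evicted Table
                return value
            return wrapper
        return decorator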
-@nilmdb.utils.must_close(wrap_verify = False)
-class Table(object):
+
+@nilmdb.utils.must_close(wrap_verify=False)
+class Table():
     """Tools to help access a single table (data at a specific OS path)."""
     # See design.md for design details
 
@@ -238,12 +269,12 @@ class Table(object):
     @classmethod
     def valid_path(cls, root):
         """Return True if a root path is a valid name"""
-        return "_format" not in root.split("/")
+        return b"_format" not in root.split(b"/")
 
     @classmethod
     def exists(cls, root):
         """Return True if a table appears to exist at this OS path"""
-        return os.path.isfile(os.path.join(root, "_format"))
+        return os.path.isfile(os.path.join(root, b"_format"))
 
     @classmethod
     def create(cls, root, layout, file_size, files_per_dir):
@@ -256,23 +287,26 @@ class Table(object):
         rows_per_file = max(file_size // rkt.binary_size, 1)
         rkt.close()
 
-        fmt = { "rows_per_file": rows_per_file,
-                "files_per_dir": files_per_dir,
-                "layout": layout,
-                "version": 3 }
-        with open(os.path.join(root, "_format"), "wb") as f:
-            pickle.dump(fmt, f, 2)
+        fmt = {
+            "rows_per_file": rows_per_file,
+            "files_per_dir": files_per_dir,
+            "layout": layout,
+            "version": 3
+        }
+        nilmdb.utils.atomic.replace_file(
+            os.path.join(root, b"_format"), pickle.dumps(fmt, 2))
 
     # Normal methods
-    def __init__(self, root):
+    def __init__(self, root, initial_nrows=0):
         """'root' is the full OS path to the directory of this table"""
         self.root = root
+        self.initial_nrows = initial_nrows
 
         # Load the format
-        with open(os.path.join(self.root, "_format"), "rb") as f:
+        with open(os.path.join(self.root, b"_format"), "rb") as f:
             fmt = pickle.load(f)
 
-        if fmt["version"] != 3: # pragma: no cover
+        if fmt["version"] != 3:
             # Old versions used floating point timestamps, which aren't
             # valid anymore.
             raise NotImplementedError("old version " + str(fmt["version"]) +
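Writing the _format file through nilmdb.utils.atomic.replace_file, rather than rewriting it in place, means a reader either sees the old contents or the complete new contents, never a torn file. The usual pattern behind such a helper, sketched here as an assumption about its implementation:

    import os
    import tempfile

    def replace_file(filename, data):
        # Write to a temporary file in the same directory, then rename
        # it over the destination; rename is atomic on POSIX when both
        # paths are on the same filesystem.
        (dirname, basename) = os.path.split(filename)
        (fd, tmpname) = tempfile.mkstemp(prefix=basename, dir=dirname)
        with os.fdopen(fd, "wb") as f:
            f.write(data)
            f.flush()
            os.fsync(f.fileno())
        os.rename(tmpname, filename)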
@@ -302,31 +336,38 @@ class Table(object):
         # greater than the row number of any piece of data that
         # currently exists, not necessarily all data that _ever_
         # existed.
-        regex = re.compile("^[0-9a-f]{4,}$")
+        regex = re.compile(b"^[0-9a-f]{4,}$")
 
         # Find the last directory.  We sort and loop through all of them,
         # starting with the numerically greatest, because the dirs could be
-        # empty if something was deleted.
+        # empty if something was deleted but the directory was unexpectedly
+        # not deleted.
         subdirs = sorted(filter(regex.search, os.listdir(self.root)),
-                         key = lambda x: int(x, 16), reverse = True)
+                         key=lambda x: int(x, 16), reverse=True)
 
         for subdir in subdirs:
             # Now find the last file in that dir
             path = os.path.join(self.root, subdir)
-            files = filter(regex.search, os.listdir(path))
-            if not files: # pragma: no cover (shouldn't occur)
+            files = list(filter(regex.search, os.listdir(path)))
+            if not files:
                 # Empty dir: try the next one
                 continue
 
             # Find the numerical max
-            filename = max(files, key = lambda x: int(x, 16))
+            filename = max(files, key=lambda x: int(x, 16))
             offset = os.path.getsize(os.path.join(self.root, subdir, filename))
 
             # Convert to row number
             return self._row_from_offset(subdir, filename, offset)
 
-        # No files, so no data
-        return 0
+        # No files, so no data.  We typically start at row 0 in this
+        # case, although initial_nrows is specified during some tests
+        # to exercise other parts of the code better.  Since we have
+        # no files yet, round initial_nrows up so it points to a row
+        # that would begin a new file.
+        nrows = ((self.initial_nrows + (self.rows_per_file - 1)) //
+                 self.rows_per_file) * self.rows_per_file
+        return nrows
 
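The rounding expression is a ceiling division scaled back up: it bumps initial_nrows to the next multiple of rows_per_file so the first append starts a fresh file. For example:

    rows_per_file = 100
    for initial_nrows in (0, 1, 100, 250):
        nrows = ((initial_nrows + (rows_per_file - 1)) //
                 rows_per_file) * rows_per_file
        print(initial_nrows, "->", nrows)  # 0->0, 1->100, 100->100, 250->300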
     def _offset_from_row(self, row):
         """Return a (subdir, filename, offset, count) tuple:
@@ -339,8 +380,8 @@ class Table(object):
         filenum = row // self.rows_per_file
         # It's OK if these format specifiers are too short; the filenames
         # will just get longer but will still sort correctly.
-        dirname = sprintf("%04x", filenum // self.files_per_dir)
-        filename = sprintf("%04x", filenum % self.files_per_dir)
+        dirname = sprintf(b"%04x", filenum // self.files_per_dir)
+        filename = sprintf(b"%04x", filenum % self.files_per_dir)
         offset = (row % self.rows_per_file) * self.row_size
         count = self.rows_per_file - (row % self.rows_per_file)
         return (dirname, filename, offset, count)
@@ -348,14 +389,14 @@ class Table(object):
     def _row_from_offset(self, subdir, filename, offset):
         """Return the row number that corresponds to the given
         'subdir/filename' and byte-offset within that file."""
-        if (offset % self.row_size) != 0: # pragma: no cover
+        if (offset % self.row_size) != 0:
             # this shouldn't occur, unless there is some corruption somewhere
             raise ValueError("file offset is not a multiple of data size")
         filenum = int(subdir, 16) * self.files_per_dir + int(filename, 16)
         row = (filenum * self.rows_per_file) + (offset // self.row_size)
         return row
 
-    def _remove_or_truncate_file(self, subdir, filename, offset = 0):
+    def _remove_or_truncate_file(self, subdir, filename, offset=0):
         """Remove the given file, and remove the subdirectory too
         if it's empty.  If offset is nonzero, truncate the file
         to that size instead."""
@@ -371,12 +412,12 @@ class Table(object):
             # Try deleting subdir, too
             try:
                 os.rmdir(os.path.join(self.root, subdir))
-            except:
+            except Exception:
                 pass
 
     # Cache open files
-    @nilmdb.utils.lru_cache(size = fd_cache_size,
-                            onremove = lambda f: f.close())
+    @nilmdb.utils.lru_cache(size=fd_cache_size,
+                            onremove=lambda f: f.close())
     def file_open(self, subdir, filename):
         """Open and map a given 'subdir/filename' (relative to self.root).
         Will be automatically closed when evicted from the cache."""
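A worked round trip through the row/file mapping, using hypothetical parameters:

    rows_per_file = 1000
    files_per_dir = 32768
    row_size = 16

    row = 70123
    filenum = row // rows_per_file                   # 70
    dirname = "%04x" % (filenum // files_per_dir)    # '0000'
    filename = "%04x" % (filenum % files_per_dir)    # '0046'
    offset = (row % rows_per_file) * row_size        # 123 * 16 = 1968

    # And back again, as _row_from_offset does:
    assert ((int(dirname, 16) * files_per_dir + int(filename, 16))
            * rows_per_file + offset // row_size) == row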
@@ -389,12 +430,18 @@ class Table(object):
         return rocket.Rocket(self.layout,
                              os.path.join(self.root, subdir, filename))
 
-    def append_string(self, data, start, end):
+    def append_data(self, data, start, end, binary=False):
         """Parse the formatted string in 'data', according to the
         current layout, and append it to the table.  If any timestamps
         are non-monotonic, or don't fall between 'start' and 'end',
         a ValueError is raised.
 
+        Note that data is always of 'bytes' type.
+
+        If 'binary' is True, the data should be in raw binary format
+        instead: little-endian, matching the current table's layout,
+        including the int64 timestamp.
+
         If this function succeeds, it returns normally.  Otherwise,
         the table is reverted back to its original state by truncating
         or deleting files as necessary."""
@@ -407,24 +454,33 @@ class Table(object):
             while data_offset < len(data):
                 # See how many rows we can fit into the current file,
                 # and open it
-                (subdir, fname, offset, count) = self._offset_from_row(tot_rows)
+                (subdir, fname, offs, count) = self._offset_from_row(tot_rows)
                 f = self.file_open(subdir, fname)
 
                 # Ask the rocket object to parse and append up to "count"
                 # rows of data, verifying things along the way.
                 try:
+                    if binary:
+                        appender = f.append_binary
+                    else:
+                        appender = f.append_string
                     (added_rows, data_offset, last_timestamp, linenum
-                     ) = f.append_string(count, data, data_offset, linenum,
-                                  start, end, last_timestamp)
+                     ) = appender(count, data, data_offset, linenum,
+                                  start, end, last_timestamp)
                 except rocket.ParseError as e:
                     (linenum, colnum, errtype, obj) = e.args
-                    where = "line %d, column %d: " % (linenum, colnum)
+                    if binary:
+                        where = "byte %d: " % (linenum)
+                    else:
+                        where = "line %d, column %d: " % (linenum, colnum)
                     # Extract out the error line, add column marker
                     try:
+                        if binary:
+                            raise IndexError
                         bad = data.splitlines()[linenum-1]
-                        badptr = ' ' * (colnum - 1) + '^'
-                    except IndexError: # pragma: no cover
-                        bad = ""
+                        bad += b'\n' + b' ' * (colnum - 1) + b'^'
+                    except IndexError:
+                        bad = b""
                     if errtype == rocket.ERR_NON_MONOTONIC:
                         err = "timestamp is not monotonically increasing"
                     elif errtype == rocket.ERR_OUT_OF_INTERVAL:
@@ -438,16 +494,17 @@ class Table(object):
                                           timestamp_to_string(end))
                     else:
                         err = str(obj)
+                    bad_str = bad.decode('utf-8', errors='backslashreplace')
                     raise ValueError("error parsing input data: " +
-                                     where + err + "\n" + bad + "\n" + badptr)
+                                     where + err + "\n" + bad_str)
                 tot_rows += added_rows
         except Exception:
             # Some failure, so try to roll things back by truncating or
             # deleting files that we may have appended data to.
             cleanpos = self.nrows
             while cleanpos <= tot_rows:
-                (subdir, fname, offset, count) = self._offset_from_row(cleanpos)
-                self._remove_or_truncate_file(subdir, fname, offset)
+                (subdir, fname, offs, count) = self._offset_from_row(cleanpos)
+                self._remove_or_truncate_file(subdir, fname, offs)
                 cleanpos += count
             # Re-raise original exception
             raise
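For binary=True, each row is the little-endian packed form the docstring describes: an int64 timestamp followed by the column values. Assuming a hypothetical three-column float32 layout, a payload could be built like this:

    import struct

    # Hypothetical "float32_3" layout: int64 timestamp plus three
    # float32 values, all little-endian, as the docstring states.
    def pack_row(timestamp, values):
        return struct.pack('<q3f', timestamp, *values)

    data = b''.join(pack_row(ts, (1.0, 2.0, 3.0))
                    for ts in (1000, 2000, 3000))
    # len(data) == 3 rows * (8 + 3*4) bytes == 60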
@@ -455,14 +512,11 @@ class Table(object):
             # Success, so update self.nrows accordingly
             self.nrows = tot_rows
 
-    def get_data(self, start, stop):
+    def get_data(self, start, stop, binary=False):
         """Extract data corresponding to Python range [n:m],
         and returns a formatted string"""
-        if (start is None or
-            stop is None or
-            start > stop or
-            start < 0 or
-            stop > self.nrows):
+        if (start is None or stop is None or
+                start > stop or start < 0 or stop > self.nrows):
             raise IndexError("Index out of range")
 
         ret = []
@@ -473,10 +527,13 @@ class Table(object):
             if count > remaining:
                 count = remaining
             f = self.file_open(subdir, filename)
-            ret.append(f.extract_string(offset, count))
+            if binary:
+                ret.append(f.extract_binary(offset, count))
+            else:
+                ret.append(f.extract_string(offset, count))
             remaining -= count
             row += count
-        return "".join(ret)
+        return b"".join(ret)
 
     def __getitem__(self, row):
         """Extract timestamps from a row, with table[n] notation."""
@@ -499,12 +556,12 @@ class Table(object):
         # file.  Only when the list covers the entire extent of the
         # file will that file be removed.
         datafile = os.path.join(self.root, subdir, filename)
-        cachefile = datafile + ".removed"
+        cachefile = datafile + b".removed"
         try:
             with open(cachefile, "rb") as f:
                 ranges = pickle.load(f)
             cachefile_present = True
-        except:
+        except Exception:
             ranges = []
             cachefile_present = False
 
@@ -526,7 +583,8 @@ class Table(object):
                 # Not connected; append previous and start again
                 merged.append(prev)
                 prev = new
-        if prev is not None:
-            merged.append(prev)
+        # Last range we were looking at goes into the file.  We know
+        # there was at least one (the one we just removed).
+        merged.append(prev)
 
         # If the range covered the whole file, we can delete it now.
 
@@ -1,12 +1,15 @@
 """Exceptions"""
 
+
 class NilmDBError(Exception):
     """Base exception for NilmDB errors"""
-    def __init__(self, message = "Unspecified error"):
-        Exception.__init__(self, message)
+    def __init__(self, msg="Unspecified error"):
+        super().__init__(msg)
 
+
 class StreamError(NilmDBError):
     pass
 
+
 class OverlapError(NilmDBError):
     pass
 
@@ -1,3 +1,5 @@
+# cython: language_level=2
+
 """Interval, IntervalSet
 
 The Interval implemented here is just like
@@ -58,9 +60,19 @@ cdef class Interval:
         return ("[" + timestamp_to_string(self.start) +
                 " -> " + timestamp_to_string(self.end) + ")")
 
-    def __cmp__(self, Interval other):
-        """Compare two intervals.  If non-equal, order by start then end"""
-        return cmp(self.start, other.start) or cmp(self.end, other.end)
+    # Compare two intervals.  If non-equal, order by start then end
+    def __lt__(self, Interval other):
+        return (self.start, self.end) < (other.start, other.end)
+    def __gt__(self, Interval other):
+        return (self.start, self.end) > (other.start, other.end)
+    def __le__(self, Interval other):
+        return (self.start, self.end) <= (other.start, other.end)
+    def __ge__(self, Interval other):
+        return (self.start, self.end) >= (other.start, other.end)
+    def __eq__(self, Interval other):
+        return (self.start, self.end) == (other.start, other.end)
+    def __ne__(self, Interval other):
+        return (self.start, self.end) != (other.start, other.end)
 
     cpdef intersects(self, Interval other):
         """Return True if two Interval objects intersect"""
@@ -286,22 +298,17 @@ cdef class IntervalSet:
         (potentially) subsetted to make the one that is being
         returned.
         """
-        if not isinstance(interval, Interval):
-            raise TypeError("bad type")
-        for n in self.tree.intersect(interval.start, interval.end):
-            i = n.obj
-            if i:
-                if i.start >= interval.start and i.end <= interval.end:
-                    if orig:
-                        yield (i, i)
-                    else:
-                        yield i
-                else:
-                    subset = i.subset(max(i.start, interval.start),
-                                      min(i.end, interval.end))
-                    if orig:
-                        yield (subset, i)
-                    else:
-                        yield subset
+        if orig:
+            for n in self.tree.intersect(interval.start, interval.end):
+                i = n.obj
+                subset = i.subset(max(i.start, interval.start),
+                                  min(i.end, interval.end))
+                yield (subset, i)
+        else:
+            for n in self.tree.intersect(interval.start, interval.end):
+                i = n.obj
+                subset = i.subset(max(i.start, interval.start),
+                                  min(i.end, interval.end))
+                yield subset
 
     cpdef intersects(self, Interval other):
 
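__cmp__ and the cmp() builtin no longer exist in Python 3, so each rich comparison is spelled out; comparing (start, end) tuples gives the same "start then end" ordering. In plain Python most of these could be derived with functools.total_ordering, though the explicit methods above avoid that machinery in a cdef class. A sketch of the equivalent ordering:

    import functools

    @functools.total_ordering
    class SimpleInterval:
        # Plain-Python equivalent of the ordering defined above.
        def __init__(self, start, end):
            self.start, self.end = start, end
        def __eq__(self, other):
            return (self.start, self.end) == (other.start, other.end)
        def __lt__(self, other):
            return (self.start, self.end) < (other.start, other.end)

    assert SimpleInterval(1, 5) < SimpleInterval(1, 7) <= SimpleInterval(2, 0)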
@@ -7,11 +7,13 @@ Object that represents a NILM database file.
 Manages both the SQL database and the table storage backend.
 """
 
-# Need absolute_import so that "import nilmdb" won't pull in
-# nilmdb.py, but will pull the parent nilmdb module instead.
-from __future__ import absolute_import
+import os
+import errno
+import sqlite3
 
 import nilmdb.utils
-from nilmdb.utils.printf import *
+from nilmdb.utils.printf import printf
+from nilmdb.utils.time import timestamp_to_bytes
 
 from nilmdb.utils.interval import IntervalError
 from nilmdb.server.interval import Interval, DBInterval, IntervalSet
@@ -19,11 +21,6 @@ from nilmdb.server.interval import Interval, DBInterval, IntervalSet
 from nilmdb.server import bulkdata
 from nilmdb.server.errors import NilmDBError, StreamError, OverlapError
 
-import sqlite3
-import os
-import errno
-import bisect
-
 # Note about performance and transactions:
 #
 # Committing a transaction in the default sync mode (PRAGMA synchronous=FULL)
@@ -37,7 +34,7 @@ import bisect
 # seems that 'PRAGMA synchronous=NORMAL' and 'PRAGMA journal_mode=WAL'
 # give an equivalent speedup more safely.  That is what is used here.
 _sql_schema_updates = {
-    0: { "next": 1, "sql": """
+    0: {"next": 1, "sql": """
     -- All streams
     CREATE TABLE streams(
         id INTEGER PRIMARY KEY,		-- stream ID
@@ -61,29 +58,47 @@ _sql_schema_updates = {
         end_pos INTEGER NOT NULL
     );
     CREATE INDEX _ranges_index ON ranges (stream_id, start_time, end_time);
-    """ },
+    """},
 
-    1: { "next": 3, "sql": """
+    1: {"next": 3, "sql": """
     -- Generic dictionary-type metadata that can be associated with a stream
     CREATE TABLE metadata(
         stream_id INTEGER NOT NULL,
         key TEXT NOT NULL,
         value TEXT
     );
-    """ },
+    """},
 
-    2: { "error": "old format with floating-point timestamps requires "
-         "nilmdb 1.3.1 or older" },
+    2: {"error": "old format with floating-point timestamps requires "
+        "nilmdb 1.3.1 or older"},
 
-    3: { "next": None },
+    3: {"next": None},
 }
 
+
 @nilmdb.utils.must_close()
-class NilmDB(object):
+class NilmDB():
     verbose = 0
 
-    def __init__(self, basepath, max_results=None,
+    def __init__(self, basepath,
+                 max_results=None,
+                 max_removals=None,
+                 max_int_removals=None,
                  bulkdata_args=None):
+        """Initialize NilmDB at the given basepath.
+        Other arguments are for debugging / testing:
+
+        'max_results' is the max rows to send in a single
+        stream_intervals or stream_extract response.
+
+        'max_removals' is the max rows to delete at once
+        in stream_remove.
+
+        'max_int_removals' is the max intervals to delete
+        at once in stream_remove.
+
+        'bulkdata_args' is kwargs for the bulkdata module.
+        """
         if bulkdata_args is None:
             bulkdata_args = {}
 
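A hypothetical test setup using the new constructor knobs; small limits force the batching paths in extract/remove to run even with tiny datasets (the import path is assumed from the module layout above):

    import nilmdb.server

    db = nilmdb.server.NilmDB("/tmp/testdb",
                              max_results=2,
                              max_removals=4,
                              max_int_removals=2)
    try:
        pass  # ... exercise stream operations ...
    finally:
        db.close()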
@@ -102,22 +117,26 @@ class NilmDB(object):
 
         # SQLite database too
         sqlfilename = os.path.join(self.basepath, "data.sql")
-        self.con = sqlite3.connect(sqlfilename, check_same_thread = True)
+        self.con = sqlite3.connect(sqlfilename, check_same_thread=True)
         try:
             self._sql_schema_update()
-        finally: # pragma: no cover
+        except Exception:
             self.data.close()
+            raise
 
         # See big comment at top about the performance implications of this
         self.con.execute("PRAGMA synchronous=NORMAL")
         self.con.execute("PRAGMA journal_mode=WAL")
 
         # Approximate largest number of elements that we want to send
-        # in a single reply (for stream_intervals, stream_extract)
-        if max_results:
-            self.max_results = max_results
-        else:
-            self.max_results = 16384
+        # in a single reply (for stream_intervals, stream_extract).
+        self.max_results = max_results or 16384
+
+        # Remove up to this many rows per call to stream_remove.
+        self.max_removals = max_removals or 1048576
+
+        # Remove up to this many intervals per call to stream_remove.
+        self.max_int_removals = max_int_removals or 4096
 
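The two PRAGMAs implement the tradeoff described in the comment block near the top of the module: WAL plus synchronous=NORMAL keeps commits fast while a crash can only lose the most recent transactions, never corrupt the database file. The same configuration in isolation:

    import sqlite3

    con = sqlite3.connect("data.sql")
    con.execute("PRAGMA synchronous=NORMAL")
    con.execute("PRAGMA journal_mode=WAL")  # returns the new mode, e.g. 'wal'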
    def get_basepath(self):
 | 
					    def get_basepath(self):
 | 
				
			||||||
        return self.basepath
 | 
					        return self.basepath
 | 
				
			||||||
@@ -126,6 +145,7 @@ class NilmDB(object):
         if self.con:
             self.con.commit()
             self.con.close()
+            self.con = None
         self.data.close()
 
     def _sql_schema_update(self):

@@ -134,18 +154,18 @@ class NilmDB(object):
         oldversion = version
 
         while True:
-            if version not in _sql_schema_updates: # pragma: no cover
+            if version not in _sql_schema_updates:
                 raise Exception(self.basepath + ": unknown database version "
                                 + str(version))
             update = _sql_schema_updates[version]
-            if "error" in update: # pragma: no cover
+            if "error" in update:
                 raise Exception(self.basepath + ": can't use database version "
                                 + str(version) + ": " + update["error"])
             if update["next"] is None:
                 break
             cur.executescript(update["sql"])
             version = update["next"]
-            if self.verbose: # pragma: no cover
+            if self.verbose:
                 printf("Database schema updated to %d\n", version)
 
         if version != oldversion:
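The loop above is driven by a table of schema migrations; from its checks, each entry carries an optional "error", a "next" version, and "sql" to execute. A sketch of a table shape compatible with that walk (the version numbers and SQL are invented for illustration):

    # Hypothetical migration table compatible with the walk above: keys are
    # schema versions, "sql" upgrades the database to version "next", a
    # terminal entry has "next": None, and "error" marks versions that can
    # no longer be upgraded.
    _sql_schema_updates = {
        0: {"next": 1, "sql": "ALTER TABLE streams ADD COLUMN layout TEXT;"},
        1: {"next": None, "sql": ""},  # current version; the loop stops here
        99: {"error": "too old; recreate the database",
             "next": None, "sql": ""},
    }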
@@ -161,7 +181,7 @@ class NilmDB(object):
             raise NilmDBError("start must precede end")
         return (start, end)
 
-    @nilmdb.utils.lru_cache(size = 16)
+    @nilmdb.utils.lru_cache(size=64)
     def _get_intervals(self, stream_id):
         """
         Return a mutable IntervalSet corresponding to the given stream ID.

@@ -176,7 +196,7 @@ class NilmDB(object):
                 iset += DBInterval(start_time, end_time,
                                    start_time, end_time,
                                    start_pos, end_pos)
-        except IntervalError: # pragma: no cover
+        except IntervalError:
             raise NilmDBError("unexpected overlap in ranges table!")
 
         return iset
@@ -203,10 +223,6 @@ class NilmDB(object):
         # Load this stream's intervals
         iset = self._get_intervals(stream_id)
 
-        # Check for overlap
-        if iset.intersects(interval): # pragma: no cover (gets caught earlier)
-            raise NilmDBError("new interval overlaps existing data")
-
         # Check for adjacency.  If there's a stream in the database
         # that ends exactly when this one starts, and the database
         # rows match up, we can make one interval that covers the

@@ -249,10 +265,6 @@ class NilmDB(object):
         original: original DBInterval; must be already present in DB
         to_remove: DBInterval to remove; must be subset of 'original'
         """
-        # Just return if we have nothing to remove
-        if remove.start == remove.end: # pragma: no cover
-            return
-
         # Load this stream's intervals
         iset = self._get_intervals(stream_id)
 
@@ -267,7 +279,8 @@ class NilmDB(object):
         # the removed piece was in the middle.
         def add(iset, start, end, start_pos, end_pos):
             iset += DBInterval(start, end, start, end, start_pos, end_pos)
-            self._sql_interval_insert(stream_id, start, end, start_pos, end_pos)
+            self._sql_interval_insert(stream_id, start, end,
+                                      start_pos, end_pos)
 
         if original.start != remove.start:
             # Interval before the removed region

@@ -284,7 +297,7 @@ class NilmDB(object):
 
         return
 
-    def stream_list(self, path = None, layout = None, extended = False):
+    def stream_list(self, path=None, layout=None, extended=False):
         """Return list of lists of all streams in the database.
 
         If path is specified, include only streams with a path that
@@ -293,10 +306,10 @@ class NilmDB(object):
         If layout is specified, include only streams with a layout
         that matches the given string.
 
-        If extended = False, returns a list of lists containing
+        If extended=False, returns a list of lists containing
         the path and layout: [ path, layout ]
 
-        If extended = True, returns a list of lists containing
+        If extended=True, returns a list of lists containing
         more information:
            path
            layout

@@ -323,9 +336,9 @@ class NilmDB(object):
             params += (path,)
         query += " GROUP BY streams.id ORDER BY streams.path"
         result = self.con.execute(query, params).fetchall()
-        return [ list(x) for x in result ]
+        return [list(x) for x in result]
 
-    def stream_intervals(self, path, start = None, end = None, diffpath = None):
+    def stream_intervals(self, path, start=None, end=None, diffpath=None):
         """
         List all intervals in 'path' between 'start' and 'end'.  If
         'diffpath' is not none, list instead the set-difference
@@ -334,14 +347,14 @@ class NilmDB(object):
 
         Returns (intervals, restart) tuple.
 
-        intervals is a list of [start,end] timestamps of all intervals
+        'intervals' is a list of [start,end] timestamps of all intervals
         that exist for path, between start and end.
 
-        restart, if nonzero, means that there were too many results to
-        return in a single request.  The data is complete from the
-        starting timestamp to the point at which it was truncated,
-        and a new request with a start time of 'restart' will fetch
-        the next block of data.
+        'restart', if not None, means that there were too many results
+        to return in a single request.  The data is complete from the
+        starting timestamp to the point at which it was truncated, and
+        a new request with a start time of 'restart' will fetch the
+        next block of data.
         """
         stream_id = self._stream_id(path)
         intervals = self._get_intervals(stream_id)

@@ -363,7 +376,7 @@ class NilmDB(object):
                 break
             result.append([i.start, i.end])
         else:
-            restart = 0
+            restart = None
         return (result, restart)
 
     def stream_create(self, path, layout_name):
@@ -397,8 +410,8 @@ class NilmDB(object):
 
     def stream_set_metadata(self, path, data):
         """Set stream metadata from a dictionary, e.g.
-           { description = 'Downstairs lighting',
-             v_scaling = 123.45 }
+           { description: 'Downstairs lighting',
+             v_scaling: 123.45 }
           This replaces all existing metadata.
           """
         stream_id = self._stream_id(path)
@@ -439,28 +452,37 @@ class NilmDB(object):
                         (newpath, stream_id))
 
     def stream_destroy(self, path):
-        """Fully remove a table and all of its data from the database.
-        No way to undo it!  Metadata is removed."""
+        """Fully remove a table from the database.  Fails if there are
+        any intervals data present; remove them first.  Metadata is
+        also removed."""
         stream_id = self._stream_id(path)
 
-        # Delete the cached interval data (if it was cached)
+        # Verify that no intervals are present, and clear the cache
+        iset = self._get_intervals(stream_id)
+        if iset:
+            raise NilmDBError("all intervals must be removed before "
+                              "destroying a stream")
         self._get_intervals.cache_remove(self, stream_id)
 
-        # Delete the data
+        # Delete the bulkdata storage
        self.data.destroy(path)
 
-        # Delete metadata, stream, intervals
+        # Delete metadata, stream, intervals (should be none)
         with self.con as con:
             con.execute("DELETE FROM metadata WHERE stream_id=?", (stream_id,))
             con.execute("DELETE FROM ranges WHERE stream_id=?", (stream_id,))
             con.execute("DELETE FROM streams WHERE id=?", (stream_id,))
 
-    def stream_insert(self, path, start, end, data):
+    def stream_insert(self, path, start, end, data, binary=False):
         """Insert new data into the database.
 
           path: Path at which to add the data
           start: Starting timestamp
           end: Ending timestamp
           data: Textual data, formatted according to the layout of path
+
+           'binary', if True, means that 'data' is raw binary:
+           little-endian, matching the current table's layout,
+           including the int64 timestamp.
           """
         # First check for basic overlap using timestamp info given.
         stream_id = self._stream_id(path)

@@ -474,7 +496,7 @@ class NilmDB(object):
         # there are any parse errors.
         table = self.data.getnode(path)
         row_start = table.nrows
-        table.append_string(data, start, end)
+        table.append_data(data, start, end, binary)
         row_end = table.nrows
 
         # Insert the record into the sql database.
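Since stream_destroy now refuses to drop a stream that still holds intervals, callers are expected to remove data first. A sketch of the new calling sequence, using the (removed, restart) tuple that stream_remove returns later in this diff; that None start/end bounds select the whole stream is an assumption about _check_user_times:

    # Remove all data in pages, then destroy the stream.  stream_remove may
    # stop early (bounded by max_removals / max_int_removals), so loop on
    # the returned restart cursor before calling stream_destroy.
    def wipe_stream(db, path):
        start = None
        while True:
            (removed, restart) = db.stream_remove(path, start, None)
            if restart is None:
                break
            start = restart
        db.stream_destroy(path)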
@@ -483,6 +505,17 @@ class NilmDB(object):
         # And that's all
         return
 
+    def _bisect_left(self, a, x, lo, hi):
+        # Like bisect.bisect_left, but doesn't choke on large indices on
+        # 32-bit systems, like bisect's fast C implementation does.
+        while lo < hi:
+            mid = (lo + hi) // 2
+            if a[mid] < x:
+                lo = mid + 1
+            else:
+                hi = mid
+        return lo
+
     def _find_start(self, table, dbinterval):
         """
         Given a DBInterval, find the row in the database that
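The pure-Python _bisect_left added above is a drop-in for the standard library routine, kept in Python only so that row indices larger than a 32-bit C size don't trip the accelerated implementation. A quick standalone equivalence check on small inputs (the free-function form here is for illustration):

    import bisect

    def bisect_left(a, x, lo, hi):
        # Same loop as the method above, outside the class for testing.
        while lo < hi:
            mid = (lo + hi) // 2
            if a[mid] < x:
                lo = mid + 1
            else:
                hi = mid
        return lo

    a = [10, 20, 20, 30]
    for x in (5, 10, 20, 25, 35):
        assert bisect_left(a, x, 0, len(a)) == bisect.bisect_left(a, x, 0, len(a))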
@@ -493,7 +526,7 @@ class NilmDB(object):
         # Optimization for the common case where an interval wasn't truncated
         if dbinterval.start == dbinterval.db_start:
             return dbinterval.db_startpos
-        return bisect.bisect_left(table,
+        return self._bisect_left(table,
                                  dbinterval.start,
                                  dbinterval.db_startpos,
                                  dbinterval.db_endpos)

@@ -512,28 +545,36 @@ class NilmDB(object):
         # want to include the given timestamp in the results.  This is
         # so a queries like 1:00 -> 2:00 and 2:00 -> 3:00 return
         # non-overlapping data.
-        return bisect.bisect_left(table,
+        return self._bisect_left(table,
                                  dbinterval.end,
                                  dbinterval.db_startpos,
                                  dbinterval.db_endpos)
 
-    def stream_extract(self, path, start = None, end = None, count = False):
+    def stream_extract(self, path, start=None, end=None,
+                       count=False, markup=False, binary=False):
         """
         Returns (data, restart) tuple.
 
-        data is ASCII-formatted data from the database, formatted
+        'data' is ASCII-formatted data from the database, formatted
         according to the layout of the stream.
 
-        restart, if nonzero, means that there were too many results to
+        'restart', if not None, means that there were too many results to
         return in a single request.  The data is complete from the
         starting timestamp to the point at which it was truncated,
         and a new request with a start time of 'restart' will fetch
         the next block of data.
 
-        count, if true, means to not return raw data, but just the count
+        'count', if true, means to not return raw data, but just the count
         of rows that would have been returned.  This is much faster
         than actually fetching the data.  It is not limited by
         max_results.
+
+        'markup', if true, indicates that returned data should be
+        marked with a comment denoting when a particular interval
+        starts, and another comment when an interval ends.
+
+        'binary', if true, means to return raw binary rather than
+        ASCII-formatted data.
         """
         stream_id = self._stream_id(path)
         table = self.data.getnode(path)
@@ -543,7 +584,9 @@ class NilmDB(object):
         result = []
         matched = 0
         remaining = self.max_results
-        restart = 0
+        restart = None
+        if binary and (markup or count):
+            raise NilmDBError("binary mode can't be used with markup or count")
         for interval in intervals.intersection(requested):
             # Reading single rows from the table is too slow, so
             # we use two bisections to find both the starting and

@@ -562,25 +605,46 @@ class NilmDB(object):
                 row_end = row_max
                 restart = table[row_max]
 
+            # Add markup
+            if markup:
+                result.append(b"# interval-start " +
+                              timestamp_to_bytes(interval.start) + b"\n")
+
             # Gather these results up
-            result.append(table.get_data(row_start, row_end))
+            result.append(table.get_data(row_start, row_end, binary))
 
             # Count them
             remaining -= row_end - row_start
 
-            if restart:
+            # Add markup, and exit if restart is set.
+            if restart is not None:
+                if markup:
+                    result.append(b"# interval-end " +
+                                  timestamp_to_bytes(restart) + b"\n")
                 break
+            if markup:
+                result.append(b"# interval-end " +
+                              timestamp_to_bytes(interval.end) + b"\n")
 
         if count:
             return matched
-        return ("".join(result), restart)
+        full_result = b"".join(result)
+        return (full_result, restart)
 
-    def stream_remove(self, path, start = None, end = None):
+    def stream_remove(self, path, start=None, end=None):
         """
         Remove data from the specified time interval within a stream.
-        Removes all data in the interval [start, end), and intervals
-        are truncated or split appropriately.  Returns the number of
-        data points removed.
+
+        Removes data in the interval [start, end), and intervals are
+        truncated or split appropriately.
+
+        Returns a (removed, restart) tuple.
+
+        'removed' is the number of data points that were removed.
+
+        'restart', if not None, means there were too many rows to
+        remove in a single request.  This function should be called
+        again with a start time of 'restart' to complete the removal.
         """
         stream_id = self._stream_id(path)
         table = self.data.getnode(path)
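With markup=True, the extract loop above brackets each interval's rows with comment lines built from timestamp_to_bytes. An illustrative reply (the timestamps are invented, and the data rows assume a hypothetical two-float layout; only the "# interval-start" / "# interval-end" framing is taken from the code):

    reply = (b"# interval-start 1234000000\n"
             b"1234000000 2.5 3.5\n"
             b"1234001000 2.6 3.4\n"
             b"# interval-end 1234002000\n"
             b"# interval-start 1240000000\n"
             b"1240000000 2.7 3.3\n"
             b"# interval-end 1240001000\n")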
@@ -588,16 +652,34 @@ class NilmDB(object):
         (start, end) = self._check_user_times(start, end)
         to_remove = Interval(start, end)
         removed = 0
+        remaining = self.max_removals
+        int_remaining = self.max_int_removals
+        restart = None
 
         # Can't remove intervals from within the iterator, so we need to
         # remember what's currently in the intersection now.
-        all_candidates = list(intervals.intersection(to_remove, orig = True))
+        all_candidates = list(intervals.intersection(to_remove, orig=True))
+
+        remove_start = None
+        remove_end = None
 
         for (dbint, orig) in all_candidates:
+            # Stop if we've hit the max number of interval removals
+            if int_remaining <= 0:
+                restart = dbint.start
+                break
+
             # Find row start and end
             row_start = self._find_start(table, dbint)
             row_end = self._find_end(table, dbint)
 
+            # Shorten it if we'll hit the maximum number of removals
+            row_max = row_start + remaining
+            if row_max < row_end:
+                row_end = row_max
+                dbint.end = table[row_max]
+                restart = dbint.end
+
             # Adjust the DBInterval to match the newly found ends
             dbint.db_start = dbint.start
             dbint.db_end = dbint.end

@@ -607,10 +689,29 @@ class NilmDB(object):
             # Remove interval from the database
             self._remove_interval(stream_id, orig, dbint)
 
-            # Remove data from the underlying table storage
-            table.remove(row_start, row_end)
+            # Remove data from the underlying table storage,
+            # coalescing adjacent removals to reduce the number of calls
+            # to table.remove.
+            if remove_end == row_start:
+                # Extend our coalesced region
+                remove_end = row_end
+            else:
+                # Perform previous removal, then save this one
+                if remove_end is not None:
+                    table.remove(remove_start, remove_end)
+                remove_start = row_start
+                remove_end = row_end
 
             # Count how many were removed
             removed += row_end - row_start
+            remaining -= row_end - row_start
+            int_remaining -= 1
 
-        return removed
+            if restart is not None:
+                break
+
+        # Perform any final coalesced removal
+        if remove_end is not None:
+            table.remove(remove_start, remove_end)
+
+        return (removed, restart)
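The coalescing in stream_remove is worth isolating: adjacent half-open [row_start, row_end) ranges are merged so table.remove runs once per contiguous run rather than once per interval. A standalone sketch of the same bookkeeping, with a callback standing in for table.remove:

    # Coalesce adjacent half-open row ranges, mirroring the remove_start /
    # remove_end logic above.
    def remove_ranges(remove, ranges):
        remove_start = None
        remove_end = None
        for (row_start, row_end) in ranges:
            if remove_end == row_start:
                remove_end = row_end            # extend the coalesced region
            else:
                if remove_end is not None:
                    remove(remove_start, remove_end)
                remove_start = row_start
                remove_end = row_end
        if remove_end is not None:
            remove(remove_start, remove_end)    # final coalesced removal

    calls = []
    remove_ranges(lambda a, b: calls.append((a, b)),
                  [(0, 10), (10, 20), (30, 40)])
    assert calls == [(0, 20), (30, 40)]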
@@ -1,3 +1,5 @@
+# cython: language_level=2
+
 cdef class RBNode:
     cdef public object obj
     cdef public double start, end

@@ -1,5 +1,6 @@
 # cython: profile=False
 # cython: cdivision=True
+# cython: language_level=2
 
 """
 Jim Paris <jim@jtan.com>

@@ -5,6 +5,9 @@
 #include <ctype.h>
 #include <stdint.h>
 
+#define __STDC_FORMAT_MACROS
+#include <inttypes.h>
+
 /* Values missing from stdint.h */
 #define UINT8_MIN 0
 #define UINT16_MIN 0
@@ -19,16 +22,9 @@
 
 typedef int64_t timestamp_t;
 
-/* This code probably needs to be double-checked for the case where
-   sizeof(long) != 8, so enforce that here with something that will
-   fail at build time.  We assume that the python integer type can
-   hold an int64_t. */
-const static char __long_ok[1 - 2*!(sizeof(int64_t) ==
-				    sizeof(long int))] = { 0 };
-
 /* Somewhat arbitrary, just so we can use fixed sizes for strings
    etc. */
-static const int MAX_LAYOUT_COUNT = 128;
+static const int MAX_LAYOUT_COUNT = 1024;
 
 /* Error object and constants */
 static PyObject *ParseError;

@@ -58,7 +54,7 @@ static PyObject *raise_str(int line, int col, int code, const char *string)
 static PyObject *raise_int(int line, int col, int code, int64_t num)
 {
 	PyObject *o;
-	o = Py_BuildValue("(iiil)", line, col, code, num);
+	o = Py_BuildValue("(iiiL)", line, col, code, (long long)num);
 	if (o != NULL) {
 		PyErr_SetObject(ParseError, o);
 		Py_DECREF(o);
@@ -142,7 +138,7 @@ static void Rocket_dealloc(Rocket *self)
 		fclose(self->file);
 		self->file = NULL;
 	}
-	self->ob_type->tp_free((PyObject *)self);
+	Py_TYPE(self)->tp_free((PyObject *)self);
 }
 
 static PyObject *Rocket_new(PyTypeObject *type, PyObject *args, PyObject *kwds)

@@ -164,13 +160,19 @@ static PyObject *Rocket_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 static int Rocket_init(Rocket *self, PyObject *args, PyObject *kwds)
 {
 	const char *layout, *path;
+	int pathlen;
 	static char *kwlist[] = { "layout", "file", NULL };
-	if (!PyArg_ParseTupleAndKeywords(args, kwds, "sz", kwlist,
-					 &layout, &path))
+	if (!PyArg_ParseTupleAndKeywords(args, kwds, "sz#", kwlist,
+					 &layout, &path, &pathlen))
 		return -1;
 	if (!layout)
 		return -1;
 	if (path) {
+		if (strlen(path) != (size_t)pathlen) {
+			PyErr_SetString(PyExc_ValueError, "path must not "
+					"contain NUL characters");
+			return -1;
+		}
 		if ((self->file = fopen(path, "a+b")) == NULL) {
 			PyErr_SetFromErrno(PyExc_OSError);
 			return -1;
@@ -243,17 +245,17 @@ static PyObject *Rocket_get_file_size(Rocket *self)
 			return NULL;
 		}
 	}
-	return PyInt_FromLong(self->file_size);
+	return PyLong_FromLong(self->file_size);
 }
 
 /****
  * Append from string
  */
-static inline long int strtol10(const char *nptr, char **endptr) {
-	return strtol(nptr, endptr, 10);
+static inline long int strtoll10(const char *nptr, char **endptr) {
+	return strtoll(nptr, endptr, 10);
 }
-static inline long int strtoul10(const char *nptr, char **endptr) {
-	return strtoul(nptr, endptr, 10);
+static inline long int strtoull10(const char *nptr, char **endptr) {
+	return strtoull(nptr, endptr, 10);
 }
 
 /* .append_string(count, data, offset, linenum, start, end, last_timestamp) */
@@ -264,6 +266,7 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
 	int offset;
 	const char *linestart;
 	int linenum;
+	long long ll1, ll2, ll3;
 	timestamp_t start;
 	timestamp_t end;
 	timestamp_t last_timestamp;

@@ -276,14 +279,15 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
 	union64_t t64;
 	int i;
 
-	/* It would be nice to use 't#' instead of 's' for data,
-	   but we need the null termination for strto*.  If we had
-	   strnto* that took a length, we could use t# and not require
-	   a copy. */
-	if (!PyArg_ParseTuple(args, "isiilll:append_string", &count,
+	/* Input data is bytes.  Using 'y#' instead of 'y' might be
+	   preferable, but strto* requires the null terminator. */
+	if (!PyArg_ParseTuple(args, "iyiiLLL:append_string", &count,
 			      &data, &offset, &linenum,
-			      &start, &end, &last_timestamp))
+			      &ll1, &ll2, &ll3))
 		return NULL;
+	start = ll1;
+	end = ll2;
+	last_timestamp = ll3;
 
 	/* Skip spaces, but don't skip over a newline. */
 #define SKIP_BLANK(buf) do {			\
@@ -372,14 +376,14 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
 				goto extra_data_on_line;		\
 			break
 
-			CS(INT8,   strtol10,  t64.i, t8.i,  t8.u,         , 1);
-			CS(UINT8,  strtoul10, t64.u, t8.u,  t8.u,         , 1);
-			CS(INT16,  strtol10,  t64.i, t16.i, t16.u, le16toh, 2);
-			CS(UINT16, strtoul10, t64.u, t16.u, t16.u, le16toh, 2);
-			CS(INT32,  strtol10,  t64.i, t32.i, t32.u, le32toh, 4);
-			CS(UINT32, strtoul10, t64.u, t32.u, t32.u, le32toh, 4);
-			CS(INT64,  strtol10,  t64.i, t64.i, t64.u, le64toh, 8);
-			CS(UINT64, strtoul10, t64.u, t64.u, t64.u, le64toh, 8);
+			CS(INT8,   strtoll10,  t64.i, t8.i,  t8.u,         , 1);
+			CS(UINT8,  strtoull10, t64.u, t8.u,  t8.u,         , 1);
+			CS(INT16,  strtoll10,  t64.i, t16.i, t16.u, le16toh, 2);
+			CS(UINT16, strtoull10, t64.u, t16.u, t16.u, le16toh, 2);
+			CS(INT32,  strtoll10,  t64.i, t32.i, t32.u, le32toh, 4);
+			CS(UINT32, strtoull10, t64.u, t32.u, t32.u, le32toh, 4);
+			CS(INT64,  strtoll10,  t64.i, t64.i, t64.u, le64toh, 8);
+			CS(UINT64, strtoull10, t64.u, t64.u, t64.u, le64toh, 8);
 			CS(FLOAT32, strtod,   t64.d, t32.f, t32.u, le32toh, 4);
 			CS(FLOAT64, strtod,   t64.d, t64.d, t64.u, le64toh, 8);
 #undef CS

@@ -397,7 +401,8 @@ static PyObject *Rocket_append_string(Rocket *self, PyObject *args)
 	/* Build return value and return */
 	offset = buf - data;
 	PyObject *o;
-	o = Py_BuildValue("(iili)", written, offset, last_timestamp, linenum);
+	o = Py_BuildValue("(iiLi)", written, offset,
+			  (long long)last_timestamp, linenum);
 	return o;
 err:
 	PyErr_SetFromErrno(PyExc_OSError);
@@ -420,7 +425,73 @@ extra_data_on_line:
 }
 
 /****
- * Extract to string
+ * Append from binary data
+ */
+
+/* .append_binary(count, data, offset, linenum, start, end, last_timestamp) */
+static PyObject *Rocket_append_binary(Rocket *self, PyObject *args)
+{
+	int count;
+	const uint8_t *data;
+	int data_len;
+	int linenum;
+	int offset;
+	long long ll1, ll2, ll3;
+	timestamp_t start;
+	timestamp_t end;
+	timestamp_t last_timestamp;
+
+	if (!PyArg_ParseTuple(args, "iy#iiLLL:append_binary",
+			      &count, &data, &data_len, &offset,
+			      &linenum, &ll1, &ll2, &ll3))
+		return NULL;
+	start = ll1;
+	end = ll2;
+	last_timestamp = ll3;
+
+	/* Advance to offset */
+	if (offset > data_len)
+		return raise_str(0, 0, ERR_OTHER, "bad offset");
+	data += offset;
+	data_len -= offset;
+
+	/* Figure out max number of rows to insert */
+	int rows = data_len / self->binary_size;
+	if (rows > count)
+		rows = count;
+
+	/* Check timestamps */
+	timestamp_t ts;
+	int i;
+	for (i = 0; i < rows; i++) {
+		/* Read raw timestamp, byteswap if needed */
+		memcpy(&ts, &data[i * self->binary_size], 8);
+		ts = le64toh(ts);
+
+		/* Check limits */
+		if (ts <= last_timestamp)
+			return raise_int(i, 0, ERR_NON_MONOTONIC, ts);
+		last_timestamp = ts;
+		if (ts < start || ts >= end)
+			return raise_int(i, 0, ERR_OUT_OF_INTERVAL, ts);
+	}
+
+	/* Write binary data */
+	if (fwrite(data, self->binary_size, rows, self->file) != (size_t)rows) {
+		PyErr_SetFromErrno(PyExc_OSError);
+		return NULL;
+	}
+	fflush(self->file);
+
+	/* Build return value and return */
+	PyObject *o;
+	o = Py_BuildValue("(iiLi)", rows, offset + rows * self->binary_size,
+			  (long long)last_timestamp, linenum);
+	return o;
+}
+
+/****
+ * Extract to binary bytes object containing ASCII text-formatted data
  */
 
 static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
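append_binary expects rows packed little-endian, beginning with the int64 timestamp and followed by the table's values. A sketch of building such a buffer from Python with the struct module; the '<qff' format (one int64 plus two 32-bit floats) is an assumed layout for illustration, and binary_size would equal the struct size for that layout:

    import struct

    # One row = little-endian int64 timestamp plus the layout's values;
    # '<qff' assumes a hypothetical two-float layout.
    ROW = struct.Struct("<qff")

    def pack_rows(rows):
        # rows: iterable of (timestamp, v1, v2), timestamps strictly increasing
        return b"".join(ROW.pack(ts, v1, v2) for (ts, v1, v2) in rows)

    data = pack_rows([(1234000000, 2.5, 3.5),
                      (1234001000, 2.6, 3.4)])
    assert len(data) == 2 * ROW.size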
@@ -472,7 +543,7 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
 		if (fread(&t64.u, 8, 1, self->file) != 1)
 			goto err;
 		t64.u = le64toh(t64.u);
-		ret = sprintf(&str[len], "%ld", t64.i);
+		ret = sprintf(&str[len], "%" PRId64, t64.i);
 		if (ret <= 0)
 			goto err;
 		len += ret;

@@ -484,7 +555,7 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
 			/* read and format in a loop */			\
 			for (i = 0; i < self->layout_count; i++) {	\
 				if (fread(&disktype, bytes,		\
-					  1, self->file) < 0)		\
+					  1, self->file) != 1)		\
 					goto err;			\
 				disktype = letoh(disktype);		\
 				ret = sprintf(&str[len], " " fmt,	\

@@ -494,14 +565,14 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
 				len += ret;				\
 			}						\
 			break
-			CASE(INT8,   "%hhd",   t8.i,  t8.u,         , 1);
-			CASE(UINT8,  "%hhu",   t8.u,  t8.u,         , 1);
-			CASE(INT16,  "%hd",    t16.i, t16.u, le16toh, 2);
-			CASE(UINT16, "%hu",    t16.u, t16.u, le16toh, 2);
-			CASE(INT32,  "%d",     t32.i, t32.u, le32toh, 4);
-			CASE(UINT32, "%u",     t32.u, t32.u, le32toh, 4);
-			CASE(INT64,  "%ld",    t64.i, t64.u, le64toh, 8);
-			CASE(UINT64, "%lu",    t64.u, t64.u, le64toh, 8);
+			CASE(INT8,   "%" PRId8,  t8.i,  t8.u,         , 1);
+			CASE(UINT8,  "%" PRIu8,  t8.u,  t8.u,         , 1);
+			CASE(INT16,  "%" PRId16, t16.i, t16.u, le16toh, 2);
+			CASE(UINT16, "%" PRIu16, t16.u, t16.u, le16toh, 2);
+			CASE(INT32,  "%" PRId32, t32.i, t32.u, le32toh, 4);
+			CASE(UINT32, "%" PRIu32, t32.u, t32.u, le32toh, 4);
+			CASE(INT64,  "%" PRId64, t64.i, t64.u, le64toh, 8);
+			CASE(UINT64, "%" PRIu64, t64.u, t64.u, le64toh, 8);
 			/* These next two are a bit debatable.  floats
 			   are 6-9 significant figures, so we print 7.
 			   Doubles are 15-19, so we print 17.  This is

@@ -518,7 +589,7 @@ static PyObject *Rocket_extract_string(Rocket *self, PyObject *args)
 		str[len++] = '\n';
 	}
 
-	PyObject *pystr = PyString_FromStringAndSize(str, len);
+	PyObject *pystr = PyBytes_FromStringAndSize(str, len);
 	free(str);
 	return pystr;
 err:
@@ -527,6 +598,46 @@ err:
 	return NULL;
 }
 
+/****
+ * Extract to binary bytes object containing raw little-endian binary data
+ */
+static PyObject *Rocket_extract_binary(Rocket *self, PyObject *args)
+{
+	long count;
+	long offset;
+
+	if (!PyArg_ParseTuple(args, "ll", &offset, &count))
+		return NULL;
+	if (!self->file) {
+		PyErr_SetString(PyExc_Exception, "no file");
+		return NULL;
+	}
+	/* Seek to target location */
+	if (fseek(self->file, offset, SEEK_SET) < 0) {
+		PyErr_SetFromErrno(PyExc_OSError);
+		return NULL;
+	}
+
+	uint8_t *str;
+	int len = count * self->binary_size;
+	str = malloc(len);
+	if (str == NULL) {
+		PyErr_SetFromErrno(PyExc_OSError);
+		return NULL;
+	}
+
+	/* Data in the file is already in the desired little-endian
+	   binary format, so just read it directly. */
+	if (fread(str, self->binary_size, count, self->file) != (size_t)count) {
+		free(str);
+		PyErr_SetFromErrno(PyExc_OSError);
+		return NULL;
+	}
+
+	PyObject *pystr = PyBytes_FromStringAndSize((char *)str, len);
+	free(str);
+	return pystr;
+}
+
 /****
  * Extract timestamp
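The inverse direction: raw bytes returned by extract_binary can be decoded client-side with the same struct layout used when packing. A sketch under the same assumed two-float layout as the earlier example:

    import struct

    ROW = struct.Struct("<qff")  # assumed layout, as in the packing sketch

    def unpack_rows(data):
        # Yield (timestamp, v1, v2) tuples from a raw extract_binary reply.
        for fields in ROW.iter_unpack(data):
            yield fields

    raw = ROW.pack(1234000000, 2.5, 3.5) + ROW.pack(1234001000, 2.6, 3.4)
    stamps = [ts for (ts, v1, v2) in unpack_rows(raw)]
    assert stamps == [1234000000, 1234001000]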
@@ -551,7 +662,7 @@ static PyObject *Rocket_extract_timestamp(Rocket *self, PyObject *args)
 
 	/* Convert and return */
 	t64.u = le64toh(t64.u);
-	return Py_BuildValue("l", t64.i);
+	return Py_BuildValue("L", (long long)t64.i);
 }
 
 /****

@@ -571,11 +682,13 @@ static PyMemberDef Rocket_members[] = {
 };
 
 static PyMethodDef Rocket_methods[] = {
-	{ "close", (PyCFunction)Rocket_close, METH_NOARGS,
+	{ "close",
+	  (PyCFunction)Rocket_close, METH_NOARGS,
 	  "close(self)\n\n"
 	  "Close file handle" },
 
-	{ "append_string", (PyCFunction)Rocket_append_string, METH_VARARGS,
+	{ "append_string",
+	  (PyCFunction)Rocket_append_string, METH_VARARGS,
 	  "append_string(self, count, data, offset, line, start, end, ts)\n\n"
 	  "Parse string and append data.\n"
 	  "\n"
@@ -590,16 +703,46 @@ static PyMethodDef Rocket_methods[] = {
 	  "Raises ParseError if timestamps are non-monotonic, outside\n"
 	  "the start/end interval etc.\n"
 	  "\n"
-	  "On success, return a tuple with three values:\n"
+	  "On success, return a tuple:\n"
 	  "  added_rows: how many rows were added from the file\n"
 	  "  data_offset: current offset into the data string\n"
-	  "  last_timestamp: last timestamp we parsed" },
+	  "  last_timestamp: last timestamp we parsed\n"
+	  "  linenum: current line number" },
 
-	{ "extract_string", (PyCFunction)Rocket_extract_string, METH_VARARGS,
+	{ "append_binary",
+	  (PyCFunction)Rocket_append_binary, METH_VARARGS,
+	  "append_binary(self, count, data, offset, line, start, end, ts)\n\n"
+	  "Append binary data, which must match the data layout.\n"
+	  "\n"
+	  "  count: maximum number of rows to add\n"
+	  "  data: binary data\n"
+	  "  offset: byte offset into data to start adding\n"
+	  "  line: current line number (unused)\n"
+	  "  start: starting timestamp for interval\n"
+	  "  end: end timestamp for interval\n"
+	  "  ts: last timestamp that was previously parsed\n"
+	  "\n"
+	  "Raises ParseError if timestamps are non-monotonic, outside\n"
+	  "the start/end interval etc.\n"
+	  "\n"
+	  "On success, return a tuple:\n"
+	  "  added_rows: how many rows were added from the file\n"
+	  "  data_offset: current offset into the data string\n"
+	  "  last_timestamp: last timestamp we parsed\n"
+	  "  linenum: current line number (copied from argument)" },
+
+	{ "extract_string",
+	  (PyCFunction)Rocket_extract_string, METH_VARARGS,
 	  "extract_string(self, offset, count)\n\n"
 	  "Extract count rows of data from the file at offset offset.\n"
 	  "Return an ascii formatted string according to the layout" },
 
+	{ "extract_binary",
+	  (PyCFunction)Rocket_extract_binary, METH_VARARGS,
+	  "extract_binary(self, offset, count)\n\n"
+	  "Extract count rows of data from the file at offset offset.\n"
+	  "Return a raw binary string of data matching the data layout." },
+
 	{ "extract_timestamp",
 	  (PyCFunction)Rocket_extract_timestamp, METH_VARARGS,
 	  "extract_timestamp(self, offset)\n\n"

@@ -609,7 +752,7 @@ static PyMethodDef Rocket_methods[] = {
 };
 
 static PyTypeObject RocketType = {
-	PyObject_HEAD_INIT(NULL)
+	PyVarObject_HEAD_INIT(NULL, 0)
 | 
				
			||||||
	.tp_name	= "rocket.Rocket",
 | 
						.tp_name	= "rocket.Rocket",
 | 
				
			||||||
	.tp_basicsize	= sizeof(Rocket),
 | 
						.tp_basicsize	= sizeof(Rocket),
 | 
				
			||||||
@@ -634,17 +777,23 @@ static PyMethodDef module_methods[] = {
 | 
				
			|||||||
	{ NULL },
 | 
						{ NULL },
 | 
				
			||||||
};
 | 
					};
 | 
				
			||||||
 | 
					
 | 
				
			||||||
PyMODINIT_FUNC
 | 
					static struct PyModuleDef moduledef = {
 | 
				
			||||||
initrocket(void)
 | 
					        PyModuleDef_HEAD_INIT,
 | 
				
			||||||
 | 
					        .m_name        = "rocker",
 | 
				
			||||||
 | 
					        .m_doc         = "Rocket data parsing and formatting module",
 | 
				
			||||||
 | 
					        .m_size        = -1,
 | 
				
			||||||
 | 
					        .m_methods     = module_methods,
 | 
				
			||||||
 | 
					};
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					PyMODINIT_FUNC PyInit_rocket(void)
 | 
				
			||||||
{
 | 
					{
 | 
				
			||||||
	PyObject *module;
 | 
						PyObject *module;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
	RocketType.tp_new = PyType_GenericNew;
 | 
						RocketType.tp_new = PyType_GenericNew;
 | 
				
			||||||
	if (PyType_Ready(&RocketType) < 0)
 | 
						if (PyType_Ready(&RocketType) < 0)
 | 
				
			||||||
		return;
 | 
							return NULL;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
	module = Py_InitModule3("rocket", module_methods,
 | 
						module = PyModule_Create(&moduledef);
 | 
				
			||||||
				"Rocket data parsing and formatting module");
 | 
					 | 
				
			||||||
	Py_INCREF(&RocketType);
 | 
						Py_INCREF(&RocketType);
 | 
				
			||||||
	PyModule_AddObject(module, "Rocket", (PyObject *)&RocketType);
 | 
						PyModule_AddObject(module, "Rocket", (PyObject *)&RocketType);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -653,5 +802,5 @@ initrocket(void)
 | 
				
			|||||||
	PyModule_AddObject(module, "ParseError", ParseError);
 | 
						PyModule_AddObject(module, "ParseError", ParseError);
 | 
				
			||||||
	add_parseerror_codes(module);
 | 
						add_parseerror_codes(module);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
	return;
 | 
						return module;
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 
 | 
				
			|||||||
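The hunks above finish the Python 3 port of the rocket C extension: extract_binary returns a PyBytes object, extract_timestamp builds its result with "L" (long long) instead of "l", and the Python 2 initrocket()/Py_InitModule3 entry point becomes a PyModuleDef plus PyInit_rocket().  A rough usage sketch of the resulting module, based only on the method docstrings above -- the Rocket constructor arguments are an assumption, since they do not appear in this diff:

    import rocket
    r = rocket.Rocket("float32_8", "/path/to/bulkdata-file")  # args assumed
    raw = r.extract_binary(0, 100)   # 100 rows of raw little-endian bytes
    txt = r.extract_string(0, 100)   # same rows, ASCII-formatted
    ts = r.extract_timestamp(0)      # int64 timestamp of the row at offset 0
    r.close()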
nilmdb/server/server.py
@@ -1,151 +1,49 @@
 """CherryPy-based server for accessing NILM database via HTTP"""
 
-# Need absolute_import so that "import nilmdb" won't pull in
-# nilmdb.py, but will pull the nilmdb module instead.
-from __future__ import absolute_import
+import os
+import json
+import socket
+import traceback
+
+import psutil
+import cherrypy
 
 import nilmdb.server
-from nilmdb.utils.printf import *
+from nilmdb.utils.printf import sprintf
 from nilmdb.server.errors import NilmDBError
 from nilmdb.utils.time import string_to_timestamp
 
-import cherrypy
-import sys
-import os
-import simplejson as json
-import decorator
-import psutil
+from nilmdb.server.serverutil import (
+    chunked_response,
+    response_type,
+    exception_to_httperror,
+    CORS_allow,
+    json_to_request_params,
+    json_error_page,
+    cherrypy_start,
+    cherrypy_stop,
+    bool_param,
+    )
+
+# Add CORS_allow tool
+cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
 
-class NilmApp(object):
+
+class NilmApp():
     def __init__(self, db):
         self.db = db
 
-# Decorators
-def chunked_response(func):
-    """Decorator to enable chunked responses."""
-    # Set this to False to get better tracebacks from some requests
-    # (/stream/extract, /stream/intervals).
-    func._cp_config = { 'response.stream': True }
-    return func
-
-def response_type(content_type):
-    """Return a decorator-generating function that sets the
-    response type to the specified string."""
-    def wrapper(func, *args, **kwargs):
-        cherrypy.response.headers['Content-Type'] = content_type
-        return func(*args, **kwargs)
-    return decorator.decorator(wrapper)
-
-@decorator.decorator
-def workaround_cp_bug_1200(func, *args, **kwargs): # pragma: no cover
-    """Decorator to work around CherryPy bug #1200 in a response
-    generator.
-
-    Even if chunked responses are disabled, LookupError or
-    UnicodeError exceptions may still be swallowed by CherryPy due to
-    bug #1200.  This throws them as generic Exceptions instead so that
-    they make it through.
-    """
-    exc_info = None
-    try:
-        for val in func(*args, **kwargs):
-            yield val
-    except (LookupError, UnicodeError):
-        # Re-raise it, but maintain the original traceback
-        exc_info = sys.exc_info()
-        new_exc = Exception(exc_info[0].__name__ + ": " + str(exc_info[1]))
-        raise new_exc, None, exc_info[2]
-    finally:
-        del exc_info
-
-def exception_to_httperror(*expected):
-    """Return a decorator-generating function that catches expected
-    errors and throws a HTTPError describing it instead.
-
-        @exception_to_httperror(NilmDBError, ValueError)
-        def foo():
-            pass
-    """
-    def wrapper(func, *args, **kwargs):
-        exc_info = None
-        try:
-            return func(*args, **kwargs)
-        except expected:
-            # Re-raise it, but maintain the original traceback
-            exc_info = sys.exc_info()
-            new_exc = cherrypy.HTTPError("400 Bad Request", str(exc_info[1]))
-            raise new_exc, None, exc_info[2]
-        finally:
-            del exc_info
-    # We need to preserve the function's argspecs for CherryPy to
-    # handle argument errors correctly.  Decorator.decorator takes
-    # care of that.
-    return decorator.decorator(wrapper)
-
-# Custom CherryPy tools
-
-def CORS_allow(methods):
-    """This does several things:
-
-    Handles CORS preflight requests.
-    Adds Allow: header to all requests.
-    Raise 405 if request.method not in method.
-
-    It is similar to cherrypy.tools.allow, with the CORS stuff added.
-    """
-    request = cherrypy.request.headers
-    response = cherrypy.response.headers
-
-    if not isinstance(methods, (tuple, list)): # pragma: no cover
-        methods = [ methods ]
-    methods = [ m.upper() for m in methods if m ]
-    if not methods: # pragma: no cover
-        methods = [ 'GET', 'HEAD' ]
-    elif 'GET' in methods and 'HEAD' not in methods: # pragma: no cover
-        methods.append('HEAD')
-    response['Allow'] = ', '.join(methods)
-
-    # Allow all origins
-    if 'Origin' in request:
-        response['Access-Control-Allow-Origin'] = request['Origin']
-
-    # If it's a CORS request, send response.
-    request_method = request.get("Access-Control-Request-Method", None)
-    request_headers = request.get("Access-Control-Request-Headers", None)
-    if (cherrypy.request.method == "OPTIONS" and
-        request_method and request_headers):
-        response['Access-Control-Allow-Headers'] = request_headers
-        response['Access-Control-Allow-Methods'] = ', '.join(methods)
-        # Try to stop further processing and return a 200 OK
-        cherrypy.response.status = "200 OK"
-        cherrypy.response.body = ""
-        cherrypy.request.handler = lambda: ""
-        return
-
-    # Reject methods that were not explicitly allowed
-    if cherrypy.request.method not in methods:
-        raise cherrypy.HTTPError(405)
-
-cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
-
-# Helper for json_in tool to process JSON data into normal request
-# parameters.
-def json_to_request_params(body):
-    cherrypy.lib.jsontools.json_processor(body)
-    if not isinstance(cherrypy.request.json, dict):
-        raise cherrypy.HTTPError(415)
-    cherrypy.request.params.update(cherrypy.request.json)
 
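The large removal above is a relocation, not a deletion: the chunked_response, response_type, and exception_to_httperror decorators, the CORS_allow tool, and json_to_request_params now come from nilmdb.server.serverutil (see the import hunk), and the Python-2-only workaround_cp_bug_1200 wrapper is dropped entirely.  The tool is still registered and enabled the same way; a minimal sketch, using only names that appear in this diff:

    import cherrypy
    from nilmdb.server.serverutil import CORS_allow
    cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
    app_config = {'tools.CORS_allow.on': True,
                  'tools.CORS_allow.methods': ['GET', 'HEAD']}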
 # CherryPy apps
 class Root(NilmApp):
     """Root application for NILM database"""
 
-    def __init__(self, db):
-        super(Root, self).__init__(db)
-
     # /
     @cherrypy.expose
     def index(self):
-        raise cherrypy.NotFound()
+        cherrypy.response.headers['Content-Type'] = 'text/plain'
+        msg = sprintf("This is NilmDB version %s, running on host %s.\n",
+                      nilmdb.__version__, socket.getfqdn())
+        return msg
 
     # /favicon.ico
     @cherrypy.expose
@@ -165,27 +63,58 @@ class Root(NilmApp):
         """Return a dictionary with the database path,
         size of the database in bytes, and free disk space in bytes"""
         path = self.db.get_basepath()
-        return { "path": path,
-                 "size": nilmdb.utils.du(path),
-                 "free": psutil.disk_usage(path).free }
+        usage = psutil.disk_usage(path)
+        dbsize = nilmdb.utils.du(path)
+        return {
+            "path": path,
+            "size": dbsize,
+            "other": max(usage.used - dbsize, 0),
+            "reserved": max(usage.total - usage.used - usage.free, 0),
+            "free": usage.free
+        }
+
 
 class Stream(NilmApp):
     """Stream-specific operations"""
 
+    # Helpers
+    def _get_times(self, start_param, end_param):
+        (start, end) = (None, None)
+        try:
+            if start_param is not None:
+                start = string_to_timestamp(start_param)
+        except Exception:
+            raise cherrypy.HTTPError("400 Bad Request", sprintf(
+                "invalid start (%s): must be a numeric timestamp",
+                start_param))
+        try:
+            if end_param is not None:
+                end = string_to_timestamp(end_param)
+        except Exception:
+            raise cherrypy.HTTPError("400 Bad Request", sprintf(
+                "invalid end (%s): must be a numeric timestamp", end_param))
+        if start is not None and end is not None:
+            if start >= end:
+                raise cherrypy.HTTPError(
+                    "400 Bad Request",
+                    sprintf("start must precede end (%s >= %s)",
+                            start_param, end_param))
+        return (start, end)
+
     # /stream/list
     # /stream/list?layout=float32_8
     # /stream/list?path=/newton/prep&extended=1
     @cherrypy.expose
     @cherrypy.tools.json_out()
-    def list(self, path = None, layout = None, extended = None):
+    def list(self, path=None, layout=None, extended=None):
         """List all streams in the database.  With optional path or
         layout parameter, just list streams that match the given path
         or layout.
 
-        If extent is not given, returns a list of lists containing
-        the path and layout: [ path, layout ]
+        If extended is missing or zero, returns a list of lists
+        containing the path and layout: [ path, layout ]
 
-        If extended is provided, returns a list of lists containing
+        If extended is true, returns a list of lists containing
         extended info: [ path, layout, extent_min, extent_max,
         total_rows, total_seconds ].  More data may be added.
         """
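The hunk above also introduces the _get_times helper, which centralizes the start/end parsing and validation that insert, remove, intervals, and extract each did by hand in the old code.  Its contract, restated from the code:

    # _get_times("100", "200")  ->  (100, 200) as numeric timestamps
    # _get_times(None, "200")   ->  (None, 200)
    # _get_times("abc", None)   ->  HTTPError "400 Bad Request"
    # _get_times("200", "100")  ->  HTTPError "400 Bad Request" (start >= end)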
@@ -196,7 +125,7 @@ class Stream(NilmApp):
     @cherrypy.tools.json_in()
     @cherrypy.tools.json_out()
     @exception_to_httperror(NilmDBError, ValueError)
-    @cherrypy.tools.CORS_allow(methods = ["POST"])
+    @cherrypy.tools.CORS_allow(methods=["POST"])
     def create(self, path, layout):
         """Create a new stream in the database.  Provide path
         and one of the nilmdb.layout.layouts keys.
@@ -208,9 +137,9 @@ class Stream(NilmApp):
     @cherrypy.tools.json_in()
     @cherrypy.tools.json_out()
     @exception_to_httperror(NilmDBError)
-    @cherrypy.tools.CORS_allow(methods = ["POST"])
+    @cherrypy.tools.CORS_allow(methods=["POST"])
     def destroy(self, path):
-        """Delete a stream and its associated data."""
+        """Delete a stream.  Fails if any data is still present."""
         return self.db.stream_destroy(path)
 
     # /stream/rename?oldpath=/newton/prep&newpath=/newton/prep/1
@@ -218,7 +147,7 @@ class Stream(NilmApp):
     @cherrypy.tools.json_in()
     @cherrypy.tools.json_out()
     @exception_to_httperror(NilmDBError, ValueError)
-    @cherrypy.tools.CORS_allow(methods = ["POST"])
+    @cherrypy.tools.CORS_allow(methods=["POST"])
     def rename(self, oldpath, newpath):
         """Rename a stream."""
         return self.db.stream_rename(oldpath, newpath)
@@ -234,11 +163,11 @@ class Stream(NilmApp):
         try:
             data = self.db.stream_get_metadata(path)
         except nilmdb.server.nilmdb.StreamError as e:
-            raise cherrypy.HTTPError("404 Not Found", e.message)
+            raise cherrypy.HTTPError("404 Not Found", str(e))
         if key is None:  # If no keys specified, return them all
-            key = data.keys()
+            key = list(data.keys())
         elif not isinstance(key, list):
-            key = [ key ]
+            key = [key]
         result = {}
         for k in key:
             if k in data:
@@ -253,11 +182,9 @@ class Stream(NilmApp):
             try:
                 data = dict(json.loads(data))
             except TypeError as e:
-                raise NilmDBError("can't parse 'data' parameter: " + e.message)
+                raise NilmDBError("can't parse 'data' parameter: " + str(e))
         for key in data:
-            if not (isinstance(data[key], basestring) or
-                    isinstance(data[key], float) or
-                    isinstance(data[key], int)):
+            if not isinstance(data[key], (str, float, int)):
                 raise NilmDBError("metadata values must be a string or number")
         function(path, data)
 
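With the isinstance chain above collapsed to a single tuple test (basestring is gone in Python 3), metadata values must still be flat strings or numbers; illustrative examples, not taken from the diff:

    # accepted:  {"description": "prep data", "scale": 1.5, "channels": 8}
    # rejected:  {"extra": {"nested": "dict"}}   -> NilmDBError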
@@ -266,7 +193,7 @@ class Stream(NilmApp):
     @cherrypy.tools.json_in()
     @cherrypy.tools.json_out()
     @exception_to_httperror(NilmDBError, LookupError)
-    @cherrypy.tools.CORS_allow(methods = ["POST"])
+    @cherrypy.tools.CORS_allow(methods=["POST"])
     def set_metadata(self, path, data):
         """Set metadata for the named stream, replacing any existing
         metadata.  Data can be json-encoded or a plain dictionary."""
@@ -277,7 +204,7 @@ class Stream(NilmApp):
     @cherrypy.tools.json_in()
     @cherrypy.tools.json_out()
     @exception_to_httperror(NilmDBError, LookupError, ValueError)
-    @cherrypy.tools.CORS_allow(methods = ["POST"])
+    @cherrypy.tools.CORS_allow(methods=["POST"])
     def update_metadata(self, path, data):
         """Set metadata for the named stream, replacing any existing
         metadata.  Data can be json-encoded or a plain dictionary."""
@@ -287,33 +214,46 @@ class Stream(NilmApp):
     @cherrypy.expose
     @cherrypy.tools.json_out()
     @exception_to_httperror(NilmDBError, ValueError)
-    @cherrypy.tools.CORS_allow(methods = ["PUT"])
-    def insert(self, path, start, end):
+    @cherrypy.tools.CORS_allow(methods=["PUT"])
+    def insert(self, path, start, end, binary=False):
         """
         Insert new data into the database.  Provide textual data
         (matching the path's layout) as a HTTP PUT.
+
+        If 'binary' is True, expect raw binary data, rather than lines
+        of ASCII-formatted data.  Raw binary data is always
+        little-endian and matches the database types (including an
+        int64 timestamp).
         """
+        binary = bool_param(binary)
+
         # Important that we always read the input before throwing any
         # errors, to keep lengths happy for persistent connections.
         # Note that CherryPy 3.2.2 has a bug where this fails for GET
         # requests, if we ever want to handle those (issue #1134)
         body = cherrypy.request.body.read()
 
+        # Verify content type for binary data
+        content_type = cherrypy.request.headers.get('content-type')
+        if binary and content_type:
+            if content_type != "application/octet-stream":
+                raise cherrypy.HTTPError("400", "Content type must be "
+                                         "application/octet-stream for "
+                                         "binary data, not " + content_type)
+
+        # Note that non-binary data is *not* decoded from bytes to string,
+        # but rather passed directly to stream_insert.
+
         # Check path and get layout
-        streams = self.db.stream_list(path = path)
-        if len(streams) != 1:
-            raise cherrypy.HTTPError("404 Not Found", "No such stream")
+        if len(self.db.stream_list(path=path)) != 1:
+            raise cherrypy.HTTPError("404", "No such stream: " + path)
 
         # Check limits
-        start = string_to_timestamp(start)
-        end = string_to_timestamp(end)
-        if start >= end:
-            raise cherrypy.HTTPError("400 Bad Request",
-                                     "start must precede end")
+        (start, end) = self._get_times(start, end)
 
         # Pass the data directly to nilmdb, which will parse it and
         # raise a ValueError if there are any problems.
-        self.db.stream_insert(path, start, end, body)
+        self.db.stream_insert(path, start, end, body, binary)
 
         # Done
         return
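The insert hunk above adds a binary path: with binary=1 and an application/octet-stream body, rows arrive as little-endian machine-format data with an int64 timestamp, as the new docstring describes.  A hypothetical client-side sketch for a float32_8 stream -- the host, timestamps, row packing, and use of the requests library are assumptions; only the parameters and wire format come from the diff:

    import struct
    import requests
    # one row: int64 timestamp followed by 8 little-endian float32 values
    row = struct.pack('<q8f', 1234567890000000, *([0.0] * 8))
    requests.put("http://localhost:8080/stream/insert",
                 params={"path": "/newton/prep",
                         "start": "1234567890000000",
                         "end": "1234567890000001",
                         "binary": "1"},
                 data=row,
                 headers={"Content-Type": "application/octet-stream"})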
@@ -322,24 +262,34 @@ class Stream(NilmApp):
     # /stream/remove?path=/newton/prep&start=1234567890.0&end=1234567899.0
     @cherrypy.expose
     @cherrypy.tools.json_in()
-    @cherrypy.tools.json_out()
-    @exception_to_httperror(NilmDBError)
-    @cherrypy.tools.CORS_allow(methods = ["POST"])
-    def remove(self, path, start = None, end = None):
+    @cherrypy.tools.CORS_allow(methods=["POST"])
+    @chunked_response
+    @response_type("application/x-json-stream")
+    def remove(self, path, start=None, end=None):
         """
         Remove data from the backend database.  Removes all data in
-        the interval [start, end).  Returns the number of data points
-        removed.
+        the interval [start, end).
+
+        Returns the number of data points removed.  Since this is a potentially
+        long-running operation, multiple numbers may be returned as the
+        data gets removed from the backend database.  The total number of
+        points removed is the sum of all of these numbers.
         """
-        if start is not None:
-            start = string_to_timestamp(start)
-        if end is not None:
-            end = string_to_timestamp(end)
-        if start is not None and end is not None:
-            if start >= end:
-                raise cherrypy.HTTPError("400 Bad Request",
-                                         "start must precede end")
-        return self.db.stream_remove(path, start, end)
+        (start, end) = self._get_times(start, end)
+
+        if len(self.db.stream_list(path=path)) != 1:
+            raise cherrypy.HTTPError("404", "No such stream: " + path)
+
+        def content(start, end):
+            # Note: disable chunked responses to see tracebacks from here.
+            while True:
+                (removed, restart) = self.db.stream_remove(path, start, end)
+                response = json.dumps(removed) + "\r\n"
+                yield response.encode('utf-8')
+                if restart is None:
+                    break
+                start = restart
+        return content(start, end)
 
     # /stream/intervals?path=/newton/prep
     # /stream/intervals?path=/newton/prep&start=1234567890.0&end=1234567899.0
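Because remove now streams one count per line instead of returning a single JSON number, a client must sum the lines to recover the old return value; a sketch, with the requests library and host assumed:

    import json
    import requests
    resp = requests.post("http://localhost:8080/stream/remove",
                         params={"path": "/newton/prep"}, stream=True)
    total = sum(json.loads(line) for line in resp.iter_lines() if line)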
@@ -347,7 +297,7 @@ class Stream(NilmApp):
     @cherrypy.expose
     @chunked_response
     @response_type("application/x-json-stream")
-    def intervals(self, path, start = None, end = None, diffpath = None):
+    def intervals(self, path, start=None, end=None, diffpath=None):
         """
         Get intervals from backend database.  Streams the resulting
         intervals as JSON strings separated by CR LF pairs.  This may
@@ -362,31 +312,22 @@ class Stream(NilmApp):
         Note that the response type is the non-standard
         'application/x-json-stream' for lack of a better option.
         """
-        if start is not None:
-            start = string_to_timestamp(start)
-        if end is not None:
-            end = string_to_timestamp(end)
-
-        if start is not None and end is not None:
-            if start >= end:
-                raise cherrypy.HTTPError("400 Bad Request",
-                                         "start must precede end")
+        (start, end) = self._get_times(start, end)
 
-        if len(self.db.stream_list(path = path)) != 1:
+        if len(self.db.stream_list(path=path)) != 1:
             raise cherrypy.HTTPError("404", "No such stream: " + path)
 
-        if diffpath and len(self.db.stream_list(path = diffpath)) != 1:
+        if diffpath and len(self.db.stream_list(path=diffpath)) != 1:
             raise cherrypy.HTTPError("404", "No such stream: " + diffpath)
 
-        @workaround_cp_bug_1200
         def content(start, end):
             # Note: disable chunked responses to see tracebacks from here.
             while True:
                 (ints, restart) = self.db.stream_intervals(path, start, end,
                                                            diffpath)
-                response = ''.join([ json.dumps(i) + "\r\n" for i in ints ])
-                yield response
-                if restart == 0:
+                response = ''.join([json.dumps(i) + "\r\n" for i in ints])
+                yield response.encode('utf-8')
+                if restart is None:
                     break
                 start = restart
         return content(start, end)
@@ -394,71 +335,87 @@ class Stream(NilmApp):
     # /stream/extract?path=/newton/prep&start=1234567890.0&end=1234567899.0
     @cherrypy.expose
     @chunked_response
-    @response_type("text/plain")
-    def extract(self, path, start = None, end = None, count = False):
+    def extract(self, path, start=None, end=None,
+                count=False, markup=False, binary=False):
         """
         Extract data from backend database.  Streams the resulting
         entries as ASCII text lines separated by newlines.  This may
         make multiple requests to the nilmdb backend to avoid causing
         it to block for too long.
 
-        Add count=True to return a count rather than actual data.
+        If 'count' is True, returns a count rather than actual data.
+
+        If 'markup' is True, adds comments to the stream denoting each
+        interval's start and end timestamp.
+
+        If 'binary' is True, return raw binary data, rather than lines
+        of ASCII-formatted data.  Raw binary data is always
+        little-endian and matches the database types (including an
+        int64 timestamp).
         """
-        if start is not None:
-            start = string_to_timestamp(start)
-        if end is not None:
-            end = string_to_timestamp(end)
-
-        # Check parameters
-        if start is not None and end is not None:
-            if start >= end:
-                raise cherrypy.HTTPError("400 Bad Request",
-                                         "start must precede end")
+        binary = bool_param(binary)
+        markup = bool_param(markup)
+        count = bool_param(count)
+
+        (start, end) = self._get_times(start, end)
 
         # Check path and get layout
-        streams = self.db.stream_list(path = path)
-        if len(streams) != 1:
-            raise cherrypy.HTTPError("404 Not Found", "No such stream")
+        if len(self.db.stream_list(path=path)) != 1:
+            raise cherrypy.HTTPError("404", "No such stream: " + path)
 
-        @workaround_cp_bug_1200
-        def content(start, end, count):
+        if binary:
+            content_type = "application/octet-stream"
+            if markup or count:
+                raise cherrypy.HTTPError("400", "can't mix binary and "
+                                         "markup or count modes")
+        else:
+            content_type = "text/plain"
+        cherrypy.response.headers['Content-Type'] = content_type
+
+        def content(start, end):
             # Note: disable chunked responses to see tracebacks from here.
             if count:
-                matched = self.db.stream_extract(path, start, end, count)
-                yield sprintf("%d\n", matched)
+                matched = self.db.stream_extract(path, start, end,
+                                                 count=True)
+                yield sprintf(b"%d\n", matched)
                 return
 
             while True:
-                (data, restart) = self.db.stream_extract(path, start, end)
+                (data, restart) = self.db.stream_extract(
+                    path, start, end, count=False,
+                    markup=markup, binary=binary)
                 yield data
 
-                if restart == 0:
+                if restart is None:
                     return
                 start = restart
-        return content(start, end, count)
+        return content(start, end)
 
-class Exiter(object):
+
+class Exiter():
     """App that exits the server, for testing"""
     @cherrypy.expose
     def index(self):
         cherrypy.response.headers['Content-Type'] = 'text/plain'
+
         def content():
-            yield 'Exiting by request'
+            yield b'Exiting by request'
             raise SystemExit
         return content()
-    index._cp_config = { 'response.stream': True }
+    index._cp_config = {'response.stream': True}
 
-class Server(object):
-    def __init__(self, db, host = '127.0.0.1', port = 8080,
-                 stoppable = False,       # whether /exit URL exists
-                 embedded = True,         # hide diagnostics and output, etc
-                 fast_shutdown = False,   # don't wait for clients to disconn.
-                 force_traceback = False  # include traceback in all errors
+
+class Server():
+    def __init__(self, db, host='127.0.0.1', port=8080,
+                 stoppable=False,        # whether /exit URL exists
+                 fast_shutdown=False,    # don't wait for clients to disconn.
+                 force_traceback=False,  # include traceback in all errors
+                 basepath='',            # base URL path for cherrypy.tree
                  ):
         # Save server version, just for verification during tests
         self.version = nilmdb.__version__
 
-        self.embedded = embedded
         self.db = db
         if not getattr(db, "_thread_safe", None):
             raise KeyError("Database object " + str(db) + " doesn't claim "
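The extract hunk above mirrors the insert change: with binary=1 the response is application/octet-stream in the same little-endian row format, and markup/count are rejected in that mode.  A decoding sketch for an assumed float32_8 layout (host and the requests library are also assumptions):

    import struct
    import requests
    resp = requests.get("http://localhost:8080/stream/extract",
                        params={"path": "/newton/prep",
                                "start": "1234567890000000",
                                "end": "1234567899000000",
                                "binary": "1"})
    rowfmt = struct.Struct('<q8f')   # int64 timestamp + 8 float32 values
    for ts, *values in rowfmt.iter_unpack(resp.content):
        print(ts, values)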
@@ -468,13 +425,12 @@ class Server(object):
 
         # Build up global server configuration
         cherrypy.config.update({
+            'environment': 'embedded',
             'server.socket_host': host,
             'server.socket_port': port,
-            'engine.autoreload_on': False,
+            'engine.autoreload.on': False,
             'server.max_request_body_size': 8*1024*1024,
             })
-        if self.embedded:
-            cherrypy.config.update({ 'environment': 'embedded' })
 
         # Build up application specific configuration
         app_config = {}
@@ -483,23 +439,23 @@ class Server(object):
             })
 
         # Some default headers to just help identify that things are working
-        app_config.update({ 'response.headers.X-Jim-Is-Awesome': 'yeah' })
+        app_config.update({'response.headers.X-Jim-Is-Awesome': 'yeah'})
 
         # Set up Cross-Origin Resource Sharing (CORS) handler so we
         # can correctly respond to browsers' CORS preflight requests.
         # This also limits verbs to GET and HEAD by default.
-        app_config.update({ 'tools.CORS_allow.on': True,
-                            'tools.CORS_allow.methods': ['GET', 'HEAD'] })
+        app_config.update({'tools.CORS_allow.on': True,
+                           'tools.CORS_allow.methods': ['GET', 'HEAD']})
 
         # Configure the 'json_in' tool to also allow other content-types
         # (like x-www-form-urlencoded), and to treat JSON as a dict that
         # fills requests.param.
-        app_config.update({ 'tools.json_in.force': False,
-                            'tools.json_in.processor': json_to_request_params })
+        app_config.update({'tools.json_in.force': False,
+                           'tools.json_in.processor': json_to_request_params})
 
         # Send tracebacks in error responses.  They're hidden by the
         # error_page function for client errors (code 400-499).
-        app_config.update({ 'request.show_tracebacks' : True })
+        app_config.update({'request.show_tracebacks': True})
         self.force_traceback = force_traceback
 
         # Patch CherryPy error handler to never pad out error messages.
@@ -513,79 +469,78 @@ class Server(object):
         if stoppable:
             root.exit = Exiter()
         cherrypy.tree.apps = {}
-        cherrypy.tree.mount(root, "/", config = { "/" : app_config })
+        cherrypy.tree.mount(root, basepath, config={"/": app_config})
 
         # Shutdowns normally wait for clients to disconnect.  To speed
         # up tests, set fast_shutdown = True
         if fast_shutdown:
-            # Setting timeout to 0 triggers os._exit(70) at shutdown, grr...
-            cherrypy.server.shutdown_timeout = 0.01
+            cherrypy.server.shutdown_timeout = 0
         else:
             cherrypy.server.shutdown_timeout = 5
 
+        # Set up the WSGI application pointer for external programs
+        self.wsgi_application = cherrypy.tree
+
     def json_error_page(self, status, message, traceback, version):
         """Return a custom error page in JSON so the client can parse it"""
-        errordata = { "status" : status,
-                      "message" : message,
-                      "traceback" : traceback }
-        # Don't send a traceback if the error was 400-499 (client's fault)
-        try:
-            code = int(status.split()[0])
-            if not self.force_traceback:
-                if code >= 400 and code <= 499:
-                    errordata["traceback"] = ""
-        except Exception: # pragma: no cover
-            pass
-        # Override the response type, which was previously set to text/html
-        cherrypy.serving.response.headers['Content-Type'] = (
-            "application/json;charset=utf-8" )
-        # Undo the HTML escaping that cherrypy's get_error_page function applies
-        # (cherrypy issue 1135)
-        for k, v in errordata.iteritems():
-            v = v.replace("&lt;", "<")
-            v = v.replace("&gt;", ">")
-            v = v.replace("&amp;", "&")
-            errordata[k] = v
-        return json.dumps(errordata, separators=(',',':'))
+        return json_error_page(status, message, traceback, version,
+                               self.force_traceback)
 
-    def start(self, blocking = False, event = None):
+    def start(self, blocking=False, event=None):
+        cherrypy_start(blocking, event)
 
-        if not self.embedded: # pragma: no cover
-            # Handle signals nicely
-            if hasattr(cherrypy.engine, "signal_handler"):
-                cherrypy.engine.signal_handler.subscribe()
-            if hasattr(cherrypy.engine, "console_control_handler"):
-                cherrypy.engine.console_control_handler.subscribe()
-
-        # Cherrypy stupidly calls os._exit(70) when it can't bind the
-        # port.  At least try to print a reasonable error and continue
-        # in this case, rather than just dying silently (as we would
-        # otherwise do in embedded mode)
-        real_exit = os._exit
-        def fake_exit(code): # pragma: no cover
-            if code == os.EX_SOFTWARE:
-                fprintf(sys.stderr, "error: CherryPy called os._exit!\n")
-            else:
-                real_exit(code)
-        os._exit = fake_exit
-        cherrypy.engine.start()
-        os._exit = real_exit
-
-        # Signal that the engine has started successfully
-        if event is not None:
-            event.set()
-
-        if blocking:
-            try:
-                cherrypy.engine.wait(cherrypy.engine.states.EXITING,
-                                     interval = 0.1, channel = 'main')
-            except (KeyboardInterrupt, IOError): # pragma: no cover
-                cherrypy.engine.log('Keyboard Interrupt: shutting down bus')
-                cherrypy.engine.exit()
-            except SystemExit: # pragma: no cover
-                cherrypy.engine.log('SystemExit raised: shutting down bus')
-                cherrypy.engine.exit()
 | 
					 | 
				
			||||||
                raise
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def stop(self):
 | 
					    def stop(self):
 | 
				
			||||||
        cherrypy.engine.exit()
 | 
					        cherrypy_stop()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Use a single global nilmdb.server.NilmDB and nilmdb.server.Server
 | 
				
			||||||
 | 
					# instance since the database can only be opened once.  For this to
 | 
				
			||||||
 | 
					# work, the web server must use only a single process and single
 | 
				
			||||||
 | 
					# Python interpreter.  Multiple threads are OK.
 | 
				
			||||||
 | 
					_wsgi_server = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def wsgi_application(dbpath, basepath):
 | 
				
			||||||
 | 
					    """Return a WSGI application object with a database at the
 | 
				
			||||||
 | 
					    specified path.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    'dbpath' is a filesystem location, e.g. /home/nilm/db
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    'basepath' is the URL path of the application base, which
 | 
				
			||||||
 | 
					    is the same as the first argument to Apache's WSGIScriptAlias
 | 
				
			||||||
 | 
					    directive.
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    def application(environ, start_response):
 | 
				
			||||||
 | 
					        global _wsgi_server
 | 
				
			||||||
 | 
					        if _wsgi_server is None:
 | 
				
			||||||
 | 
					            # Try to start the server
 | 
				
			||||||
 | 
					            try:
 | 
				
			||||||
 | 
					                db = nilmdb.utils.serializer_proxy(
 | 
				
			||||||
 | 
					                    nilmdb.server.NilmDB)(dbpath)
 | 
				
			||||||
 | 
					                _wsgi_server = nilmdb.server.Server(
 | 
				
			||||||
 | 
					                    db, basepath=basepath.rstrip('/'))
 | 
				
			||||||
 | 
					            except Exception:
 | 
				
			||||||
 | 
					                # Build an error message on failure
 | 
				
			||||||
 | 
					                import pprint
 | 
				
			||||||
 | 
					                err = sprintf("Initializing database at path '%s' failed:\n\n",
 | 
				
			||||||
 | 
					                              dbpath)
 | 
				
			||||||
 | 
					                err += traceback.format_exc()
 | 
				
			||||||
 | 
					                import pwd
 | 
				
			||||||
 | 
					                import grp
 | 
				
			||||||
 | 
					                err += sprintf("\nRunning as: uid=%d (%s), gid=%d (%s) "
 | 
				
			||||||
 | 
					                               "on host %s, pid %d\n",
 | 
				
			||||||
 | 
					                               os.getuid(), pwd.getpwuid(os.getuid())[0],
 | 
				
			||||||
 | 
					                               os.getgid(), grp.getgrgid(os.getgid())[0],
 | 
				
			||||||
 | 
					                               socket.gethostname(), os.getpid())
 | 
				
			||||||
 | 
					                err += sprintf("\nEnvironment:\n%s\n", pprint.pformat(environ))
 | 
				
			||||||
 | 
					        if _wsgi_server is None:
 | 
				
			||||||
 | 
					            # Serve up the error with our own mini WSGI app.
 | 
				
			||||||
 | 
					            err_b = err.encode('utf-8')
 | 
				
			||||||
 | 
					            headers = [('Content-type', 'text/plain; charset=utf-8'),
 | 
				
			||||||
 | 
					                       ('Content-length', str(len(err_b)))]
 | 
				
			||||||
 | 
					            start_response("500 Internal Server Error", headers)
 | 
				
			||||||
 | 
					            return [err_b]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Call the normal application
 | 
				
			||||||
 | 
					        return _wsgi_server.wsgi_application(environ, start_response)
 | 
				
			||||||
 | 
					    return application
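
As an aside, the new wsgi_application() helper is what an Apache
mod_wsgi script would call.  A minimal sketch (the script path and
mount point below are hypothetical; only the helper itself comes from
this change, assuming it is re-exported as nilmdb.server.wsgi_application):

    # nilmdb.wsgi, referenced by e.g.
    #   WSGIScriptAlias /nilmdb /home/nilm/nilmdb.wsgi
    import nilmdb.server

    # basepath must match the first argument to WSGIScriptAlias
    application = nilmdb.server.wsgi_application("/home/nilm/db", "/nilmdb")
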
nilmdb/server/serverutil.py (new file, 225 lines)
@@ -0,0 +1,225 @@
+"""Miscellaneous decorators and other helpers for running a CherryPy
+server"""
+
+import os
+import sys
+import json
+import decorator
+import functools
+import threading
+
+import cherrypy
+
+
+# Helper to parse parameters into booleans
+def bool_param(s):
+    """Return a bool indicating whether parameter 's' was True or False,
+    supporting a few different types for 's'."""
+    try:
+        ss = s.lower()
+        if ss in ["0", "false", "f", "no", "n"]:
+            return False
+        if ss in ["1", "true", "t", "yes", "y"]:
+            return True
+    except Exception:
+        return bool(s)
+    raise cherrypy.HTTPError("400 Bad Request",
+                             "can't parse parameter: " + ss)
+
+
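
For reference, bool_param's behavior (a quick sketch exercising the
function above; the inputs are illustrative):

    bool_param("yes")    # -> True
    bool_param("F")      # -> False ("f" after .lower())
    bool_param(1)        # ints have no .lower(); falls back to bool(1) -> True
    bool_param("maybe")  # raises cherrypy.HTTPError("400 Bad Request", ...)
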
+# Decorators
+def chunked_response(func):
+    """Decorator to enable chunked responses."""
+    # Set this to False to get better tracebacks from some requests
+    # (/stream/extract, /stream/intervals).
+    func._cp_config = {'response.stream': True}
+    return func
+
+
+def response_type(content_type):
+    """Return a decorator-generating function that sets the
+    response type to the specified string."""
+    def wrapper(func, *args, **kwargs):
+        cherrypy.response.headers['Content-Type'] = content_type
+        return func(*args, **kwargs)
+    return decorator.decorator(wrapper)
+
+
+def exception_to_httperror(*expected):
+    """Return a decorator-generating function that catches expected
+    errors and throws a HTTPError describing it instead.
+
+        @exception_to_httperror(NilmDBError, ValueError)
+        def foo():
+            pass
+    """
+    def wrapper(func, *args, **kwargs):
+        exc_info = None
+        try:
+            return func(*args, **kwargs)
+        except expected:
+            # Re-raise it, but maintain the original traceback
+            exc_info = sys.exc_info()
+            new_exc = cherrypy.HTTPError("400 Bad Request", str(exc_info[1]))
+            raise new_exc.with_traceback(exc_info[2])
+        finally:
+            del exc_info
+    # We need to preserve the function's argspecs for CherryPy to
+    # handle argument errors correctly.  Decorator.decorator takes
+    # care of that.
+    return decorator.decorator(wrapper)
+
+
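
A sketch of how these decorators might stack on a CherryPy handler (the
class, method, and body here are made up for illustration):

    class Stream(object):
        @cherrypy.expose
        @chunked_response
        @response_type("text/plain;charset=utf-8")
        @exception_to_httperror(ValueError)
        def extract(self, path):
            # A ValueError raised in here becomes "400 Bad Request"
            # rather than a generic 500, and the response streams out
            # chunk by chunk instead of being buffered.
            return iter(["line1\n", "line2\n"])
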
+# Custom CherryPy tools
+def CORS_allow(methods):
+    """This does several things:
+
+    Handles CORS preflight requests.
+    Adds Allow: header to all requests.
+    Raise 405 if request.method not in method.
+
+    It is similar to cherrypy.tools.allow, with the CORS stuff added.
+
+    Add this to CherryPy with:
+    cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
+    """
+    request = cherrypy.request.headers
+    response = cherrypy.response.headers
+
+    if not isinstance(methods, (tuple, list)):
+        methods = [methods]
+    methods = [m.upper() for m in methods if m]
+    if not methods:
+        methods = ['GET', 'HEAD']
+    elif 'GET' in methods and 'HEAD' not in methods:
+        methods.append('HEAD')
+    response['Allow'] = ', '.join(methods)
+
+    # Allow all origins
+    if 'Origin' in request:
+        response['Access-Control-Allow-Origin'] = request['Origin']
+
+    # If it's a CORS request, send response.
+    request_method = request.get("Access-Control-Request-Method", None)
+    request_headers = request.get("Access-Control-Request-Headers", None)
+    if (cherrypy.request.method == "OPTIONS" and
+            request_method and request_headers):
+        response['Access-Control-Allow-Headers'] = request_headers
+        response['Access-Control-Allow-Methods'] = ', '.join(methods)
+        # Try to stop further processing and return a 200 OK
+        cherrypy.response.status = "200 OK"
+        cherrypy.response.body = b""
+        cherrypy.request.handler = lambda: ""
+        return
+
+    # Reject methods that were not explicitly allowed
+    if cherrypy.request.method not in methods:
+        raise cherrypy.HTTPError(405)
+
+
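
Registering and enabling the tool follows CherryPy's usual tool
configuration pattern; the docstring above gives the registration
one-liner.  A sketch of the wiring (the method list and config keys
shown are illustrative):

    cherrypy.tools.CORS_allow = cherrypy.Tool('on_start_resource', CORS_allow)
    app_config = {'tools.CORS_allow.on': True,
                  'tools.CORS_allow.methods': ['GET', 'HEAD', 'PUT', 'POST']}
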
+# Helper for json_in tool to process JSON data into normal request
+# parameters.
+def json_to_request_params(body):
+    cherrypy.lib.jsontools.json_processor(body)
+    if not isinstance(cherrypy.request.json, dict):
+        raise cherrypy.HTTPError(415)
+    cherrypy.request.params.update(cherrypy.request.json)
+
+
+# Used as an "error_page.default" handler
+def json_error_page(status, message, traceback, version,
+                    force_traceback=False):
+    """Return a custom error page in JSON so the client can parse it"""
+    errordata = {"status": status,
+                 "message": message,
+                 "version": version,
+                 "traceback": traceback}
+    # Don't send a traceback if the error was 400-499 (client's fault)
+    code = int(status.split()[0])
+    if not force_traceback:
+        if 400 <= code <= 499:
+            errordata["traceback"] = ""
+    # Override the response type, which was previously set to text/html
+    cherrypy.serving.response.headers['Content-Type'] = (
+        "application/json;charset=utf-8")
+    # Undo the HTML escaping that cherrypy's get_error_page function applies
+    # (cherrypy issue 1135)
+    for k, v in errordata.items():
+        v = v.replace("&lt;", "<")
+        v = v.replace("&gt;", ">")
+        v = v.replace("&amp;", "&")
+        errordata[k] = v
+    return json.dumps(errordata, separators=(',', ':'))
+
+
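
This is intended to be installed through CherryPy's standard
error_page.default hook, so every error response is machine-parseable;
the Server class above binds its own json_error_page method the same
way.  A sketch (the config call is illustrative; the JSON shape matches
errordata above):

    app_config.update({'error_page.default': json_error_page})

    # A client can then decode any error response uniformly, e.g.
    #   {"status":"404 Not Found","message":"...","version":"...","traceback":""}
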
+class CherryPyExit(SystemExit):
+    pass
+
+
+def cherrypy_patch_exit():
+    # Cherrypy stupidly calls os._exit(70) when it can't bind the port
+    # and exits.  Instead of that, raise a CherryPyExit (derived from
+    # SystemExit).  This exception may not make it back up to the caller
+    # due to internal thread use in the CherryPy engine, but there should
+    # be at least some indication that it happened.
+    bus = cherrypy.process.wspbus.bus
+    if "_patched_exit" in bus.__dict__:
+        return
+    bus._patched_exit = True
+
+    def patched_exit(orig):
+        real_exit = os._exit
+
+        def fake_exit(code):
+            raise CherryPyExit(code)
+        os._exit = fake_exit
+        try:
+            orig()
+        finally:
+            os._exit = real_exit
+    bus.exit = functools.partial(patched_exit, bus.exit)
+
+    # A behavior change in Python 3.8 means that some thread exceptions,
+    # derived from SystemExit, now print tracebacks where they didn't
+    # used to: https://bugs.python.org/issue1230540
+    # Install a thread exception hook that ignores CherryPyExit;
+    # to make this match the behavior where we didn't set
+    # threading.excepthook, we also need to ignore SystemExit.
+    def hook(args):
+        if args.exc_type == CherryPyExit or args.exc_type == SystemExit:
+            return
+        sys.excepthook(args.exc_type, args.exc_value,
+                       args.exc_traceback)  # pragma: no cover
+    threading.excepthook = hook
+
+
+# Start/stop CherryPy standalone server
+def cherrypy_start(blocking=False, event=False):
+    """Start the CherryPy server, handling errors and signals
+    somewhat gracefully."""
+
+    cherrypy_patch_exit()
+
+    # Start the server
+    cherrypy.engine.start()
+
+    # Signal that the engine has started successfully
+    if event is not None:
+        event.set()
+
+    if blocking:
+        try:
+            cherrypy.engine.wait(cherrypy.engine.states.EXITING,
+                                 interval=0.1, channel='main')
+        except (KeyboardInterrupt, IOError):
+            cherrypy.engine.log('Keyboard Interrupt: shutting down')
+            cherrypy.engine.exit()
+        except SystemExit:
+            cherrypy.engine.log('SystemExit raised: shutting down')
+            cherrypy.engine.exit()
+            raise
+
+
+# Stop CherryPy server
+def cherrypy_stop():
+    cherrypy.engine.exit()
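
Taken together, a standalone serving loop built on these helpers might
look like this (a sketch; it assumes an application has already been
mounted on cherrypy.tree, as Server's constructor above arranges):

    try:
        # Serve until Ctrl-C or SystemExit; event=None skips signaling
        cherrypy_start(blocking=True, event=None)
    finally:
        cherrypy_stop()
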
@@ -1,7 +1,7 @@
 """NilmDB utilities"""
+
+
 from nilmdb.utils.timer import Timer
-from nilmdb.utils.iteratorizer import Iteratorizer
 from nilmdb.utils.serializer import serializer_proxy
 from nilmdb.utils.lrucache import lru_cache
 from nilmdb.utils.diskusage import du, human_size
@@ -12,3 +12,5 @@ import nilmdb.utils.fallocate
 import nilmdb.utils.time
 import nilmdb.utils.iterator
 import nilmdb.utils.interval
+import nilmdb.utils.lock
+import nilmdb.utils.sort
@@ -2,12 +2,12 @@

 import os
+
+
 def replace_file(filename, content):
     """Attempt to atomically and durably replace the filename with the
-    given contents.  This is intended to be 'pretty good on most
-    OSes', but not necessarily bulletproof."""
+    given contents"""

-    newfilename = filename + ".new"
+    newfilename = filename + b".new"

     # Write to new file, flush it
     with open(newfilename, "wb") as f:
@@ -16,11 +16,4 @@ def replace_file(filename, content):
         os.fsync(f.fileno())

     # Move new file over old one
-    try:
-        os.rename(newfilename, filename)
-    except OSError: # pragma: no cover
-        # Some OSes might not support renaming over an existing file.
-        # This is definitely NOT atomic!
-        os.remove(filename)
-        os.rename(newfilename, filename)
+    os.replace(newfilename, filename)
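
os.replace() renames atomically on both POSIX and Windows (Python 3.3
and later), which is why the non-atomic remove-then-rename fallback
could be dropped.  Note the b".new" suffix: callers are now expected to
pass paths as bytes.  A quick sketch of the call (the path and contents
are illustrative):

    # Readers of the target see either the old or the new contents,
    # never a partially written file.
    replace_file(b"/tmp/example.conf", b"key = value\n")
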
@@ -1,710 +0,0 @@
-#!/usr/bin/python
-#
-# Copyright 2009 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# Disable the invalid name warning as we are inheriting from a standard library
-# object.
-# pylint: disable-msg=C6409,W0212
-
-"""A version of the datetime module which *cares* about timezones.
-
-This module will never return a naive datetime object. This requires the module
-know your local timezone, which it tries really hard to figure out.
-
-You can override the detection by using the datetime.tzaware.defaulttz_set
-method. It the module is unable to figure out the timezone itself this method
-*must* be called before the normal module is imported. If done before importing
-it can also speed up the time taken to import as the defaulttz will no longer
-try and do the detection.
-"""
-
-__author__ = "tansell@google.com (Tim Ansell)"
-
-import calendar
-import datetime
-import os
-import os.path
-import re
-import time
-import warnings
-import dateutil.parser
-import dateutil.relativedelta
-import dateutil.tz
-import pytz
-import pytz_abbr
-
-
-try:
-  # pylint: disable-msg=C6204
-  import functools
-except ImportError, e:
-
-  class functools(object):
-    """Fake replacement for a full functools."""
-
-    # pylint: disable-msg=W0613
-    @staticmethod
-    def wraps(f, *args, **kw):
-      return f
-
-
-# Need to patch pytz.utc to have a _utcoffset so you can normalize/localize
-# using it.
-pytz.utc._utcoffset = datetime.timedelta()
-
-
-timedelta = datetime.timedelta
-
-
-def _tzinfome(tzinfo):
-  """Gets a tzinfo object from a string.
-
-  Args:
-    tzinfo: A string (or string like) object, or a datetime.tzinfo object.
-
-  Returns:
-    An datetime.tzinfo object.
-
-  Raises:
-    UnknownTimeZoneError: If the timezone given can't be decoded.
-  """
-  if not isinstance(tzinfo, datetime.tzinfo):
-    try:
-      tzinfo = pytz.timezone(tzinfo)
-    except AttributeError:
-      raise pytz.UnknownTimeZoneError("Unknown timezone! %s" % tzinfo)
-  return tzinfo
-
-
-# Our "local" timezone
-_localtz = None
-
-
-def localtz():
-  """Get the local timezone.
-
-  Returns:
-    The localtime timezone as a tzinfo object.
-  """
-  # pylint: disable-msg=W0603
-  global _localtz
-  if _localtz is None:
-    _localtz = detect_timezone()
-  return _localtz
-
-
-def localtz_set(timezone):
-  """Set the local timezone."""
-  # pylint: disable-msg=W0603
-  global _localtz
-  _localtz = _tzinfome(timezone)
-
-
-def detect_timezone():
-  """Try and detect the timezone that Python is currently running in.
-
-  We have a bunch of different methods for trying to figure this out (listed in
-  order they are attempted).
-    * Try TZ environment variable.
-    * Try and find /etc/timezone file (with timezone name).
-    * Try and find /etc/localtime file (with timezone data).
-    * Try and match a TZ to the current dst/offset/shortname.
-
-  Returns:
-    The detected local timezone as a tzinfo object
-
-  Raises:
-    pytz.UnknownTimeZoneError: If it was unable to detect a timezone.
-  """
-  # First we try the TZ variable
-  tz = _detect_timezone_environ()
-  if tz is not None:
-    return tz
-
-  # Second we try /etc/timezone and use the value in that
-  tz = _detect_timezone_etc_timezone()
-  if tz is not None:
-    return tz
-
-  # Next we try and see if something matches the tzinfo in /etc/localtime
-  tz = _detect_timezone_etc_localtime()
-  if tz is not None:
-    return tz
-
-  # Next we try and use a similiar method to what PHP does.
-  # We first try to search on time.tzname, time.timezone, time.daylight to
-  # match a pytz zone.
-  warnings.warn("Had to fall back to worst detection method (the 'PHP' "
-                "method).")
-
-  tz = _detect_timezone_php()
-  if tz is not None:
-    return tz
-
-  raise pytz.UnknownTimeZoneError("Unable to detect your timezone!")
-
-
-def _detect_timezone_environ():
-  if "TZ" in os.environ:
-    try:
-      return pytz.timezone(os.environ["TZ"])
-    except (IOError, pytz.UnknownTimeZoneError):
-      warnings.warn("You provided a TZ environment value (%r) we did not "
-                    "understand!" % os.environ["TZ"])
-
-
-def _detect_timezone_etc_timezone():
-  if os.path.exists("/etc/timezone"):
-    try:
-      tz = file("/etc/timezone").read().strip()
-      try:
-        return pytz.timezone(tz)
-      except (IOError, pytz.UnknownTimeZoneError), ei:
-        warnings.warn("Your /etc/timezone file references a timezone (%r) that"
-                      " is not valid (%r)." % (tz, ei))
-
-    # Problem reading the /etc/timezone file
-    except IOError, eo:
-      warnings.warn("Could not access your /etc/timezone file: %s" % eo)
-
-
-def _detect_timezone_etc_localtime():
-  matches = []
-  if os.path.exists("/etc/localtime"):
-    localtime = pytz.tzfile.build_tzinfo("/etc/localtime",
-                                         file("/etc/localtime"))
-
-    # See if we can find a "Human Name" for this..
-    for tzname in pytz.all_timezones:
-      tz = _tzinfome(tzname)
-
-      if dir(tz) != dir(localtime):
-        continue
-
-      for attrib in dir(tz):
-        # Ignore functions and specials
-        if callable(getattr(tz, attrib)) or attrib.startswith("__"):
-          continue
-
-        # This will always be different
-        if attrib == "zone" or attrib == "_tzinfos":
-          continue
-
-        if getattr(tz, attrib) != getattr(localtime, attrib):
-          break
-
-      # We get here iff break didn't happen, i.e. no meaningful attributes
-      # differ between tz and localtime
-      else:
-        matches.append(tzname)
-
-    if len(matches) == 1:
-      return _tzinfome(matches[0])
-    else:
-      # Warn the person about this!
-      warning = "Could not get a human name for your timezone: "
-      if len(matches) > 1:
-        warning += ("We detected multiple matches for your /etc/localtime. "
-                    "(Matches where %s)" % matches)
-        return _tzinfome(matches[0])
-      else:
-        warning += "We detected no matches for your /etc/localtime."
-      warnings.warn(warning)
-
-      # Register /etc/localtime as the timezone loaded.
-      pytz._tzinfo_cache['/etc/localtime'] = localtime
-      return localtime
-
-
-def _detect_timezone_php():
-  tomatch = (time.tzname[0], time.timezone, time.daylight)
-  now = datetime.datetime.now()
-
-  matches = []
-  for tzname in pytz.all_timezones:
-    try:
-      tz = pytz.timezone(tzname)
-    except IOError:
-      continue
-
-    try:
-      indst = tz.localize(now).timetuple()[-1]
-
-      if tomatch == (tz._tzname, -tz._utcoffset.seconds, indst):
-        matches.append(tzname)
-
-    # pylint: disable-msg=W0704
-    except AttributeError:
-      pass
-
-  if len(matches) > 1:
-    warnings.warn("We detected multiple matches for the timezone, choosing "
-                  "the first %s. (Matches where %s)" % (matches[0], matches))
-    return pytz.timezone(matches[0])
-
-
-class datetime_tz(datetime.datetime):
-  """An extension of the inbuilt datetime adding more functionality.
-
-  The extra functionality includes:
-    * Partial parsing support (IE 2006/02/30 matches %Y/%M/%D %H:%M)
-    * Full integration with pytz (just give it the string of the timezone!)
-    * Proper support for going to/from Unix timestamps (which are in UTC!).
-  """
-  __slots__ = ["is_dst"]
-
-  def __new__(cls, *args, **kw):
-    args = list(args)
-    if not args:
-      raise TypeError("Not enough arguments given.")
-
-    # See if we are given a tzinfo object...
-    tzinfo = None
-    if isinstance(args[-1], (datetime.tzinfo, basestring)):
-      tzinfo = _tzinfome(args.pop(-1))
-    elif kw.get("tzinfo", None) is not None:
-      tzinfo = _tzinfome(kw.pop("tzinfo"))
-
-    # Create a datetime object if we don't have one
-    if isinstance(args[0], datetime.datetime):
-      # Convert the datetime instance to a datetime object.
-      newargs = (list(args[0].timetuple()[0:6]) +
-                 [args[0].microsecond, args[0].tzinfo])
-      dt = datetime.datetime(*newargs)
-
-      if tzinfo is None and dt.tzinfo is None:
-        raise TypeError("Must specify a timezone!")
-
-      if tzinfo is not None and dt.tzinfo is not None:
-        raise TypeError("Can not give a timezone with timezone aware"
-                        " datetime object! (Use localize.)")
-    else:
-      dt = datetime.datetime(*args, **kw)
-
-    if dt.tzinfo is not None:
-      # Re-normalize the dt object
-      dt = dt.tzinfo.normalize(dt)
-
-    else:
-      if tzinfo is None:
-        tzinfo = localtz()
-
-      try:
-        dt = tzinfo.localize(dt, is_dst=None)
-      except pytz.AmbiguousTimeError:
-        is_dst = None
-        if "is_dst" in kw:
-          is_dst = kw.pop("is_dst")
-
-        try:
-          dt = tzinfo.localize(dt, is_dst)
-        except IndexError:
-          raise pytz.AmbiguousTimeError("No such time exists!")
-
-    newargs = list(dt.timetuple()[0:6])+[dt.microsecond, dt.tzinfo]
-    obj = datetime.datetime.__new__(cls, *newargs)
-    obj.is_dst = obj.dst() != datetime.timedelta(0)
-    return obj
-
-  def asdatetime(self, naive=True):
-    """Return this datetime_tz as a datetime object.
-
-    Args:
-      naive: Return *without* any tz info.
-
-    Returns:
-      This datetime_tz as a datetime object.
-    """
-    args = list(self.timetuple()[0:6])+[self.microsecond]
-    if not naive:
-      args.append(self.tzinfo)
-    return datetime.datetime(*args)
-
-  def asdate(self):
-    """Return this datetime_tz as a date object.
-
-    Returns:
-      This datetime_tz as a date object.
-    """
-    return datetime.date(self.year, self.month, self.day)
-
-  def totimestamp(self):
-    """Convert this datetime object back to a unix timestamp.
-
-    The Unix epoch is the time 00:00:00 UTC on January 1, 1970.
-
-    Returns:
-      Unix timestamp.
-    """
-    return calendar.timegm(self.utctimetuple())+1e-6*self.microsecond
-
-  def astimezone(self, tzinfo):
-    """Returns a version of this timestamp converted to the given timezone.
-
-    Args:
-      tzinfo: Either a datetime.tzinfo object or a string (which will be looked
-              up in pytz.
-
-    Returns:
-      A datetime_tz object in the given timezone.
-    """
-    # Assert we are not a naive datetime object
-    assert self.tzinfo is not None
-
-    tzinfo = _tzinfome(tzinfo)
-
-    d = self.asdatetime(naive=False).astimezone(tzinfo)
-    return datetime_tz(d)
-
-  # pylint: disable-msg=C6113
-  def replace(self, **kw):
-    """Return datetime with new specified fields given as arguments.
-
-    For example, dt.replace(days=4) would return a new datetime_tz object with
-    exactly the same as dt but with the days attribute equal to 4.
-
-    Any attribute can be replaced, but tzinfo can not be set to None.
-
-    Args:
-      Any datetime_tz attribute.
-
-    Returns:
-      A datetime_tz object with the attributes replaced.
-
-    Raises:
-      TypeError: If the given replacement is invalid.
-    """
-    if "tzinfo" in kw:
-      if kw["tzinfo"] is None:
-        raise TypeError("Can not remove the timezone use asdatetime()")
-
-    is_dst = None
-    if "is_dst" in kw:
-      is_dst = kw["is_dst"]
-      del kw["is_dst"]
-    else:
-      # Use our own DST setting..
-      is_dst = self.is_dst
-
-    replaced = self.asdatetime().replace(**kw)
-
-    return datetime_tz(replaced, tzinfo=self.tzinfo.zone, is_dst=is_dst)
-
-  # pylint: disable-msg=C6310
-  @classmethod
-  def smartparse(cls, toparse, tzinfo=None):
-    """Method which uses dateutil.parse and extras to try and parse the string.
-
-    Valid dates are found at:
-     http://labix.org/python-dateutil#head-1443e0f14ad5dff07efd465e080d1110920673d8-2
-
-    Other valid formats include:
-      "now" or "today"
-      "yesterday"
-      "tommorrow"
-      "5 minutes ago"
-      "10 hours ago"
-      "10h5m ago"
-      "start of yesterday"
-      "end of tommorrow"
-      "end of 3rd of March"
-
-    Args:
-      toparse: The string to parse.
-      tzinfo: Timezone for the resultant datetime_tz object should be in.
-              (Defaults to your local timezone.)
-
-    Returns:
-      New datetime_tz object.
-
-    Raises:
-      ValueError: If unable to make sense of the input.
-    """
-    # Default for empty fields are:
-    #  year/month/day == now
-    #  hour/minute/second/microsecond == 0
-    toparse = toparse.strip()
-
-    if tzinfo is None:
-      dt = cls.now()
-    else:
-      dt = cls.now(tzinfo)
-
-    default = dt.replace(hour=0, minute=0, second=0, microsecond=0)
-
-    # Remove "start of " and "end of " prefix in the string
-    if toparse.lower().startswith("end of "):
-      toparse = toparse[7:].strip()
-
-      dt += datetime.timedelta(days=1)
-      dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
-      dt -= datetime.timedelta(microseconds=1)
-
-      default = dt
-
-    elif toparse.lower().startswith("start of "):
-      toparse = toparse[9:].strip()
-
-      dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
-      default = dt
-
-    # Handle strings with "now", "today", "yesterday", "tomorrow" and "ago".
-    # Need to use lowercase
-    toparselower = toparse.lower()
-
-    if toparselower in ["now", "today"]:
-      pass
-
-    elif toparselower == "yesterday":
-      dt -= datetime.timedelta(days=1)
-
-    elif toparselower == "tommorrow":
-      dt += datetime.timedelta(days=1)
-
-    elif "ago" in toparselower:
-      # Remove the "ago" bit
-      toparselower = toparselower[:-3]
-      # Replace all "a day and an hour" with "1 day 1 hour"
-      toparselower = toparselower.replace("a ", "1 ")
-      toparselower = toparselower.replace("an ", "1 ")
-      toparselower = toparselower.replace(" and ", " ")
-
-      # Match the following
-      # 1 hour ago
-      # 1h ago
-      # 1 h ago
-      # 1 hour ago
-      # 2 hours ago
-      # Same with minutes, seconds, etc.
-
-      tocheck = ("seconds", "minutes", "hours", "days", "weeks", "months",
-                 "years")
-      result = {}
-      for match in re.finditer("([0-9]+)([^0-9]*)", toparselower):
-        amount = int(match.group(1))
-        unit = match.group(2).strip()
-
-        for bit in tocheck:
-          regex = "^([%s]|((%s)s?))$" % (
-              bit[0], bit[:-1])
-
-          bitmatch = re.search(regex, unit)
-          if bitmatch:
-            result[bit] = amount
-            break
-        else:
-          raise ValueError("Was not able to parse date unit %r!" % unit)
-
-      delta = dateutil.relativedelta.relativedelta(**result)
-      dt -= delta
-
-    else:
-      # Handle strings with normal datetime format, use original case.
-      dt = dateutil.parser.parse(toparse, default=default.asdatetime(),
-                                 tzinfos=pytz_abbr.tzinfos)
-      if dt is None:
-        raise ValueError("Was not able to parse date!")
-
-      if dt.tzinfo is pytz_abbr.unknown:
-        dt = dt.replace(tzinfo=None)
-
-      if dt.tzinfo is None:
-        if tzinfo is None:
-          tzinfo = localtz()
-        dt = cls(dt, tzinfo)
-      else:
-        if isinstance(dt.tzinfo, pytz_abbr.tzabbr):
-          abbr = dt.tzinfo
-          dt = dt.replace(tzinfo=None)
-          dt = cls(dt, abbr.zone, is_dst=abbr.dst)
-
-        dt = cls(dt)
-
-    return dt
-
-  @classmethod
-  def utcfromtimestamp(cls, timestamp):
-    """Returns a datetime object of a given timestamp (in UTC)."""
-    obj = datetime.datetime.utcfromtimestamp(timestamp)
-    obj = pytz.utc.localize(obj)
-    return cls(obj)
-
-  @classmethod
-  def fromtimestamp(cls, timestamp):
-    """Returns a datetime object of a given timestamp (in local tz)."""
-    d = cls.utcfromtimestamp(timestamp)
-    return d.astimezone(localtz())
-
-  @classmethod
-  def utcnow(cls):
-    """Return a new datetime representing UTC day and time."""
-    obj = datetime.datetime.utcnow()
-    obj = cls(obj, tzinfo=pytz.utc)
-    return obj
-
-  @classmethod
-  def now(cls, tzinfo=None):
-    """[tz] -> new datetime with tz's local day and time."""
-    obj = cls.utcnow()
-    if tzinfo is None:
-      tzinfo = localtz()
-    return obj.astimezone(tzinfo)
-
-  today = now
-
-  @staticmethod
-  def fromordinal(ordinal):
-    raise SyntaxError("Not enough information to create a datetime_tz object "
-                      "from an ordinal. Please use datetime.date.fromordinal")
-
-
-class iterate(object):
-  """Helpful iterators for working with datetime_tz objects."""
-
-  @staticmethod
-  def between(start, delta, end=None):
-    """Return an iterator between this date till given end point.
-
-    Example usage:
-      >>> d = datetime_tz.smartparse("5 days ago")
-      2008/05/12 11:45
-      >>> for i in d.between(timedelta(days=1), datetime_tz.now()):
-      >>>    print i
-      2008/05/12 11:45
-      2008/05/13 11:45
-      2008/05/14 11:45
-      2008/05/15 11:45
-      2008/05/16 11:45
-
-    Args:
-      start: The date to start at.
-      delta: The interval to iterate with.
-      end: (Optional) Date to end at. If not given the iterator will never
-           terminate.
-
-    Yields:
-      datetime_tz objects.
-    """
-    toyield = start
-    while end is None or toyield < end:
-      yield toyield
-      toyield += delta
-
-  @staticmethod
-  def weeks(start, end=None):
-    """Iterate over the weeks between the given datetime_tzs.
-
-    Args:
-      start: datetime_tz to start from.
-      end: (Optional) Date to end at, if not given the iterator will never
-           terminate.
-
-    Returns:
-      An iterator which generates datetime_tz objects a week apart.
-    """
-    return iterate.between(start, datetime.timedelta(days=7), end)
-
-  @staticmethod
-  def days(start, end=None):
-    """Iterate over the days between the given datetime_tzs.
-
-    Args:
-      start: datetime_tz to start from.
-      end: (Optional) Date to end at, if not given the iterator will never
-           terminate.
-
-    Returns:
-      An iterator which generates datetime_tz objects a day apart.
-    """
-    return iterate.between(start, datetime.timedelta(days=1), end)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  @staticmethod
  def hours(start, end=None):
    """Iterate over the hours between the given datetime_tzs.

    Args:
      start: datetime_tz to start from.
      end: (Optional) Date to end at, if not given the iterator will never
           terminate.

    Returns:
      An iterator which generates datetime_tz objects an hour apart.
    """
    return iterate.between(start, datetime.timedelta(hours=1), end)

  @staticmethod
  def minutes(start, end=None):
    """Iterate over the minutes between the given datetime_tzs.

    Args:
      start: datetime_tz to start from.
      end: (Optional) Date to end at, if not given the iterator will never
           terminate.

    Returns:
      An iterator which generates datetime_tz objects a minute apart.
    """
    return iterate.between(start, datetime.timedelta(minutes=1), end)

  @staticmethod
  def seconds(start, end=None):
    """Iterate over the seconds between the given datetime_tzs.

    Args:
      start: datetime_tz to start from.
      end: (Optional) Date to end at, if not given the iterator will never
           terminate.

    Returns:
      An iterator which generates datetime_tz objects a second apart.
    """
    return iterate.between(start, datetime.timedelta(seconds=1), end)


def _wrap_method(name):
  """Wrap a method.

  Patch a method which might return a datetime.datetime to return a
  datetime_tz.datetime_tz instead.

  Args:
    name: The name of the method to patch
  """
  method = getattr(datetime.datetime, name)

  # Have to give the second argument as method has no __module__ option.
  @functools.wraps(method, ("__name__", "__doc__"), ())
  def wrapper(*args, **kw):
    r = method(*args, **kw)

    if isinstance(r, datetime.datetime) and not isinstance(r, datetime_tz):
      r = datetime_tz(r)
    return r

  setattr(datetime_tz, name, wrapper)


for methodname in ["__add__", "__radd__", "__rsub__", "__sub__", "combine"]:

  # Make sure we have not already got an override for this method
  assert methodname not in datetime_tz.__dict__

  _wrap_method(methodname)


__all__ = ['datetime_tz', 'detect_timezone', 'iterate', 'localtz',
    'localtz_set', 'timedelta', '_detect_timezone_environ',
    '_detect_timezone_etc_localtime', '_detect_timezone_etc_timezone',
    '_detect_timezone_php']
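For reference, the extra arguments passed to functools.wraps() above are its "assigned" and "updated" tuples: built-in methods such as datetime.datetime.__add__ carry no __module__ or __dict__, so only __name__ and __doc__ can safely be copied onto the wrapper. A standalone sketch of the same pattern (independent of the code above):

import functools

def original(x):
    """Add one."""
    return x + 1

# Copy only __name__ and __doc__, and merge nothing into __dict__,
# mirroring the functools.wraps(method, ("__name__", "__doc__"), ()) call above.
@functools.wraps(original, ("__name__", "__doc__"), ())
def wrapper(*args, **kw):
    return original(*args, **kw)

print(wrapper.__name__, wrapper.__doc__)   # original Add one.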
@@ -1,230 +0,0 @@
#!/usr/bin/python2.4
# -*- coding: utf-8 -*-
#
# Copyright 2010 Google Inc. All Rights Reserved.
#

"""
Common time zone acronyms/abbreviations for use with the datetime_tz module.

*WARNING*: There are lots of caveats when using this module which are listed
below.

CAVEAT 1: The acronyms/abbreviations are not globally unique; they are not even
unique within a region. For example, EST can mean either of:
  Eastern Standard Time in Australia (which is 10 hours ahead of UTC)
  Eastern Standard Time in North America (which is 5 hours behind UTC)

Where there are two abbreviations, the more popular one will appear in the all
dictionary, while the less common one will only appear in that country's region
dictionary. I.e., if using all, EST will be mapped to Eastern Standard Time in
North America.

CAVEAT 2: Many of the acronyms don't map to a neat Olson timezone. For example,
Eastern European Summer Time (EEDT) is used by many different countries in
Europe *at different times*! If the acronym does not map neatly to one zone it
is mapped to the Etc/GMT+-XX Olson zone. This means that any date manipulations
can end up with odd things like summer time in the middle of winter.

CAVEAT 3: The Summer/Standard time difference is really important! For an hour
each year it is needed to determine which time you are actually talking about.
    2002-10-27 01:20:00 EST != 2002-10-27 01:20:00 EDT
"""

import datetime
import pytz
import pytz.tzfile


class tzabbr(datetime.tzinfo):
  """A timezone abbreviation.

  *WARNING*: This is not a tzinfo implementation! Trying to use this as tzinfo
  object will result in failure.  We inherit from datetime.tzinfo so we can get
  through the dateutil checks.
  """
  pass


# A "marker" tzinfo object which is used to signify an unknown timezone.
unknown = datetime.tzinfo(0)


regions = {'all': {}, 'military': {}}
# Create a special alias for the all and military regions
all = regions['all']
military = regions['military']

def tzabbr_register(abbr, name, region, zone, dst):
  """Register a new timezone abbreviation in the global registry.

  If another abbreviation with the same name has already been registered, the
  new abbreviation will only be registered in the region-specific dictionary.
  """
  newabbr = tzabbr()
  newabbr.abbr = abbr
  newabbr.name = name
  newabbr.region = region
  newabbr.zone = zone
  newabbr.dst = dst

  if abbr not in all:
    all[abbr] = newabbr

  if not region in regions:
    regions[region] = {}

  assert abbr not in regions[region]
  regions[region][abbr] = newabbr


def tzinfos_create(use_region):
  abbrs = regions[use_region]

  def tzinfos(abbr, offset):
    if abbr:
      if abbr in abbrs:
        result = abbrs[abbr]
        if offset:
          # FIXME: Check the offset matches the abbreviation we just selected.
          pass
        return result
      else:
        raise ValueError, "Unknown timezone found %s" % abbr
    if offset == 0:
      return pytz.utc
    if offset:
      return pytz.FixedOffset(offset/60)
    return unknown

  return tzinfos


# Create a special alias for the all tzinfos
tzinfos = tzinfos_create('all')


# Create the abbreviations.
# *WARNING*: Order matters!
tzabbr_register("A", u"Alpha Time Zone", u"Military", "Etc/GMT-1", False)
tzabbr_register("ACDT", u"Australian Central Daylight Time", u"Australia",
                "Australia/Adelaide", True)
tzabbr_register("ACST", u"Australian Central Standard Time", u"Australia",
                "Australia/Adelaide", False)
tzabbr_register("ADT", u"Atlantic Daylight Time", u"North America",
                "America/Halifax", True)
tzabbr_register("AEDT", u"Australian Eastern Daylight Time", u"Australia",
                "Australia/Sydney", True)
tzabbr_register("AEST", u"Australian Eastern Standard Time", u"Australia",
                "Australia/Sydney", False)
tzabbr_register("AKDT", u"Alaska Daylight Time", u"North America",
                "US/Alaska", True)
tzabbr_register("AKST", u"Alaska Standard Time", u"North America",
                "US/Alaska", False)
tzabbr_register("AST", u"Atlantic Standard Time", u"North America",
                "America/Halifax", False)
tzabbr_register("AWDT", u"Australian Western Daylight Time", u"Australia",
                "Australia/West", True)
tzabbr_register("AWST", u"Australian Western Standard Time", u"Australia",
                "Australia/West", False)
tzabbr_register("B", u"Bravo Time Zone", u"Military", "Etc/GMT-2", False)
tzabbr_register("BST", u"British Summer Time", u"Europe", "Europe/London", True)
tzabbr_register("C", u"Charlie Time Zone", u"Military", "Etc/GMT-3", False)
tzabbr_register("CDT", u"Central Daylight Time", u"North America",
                "US/Central", True)
tzabbr_register("CEDT", u"Central European Daylight Time", u"Europe",
                "Etc/GMT+2", True)
tzabbr_register("CEST", u"Central European Summer Time", u"Europe",
                "Etc/GMT+2", True)
tzabbr_register("CET", u"Central European Time", u"Europe", "Etc/GMT+1", False)
tzabbr_register("CST", u"Central Standard Time", u"North America",
                "US/Central", False)
tzabbr_register("CXT", u"Christmas Island Time", u"Australia",
                "Indian/Christmas", False)
tzabbr_register("D", u"Delta Time Zone", u"Military", "Etc/GMT-4", False)
tzabbr_register("E", u"Echo Time Zone", u"Military", "Etc/GMT-5", False)
tzabbr_register("EDT", u"Eastern Daylight Time", u"North America",
                "US/Eastern", True)
tzabbr_register("EEDT", u"Eastern European Daylight Time", u"Europe",
                "Etc/GMT+3", True)
tzabbr_register("EEST", u"Eastern European Summer Time", u"Europe",
                "Etc/GMT+3", True)
tzabbr_register("EET", u"Eastern European Time", u"Europe", "Etc/GMT+2", False)
tzabbr_register("EST", u"Eastern Standard Time", u"North America",
                "US/Eastern", False)
tzabbr_register("F", u"Foxtrot Time Zone", u"Military", "Etc/GMT-6", False)
tzabbr_register("G", u"Golf Time Zone", u"Military", "Etc/GMT-7", False)
tzabbr_register("GMT", u"Greenwich Mean Time", u"Europe", pytz.utc, False)
tzabbr_register("H", u"Hotel Time Zone", u"Military", "Etc/GMT-8", False)
#tzabbr_register("HAA", u"Heure Avancée de l'Atlantique", u"North America", u"UTC - 3 hours")
#tzabbr_register("HAC", u"Heure Avancée du Centre", u"North America", u"UTC - 5 hours")
tzabbr_register("HADT", u"Hawaii-Aleutian Daylight Time", u"North America",
                "Pacific/Honolulu", True)
#tzabbr_register("HAE", u"Heure Avancée de l'Est", u"North America", u"UTC - 4 hours")
#tzabbr_register("HAP", u"Heure Avancée du Pacifique", u"North America", u"UTC - 7 hours")
#tzabbr_register("HAR", u"Heure Avancée des Rocheuses", u"North America", u"UTC - 6 hours")
tzabbr_register("HAST", u"Hawaii-Aleutian Standard Time", u"North America",
                "Pacific/Honolulu", False)
#tzabbr_register("HAT", u"Heure Avancée de Terre-Neuve", u"North America", u"UTC - 2:30 hours")
#tzabbr_register("HAY", u"Heure Avancée du Yukon", u"North America", u"UTC - 8 hours")
tzabbr_register("HDT", u"Hawaii Daylight Time", u"North America",
                "Pacific/Honolulu", True)
#tzabbr_register("HNA", u"Heure Normale de l'Atlantique", u"North America", u"UTC - 4 hours")
#tzabbr_register("HNC", u"Heure Normale du Centre", u"North America", u"UTC - 6 hours")
#tzabbr_register("HNE", u"Heure Normale de l'Est", u"North America", u"UTC - 5 hours")
#tzabbr_register("HNP", u"Heure Normale du Pacifique", u"North America", u"UTC - 8 hours")
#tzabbr_register("HNR", u"Heure Normale des Rocheuses", u"North America", u"UTC - 7 hours")
#tzabbr_register("HNT", u"Heure Normale de Terre-Neuve", u"North America", u"UTC - 3:30 hours")
#tzabbr_register("HNY", u"Heure Normale du Yukon", u"North America", u"UTC - 9 hours")
tzabbr_register("HST", u"Hawaii Standard Time", u"North America",
                "Pacific/Honolulu", False)
tzabbr_register("I", u"India Time Zone", u"Military", "Etc/GMT-9", False)
tzabbr_register("IST", u"Irish Summer Time", u"Europe", "Europe/Dublin", True)
tzabbr_register("K", u"Kilo Time Zone", u"Military", "Etc/GMT-10", False)
tzabbr_register("L", u"Lima Time Zone", u"Military", "Etc/GMT-11", False)
tzabbr_register("M", u"Mike Time Zone", u"Military", "Etc/GMT-12", False)
tzabbr_register("MDT", u"Mountain Daylight Time", u"North America",
                "US/Mountain", True)
#tzabbr_register("MESZ", u"Mitteleuropäische Sommerzeit", u"Europe", u"UTC + 2 hours")
#tzabbr_register("MEZ", u"Mitteleuropäische Zeit", u"Europe", u"UTC + 1 hour")
tzabbr_register("MSD", u"Moscow Daylight Time", u"Europe",
                "Europe/Moscow", True)
tzabbr_register("MSK", u"Moscow Standard Time", u"Europe",
                "Europe/Moscow", False)
tzabbr_register("MST", u"Mountain Standard Time", u"North America",
                "US/Mountain", False)
tzabbr_register("N", u"November Time Zone", u"Military", "Etc/GMT+1", False)
tzabbr_register("NDT", u"Newfoundland Daylight Time", u"North America",
                "America/St_Johns", True)
tzabbr_register("NFT", u"Norfolk (Island) Time", u"Australia",
                "Pacific/Norfolk", False)
tzabbr_register("NST", u"Newfoundland Standard Time", u"North America",
                "America/St_Johns", False)
tzabbr_register("O", u"Oscar Time Zone", u"Military", "Etc/GMT+2", False)
tzabbr_register("P", u"Papa Time Zone", u"Military", "Etc/GMT+3", False)
tzabbr_register("PDT", u"Pacific Daylight Time", u"North America",
                "US/Pacific", True)
tzabbr_register("PST", u"Pacific Standard Time", u"North America",
                "US/Pacific", False)
tzabbr_register("Q", u"Quebec Time Zone", u"Military", "Etc/GMT+4", False)
tzabbr_register("R", u"Romeo Time Zone", u"Military", "Etc/GMT+5", False)
tzabbr_register("S", u"Sierra Time Zone", u"Military", "Etc/GMT+6", False)
tzabbr_register("T", u"Tango Time Zone", u"Military", "Etc/GMT+7", False)
tzabbr_register("U", u"Uniform Time Zone", u"Military", "Etc/GMT+8", False)
tzabbr_register("UTC", u"Coordinated Universal Time", u"Europe",
                pytz.utc, False)
tzabbr_register("V", u"Victor Time Zone", u"Military", "Etc/GMT+9", False)
tzabbr_register("W", u"Whiskey Time Zone", u"Military", "Etc/GMT+10", False)
tzabbr_register("WDT", u"Western Daylight Time", u"Australia",
                "Australia/West", True)
tzabbr_register("WEDT", u"Western European Daylight Time", u"Europe",
                "Etc/GMT+1", True)
tzabbr_register("WEST", u"Western European Summer Time", u"Europe",
                "Etc/GMT+1", True)
tzabbr_register("WET", u"Western European Time", u"Europe", pytz.utc, False)
tzabbr_register("WST", u"Western Standard Time", u"Australia",
                "Australia/West", False)
tzabbr_register("X", u"X-ray Time Zone", u"Military", "Etc/GMT+11", False)
tzabbr_register("Y", u"Yankee Time Zone", u"Military", "Etc/GMT+12", False)
tzabbr_register("Z", u"Zulu Time Zone", u"Military", pytz.utc, False)
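For context, this is how a tzinfos callback like the one built by tzinfos_create() is consumed: dateutil.parser.parse() calls it with the abbreviation and/or numeric offset (in seconds) that it finds in the input string. A simplified standalone sketch (the abbreviation table here is a small stand-in, not the full registry above):

import dateutil.parser
import pytz

def tzinfos(abbr, offset):
    table = {"UTC": pytz.utc, "GMT": pytz.utc}
    if abbr in table:
        return table[abbr]
    if offset:
        # dateutil supplies the offset in seconds; pytz.FixedOffset takes minutes.
        return pytz.FixedOffset(offset // 60)
    return None

dt = dateutil.parser.parse("2002-10-27 01:20:00 GMT", tzinfos=tzinfos)
print(dt.isoformat())   # 2002-10-27T01:20:00+00:00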
@@ -1,25 +1,36 @@
 import os
+import errno
 from math import log


 def human_size(num):
     """Human friendly file size"""
-    unit_list = zip(['bytes', 'kiB', 'MiB', 'GiB', 'TiB'], [0, 0, 1, 2, 2])
-    if num > 1:
+    unit_list = list(zip(['bytes', 'kiB', 'MiB', 'GiB', 'TiB'],
+                         [0, 0, 1, 2, 2]))
+    if num == 0:
+        return '0 bytes'
+    if num == 1:
+        return '1 byte'
     exponent = min(int(log(num, 1024)), len(unit_list) - 1)
     quotient = float(num) / 1024**exponent
     unit, num_decimals = unit_list[exponent]
     format_string = '{:.%sf} {}' % (num_decimals)
     return format_string.format(quotient, unit)
-    if num == 0: # pragma: no cover
-        return '0 bytes'
-    if num == 1: # pragma: no cover
-        return '1 byte'


 def du(path):
-    """Like du -sb, returns total size of path in bytes."""
-    size = os.path.getsize(path)
+    """Like du -sb, returns total size of path in bytes.  Ignore
+    errors that might occur if we encounter broken symlinks or
+    files in the process of being removed."""
+    try:
+        st = os.stat(path)
+        size = st.st_blocks * 512
         if os.path.isdir(path):
             for thisfile in os.listdir(path):
                 filepath = os.path.join(path, thisfile)
                 size += du(filepath)
         return size
+    except OSError as e:
+        if e.errno != errno.ENOENT:
+            raise
+        return 0
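The switch from os.path.getsize() to st.st_blocks * 512 matters because st_blocks counts allocated disk blocks rather than the apparent file length, and allocation is what shrinks when holes are punched in a file (see the fallocate changes below). A standalone sketch on a sparse file (Unix only; the /tmp path is just an example):

import os

path = "/tmp/sparse_demo"
with open(path, "wb") as f:
    f.truncate(100 * 1024 * 1024)   # 100 MiB apparent size, nothing written

st = os.stat(path)
print(os.path.getsize(path))        # 104857600 (apparent size)
print(st.st_blocks * 512)           # ~0 on filesystems that create sparse files
os.unlink(path)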
@@ -1,49 +1,20 @@
 # Implementation of hole punching via fallocate, if the OS
 # and filesystem support it.

-try:
-    import os
-    import ctypes
-    import ctypes.util
-
-    def make_fallocate():
-        libc_name = ctypes.util.find_library('c')
-        libc = ctypes.CDLL(libc_name, use_errno=True)
-
-        _fallocate = libc.fallocate
-        _fallocate.restype = ctypes.c_int
-        _fallocate.argtypes = [ ctypes.c_int, ctypes.c_int,
-                                ctypes.c_int64, ctypes.c_int64 ]
-
-        del libc
-        del libc_name
-
-        def fallocate(fd, mode, offset, len_):
-            res = _fallocate(fd, mode, offset, len_)
-            if res != 0: # pragma: no cover
-                errno = ctypes.get_errno()
-                raise IOError(errno, os.strerror(errno))
-        return fallocate
-
-    fallocate = make_fallocate()
-    del make_fallocate
-except Exception: # pragma: no cover
-    fallocate = None
-
-FALLOC_FL_KEEP_SIZE = 0x01
-FALLOC_FL_PUNCH_HOLE = 0x02
-
-def punch_hole(filename, offset, length, ignore_errors = True):
+import fallocate
+
+
+def punch_hole(filename, offset, length, ignore_errors=True):
     """Punch a hole in the file.  This isn't well supported, so errors
     are ignored by default."""
     try:
-        if fallocate is None: # pragma: no cover
-            raise IOError("fallocate not available")
         with open(filename, "r+") as f:
-            fallocate(f.fileno(),
-                      FALLOC_FL_KEEP_SIZE | FALLOC_FL_PUNCH_HOLE,
-                      offset, length)
-    except IOError: # pragma: no cover
+            fallocate.fallocate(
+                f.fileno(),
+                offset,
+                length,
+                fallocate.FALLOC_FL_KEEP_SIZE | fallocate.FALLOC_FL_PUNCH_HOLE)
+    except Exception:
         if ignore_errors:
             return
         raise
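A standalone sketch of the new code path (Linux only; assumes the third-party fallocate package used above is installed and the filesystem supports FALLOC_FL_PUNCH_HOLE):

import os
import fallocate

path = "/tmp/punch_demo"
with open(path, "wb") as f:
    f.write(b"x" * (1 << 20))          # 1 MiB of real data

before = os.stat(path).st_blocks
with open(path, "r+") as f:
    fallocate.fallocate(
        f.fileno(), 0, 1 << 20,
        fallocate.FALLOC_FL_KEEP_SIZE | fallocate.FALLOC_FL_PUNCH_HOLE)
after = os.stat(path).st_blocks

print(before, "->", after)             # allocated blocks drop; apparent size is unchanged
os.unlink(path)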
@@ -1,5 +1,6 @@
 """Interval.  Like nilmdb.server.interval, but re-implemented here
-in plain Python so clients have easier access to it.
+in plain Python so clients have easier access to it, and with a few
+helper functions.

 Intervals are half-open, ie. they include data points with timestamps
 [start, end)
@@ -8,10 +9,12 @@ Intervals are half-open, ie. they include data points with timestamps
 import nilmdb.utils.time
 import nilmdb.utils.iterator
+
+
 class IntervalError(Exception):
     """Error due to interval overlap, etc"""
     pass
+
+
 # Interval
 class Interval:
     """Represents an interval of time."""
@@ -21,7 +24,7 @@ class Interval:
         'start' and 'end' are arbitrary numbers that represent time
         """
         if start >= end:
-            # Explicitly disallow zero-width intervals (since they're half-open)
+            # Explicitly disallow zero-width intervals, since they're half-open
            raise IntervalError("start %s must precede end %s" % (start, end))
         self.start = start
         self.end = end
@@ -34,9 +37,28 @@ class Interval:
         return ("[" + nilmdb.utils.time.timestamp_to_string(self.start) +
                 " -> " + nilmdb.utils.time.timestamp_to_string(self.end) + ")")

-    def __cmp__(self, other):
-        """Compare two intervals.  If non-equal, order by start then end"""
-        return cmp(self.start, other.start) or cmp(self.end, other.end)
+    def human_string(self):
+        return ("[ " + nilmdb.utils.time.timestamp_to_human(self.start) +
+                " -> " + nilmdb.utils.time.timestamp_to_human(self.end) + " ]")
+
+    # Compare two intervals.  If non-equal, order by start then end
+    def __lt__(self, other):
+        return (self.start, self.end) < (other.start, other.end)
+
+    def __gt__(self, other):
+        return (self.start, self.end) > (other.start, other.end)
+
+    def __le__(self, other):
+        return (self.start, self.end) <= (other.start, other.end)
+
+    def __ge__(self, other):
+        return (self.start, self.end) >= (other.start, other.end)
+
+    def __eq__(self, other):
+        return (self.start, self.end) == (other.start, other.end)
+
+    def __ne__(self, other):
+        return (self.start, self.end) != (other.start, other.end)
+
     def intersects(self, other):
         """Return True if two Interval objects intersect"""
@@ -53,18 +75,12 @@ class Interval:
             raise IntervalError("not a subset")
         return Interval(start, end)

-def set_difference(a, b):
-    """
-    Compute the difference (a \\ b) between the intervals in 'a' and
-    the intervals in 'b'; i.e., the ranges that are present in 'self'
-    but not 'other'.
-
-    'a' and 'b' must both be iterables.
-
-    Returns a generator that yields each interval in turn.
-    Output intervals are built as subsets of the intervals in the
-    first argument (a).
-    """
+
+def _interval_math_helper(a, b, op, subset=True):
+    """Helper for set_difference, intersection functions,
+    to compute interval subsets based on a math operator on ranges
+    present in A and B.  Subsets are computed from A, or new intervals
+    are generated if subset = False."""
     # Iterate through all starts and ends in sorted order.  Add a
     # tag to the iterator so that we can figure out which one they
     # were, after sorting.
@@ -79,28 +95,74 @@ def set_difference(a, b):
     # At each point, evaluate which type of end it is, to determine
     # how to build up the output intervals.
     a_interval = None
-    b_interval = None
+    in_a = False
+    in_b = False
     out_start = None
     for (ts, k, i) in nilmdb.utils.iterator.imerge(a_iter, b_iter):
         if k == 0:
-            # start a interval
             a_interval = i
-            if b_interval is None:
-                out_start = ts
+            in_a = True
         elif k == 1:
-            # start b interval
-            b_interval = i
-            if out_start is not None and out_start != ts:
-                yield a_interval.subset(out_start, ts)
-            out_start = None
+            in_b = True
         elif k == 2:
-            # end a interval
-            if out_start is not None and out_start != ts:
-                yield a_interval.subset(out_start, ts)
-            out_start = None
-            a_interval = None
-        elif k == 3:
-            # end b interval
-            b_interval = None
-            if a_interval:
+            in_a = False
+        else:  # k == 3
+            in_b = False
+        include = op(in_a, in_b)
+        if include and out_start is None:
             out_start = ts
+        elif not include:
+            if out_start is not None and out_start != ts:
+                if subset:
+                    yield a_interval.subset(out_start, ts)
+                else:
+                    yield Interval(out_start, ts)
+            out_start = None
+
+
+def set_difference(a, b):
+    """
+    Compute the difference (a \\ b) between the intervals in 'a' and
+    the intervals in 'b'; i.e., the ranges that are present in 'a'
+    but not 'b'.
+
+    'a' and 'b' must both be iterables.
+
+    Returns a generator that yields each interval in turn.
+    Output intervals are built as subsets of the intervals in the
+    first argument (a).
+    """
+    return _interval_math_helper(a, b, (lambda a, b: a and not b))
+
+
+def intersection(a, b):
+    """
+    Compute the intersection between the intervals in 'a' and the
+    intervals in 'b'; i.e., the ranges that are present in both 'a'
+    and 'b'.
+
+    'a' and 'b' must both be iterables.
+
+    Returns a generator that yields each interval in turn.
+    Output intervals are built as subsets of the intervals in the
+    first argument (a).
+    """
+    return _interval_math_helper(a, b, (lambda a, b: a and b))
+
+
+def optimize(it):
+    """
+    Given an iterable 'it' with intervals, optimize them by joining
+    together intervals that are adjacent in time, and return a generator
+    that yields the new intervals.
+    """
+    saved_int = None
+    for interval in it:
+        if saved_int is not None:
+            if saved_int.end == interval.start:
+                interval.start = saved_int.start
+            else:
+                yield saved_int
+        saved_int = interval
+    if saved_int is not None:
+        yield saved_int
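A sketch of the new helpers in use (module path assumed to be nilmdb.utils.interval, matching the imports above; start and end are arbitrary numbers per the Interval docstring):

from nilmdb.utils.interval import Interval, set_difference, intersection, optimize

a = [Interval(0, 10), Interval(20, 30)]
b = [Interval(5, 25)]

for i in set_difference(a, b):
    print(i.start, i.end)      # 0 5, then 25 30
for i in intersection(a, b):
    print(i.start, i.end)      # 5 10, then 20 25
for i in optimize([Interval(0, 5), Interval(5, 10)]):
    print(i.start, i.end)      # 0 10 (adjacent intervals are joined)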
@@ -2,6 +2,8 @@

 # Iterator merging, based on http://code.activestate.com/recipes/491285/
 import heapq
+
+
 def imerge(*iterables):
     '''Merge multiple sorted inputs into a single sorted output.

@@ -17,8 +19,8 @@ def imerge(*iterables):
     h_append = h.append
     for it in map(iter, iterables):
         try:
-            next = it.next
-            h_append([next(), next])
+            nexter = it.__next__
+            h_append([nexter(), nexter])
         except _Stop:
             pass
     heapq.heapify(h)
@@ -26,9 +28,9 @@ def imerge(*iterables):
     while 1:
         try:
             while 1:
-                v, next = s = h[0]      # raises IndexError when h is empty
+                v, nexter = s = h[0]    # raises IndexError when h is empty
                 yield v
-                s[0] = next()           # raises StopIteration when exhausted
+                s[0] = nexter()         # raises StopIteration when exhausted
                 siftup(h, 0)            # restore heap condition
         except _Stop:
             heappop(h)                  # remove empty iterator
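A quick sanity check of imerge(), whose behavior matches heapq.merge from the standard library (module path nilmdb.utils.iterator, as imported by interval.py above):

from nilmdb.utils.iterator import imerge

print(list(imerge([1, 4, 7], [2, 5, 8], [3, 6, 9])))
# [1, 2, 3, 4, 5, 6, 7, 8, 9]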
@@ -1,100 +0,0 @@
import Queue
import threading
import sys
import contextlib

# This file provides a context manager that converts a function
# that takes a callback into a generator that returns an iterable.
# This is done by running the function in a new thread.

# Based partially on http://stackoverflow.com/questions/9968592/

class IteratorizerThread(threading.Thread):
    def __init__(self, queue, function, curl_hack):
        """
        function: function to execute, which takes the
        callback (provided by this class) as an argument
        """
        threading.Thread.__init__(self)
        self.name = "Iteratorizer-" + function.__name__ + "-" + self.name
        self.function = function
        self.queue = queue
        self.die = False
        self.curl_hack = curl_hack

    def callback(self, data):
        try:
            if self.die:
                raise Exception() # trigger termination
            self.queue.put((1, data))
        except:
            if self.curl_hack:
                # We can't raise exceptions, because the pycurl
                # extension module will unconditionally print the
                # exception itself, and not pass it up to the caller.
                # Instead, just return a value that tells curl to
                # abort.  (-1 would be best, in case we were given 0
                # bytes, but the extension doesn't support that).
                self.queue.put((2, sys.exc_info()))
                return 0
            raise

    def run(self):
        try:
            result = self.function(self.callback)
        except:
            self.queue.put((2, sys.exc_info()))
        else:
            self.queue.put((0, result))

@contextlib.contextmanager
def Iteratorizer(function, curl_hack = False):
    """
    Context manager that takes a function expecting a callback,
    and provides an iterable that yields the values passed to that
    callback instead.

    function: function to execute, which takes a callback
    (provided by this context manager) as an argument

        with iteratorizer(func) as it:
            for i in it:
                print 'callback was passed:', i
        print 'function returned:', it.retval
    """
    queue = Queue.Queue(maxsize = 1)
    thread = IteratorizerThread(queue, function, curl_hack)
    thread.daemon = True
    thread.start()

    class iteratorizer_gen(object):
        def __init__(self, queue):
            self.queue = queue
            self.retval = None

        def __iter__(self):
            return self

        def next(self):
            (typ, data) = self.queue.get()
            if typ == 0:
                # function has returned
                self.retval = data
                raise StopIteration
            elif typ == 1:
                # data is available
                return data
            else:
                # callback raised an exception
                raise data[0], data[1], data[2]

    try:
        yield iteratorizer_gen(queue)
    finally:
        # Ask the thread to die, if it's still running.
        thread.die = True
        while thread.isAlive():
            try:
                queue.get(True, 0.01)
            except: # pragma: no cover
                pass
nilmdb/utils/lock.py (new file, +22 lines)
@@ -0,0 +1,22 @@
# File locking

import fcntl
import errno


def exclusive_lock(f):
    """Acquire an exclusive lock.  Returns True on successful
    lock, or False on error."""
    try:
        fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError as e:
        if e.errno in (errno.EACCES, errno.EAGAIN):
            return False
        else:
            raise
    return True


def exclusive_unlock(f):
    """Release an exclusive lock."""
    fcntl.flock(f.fileno(), fcntl.LOCK_UN)
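A sketch of the new locking helpers in use (POSIX only; the lock file path here is just an example):

from nilmdb.utils.lock import exclusive_lock, exclusive_unlock

with open("/tmp/mydb.lock", "w") as f:
    if not exclusive_lock(f):
        raise SystemExit("another process holds the lock")
    try:
        pass    # ... exclusive access to the shared resource here ...
    finally:
        exclusive_unlock(f)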
@@ -6,10 +6,11 @@
 import collections
 import decorator

-def lru_cache(size = 10, onremove = None, keys = slice(None)):
+
+def lru_cache(size=10, onremove=None, keys=slice(None)):
     """Least-recently-used cache decorator.

-    @lru_cache(size = 10, onevict = None)
+    @lru_cache(size=10, onremove=None)
     def f(...):
         pass
@@ -53,14 +54,17 @@ def lru_cache(size = 10, onremove = None, keys = slice(None)):
             if key in cache:
                 evict(cache.pop(key))
             else:
-                if len(cache) > 0 and len(args) != len(cache.iterkeys().next()):
+                if cache:
+                    if len(args) != len(next(iter(cache.keys()))):
                         raise KeyError("trying to remove from LRU cache, but "
                                        "number of arguments doesn't match the "
                                        "cache key length")

         def cache_remove_all():
+            nonlocal cache
             for key in cache:
-                evict(cache.pop(key))
+                evict(cache[key])
+            cache = collections.OrderedDict()

         def cache_info():
             return (func.cache_hits, func.cache_misses)
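A sketch of the decorator in use (the import path and the cache_info() accessor are assumed from the helpers defined in this hunk; onremove receives each evicted value):

from nilmdb.utils.lrucache import lru_cache

@lru_cache(size=2, onremove=lambda v: print("evicted", v))
def square(x):
    return x * x

square(2)
square(3)
square(4)                   # cache holds 2 entries, so this evicts square(2)'s value
print(square.cache_info())  # assumed to return (cache_hits, cache_misses)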
nilmdb/utils/mustclose.py
@@ -1,9 +1,10 @@
-from nilmdb.utils.printf import *
 import sys
 import inspect
 import decorator
+from nilmdb.utils.printf import fprintf
 
-def must_close(errorfile = sys.stderr, wrap_verify = False):
+
+def must_close(errorfile=sys.stderr, wrap_verify=False):
     """Class decorator that warns on 'errorfile' at deletion time if
     the class's close() member wasn't called.
 
@@ -12,12 +13,17 @@ def must_close(errorfile = sys.stderr, wrap_verify = False):
     already been called."""
     def class_decorator(cls):
 
+        def is_method_or_function(x):
+            return inspect.ismethod(x) or inspect.isfunction(x)
+
         def wrap_class_method(wrapper):
             try:
-                orig = getattr(cls, wrapper.__name__).im_func
-            except:
+                orig = getattr(cls, wrapper.__name__)
+            except AttributeError:
                 orig = lambda x: None
-            setattr(cls, wrapper.__name__, decorator.decorator(wrapper, orig))
+            if is_method_or_function(orig):
+                setattr(cls, wrapper.__name__,
+                        decorator.decorator(wrapper, orig))
 
         @wrap_class_method
         def __init__(orig, self, *args, **kwargs):
@@ -28,10 +34,13 @@ def must_close(errorfile = sys.stderr, wrap_verify = False):
 
         @wrap_class_method
         def __del__(orig, self, *args, **kwargs):
-            if "_must_close" in self.__dict__:
-                fprintf(errorfile, "error: %s.close() wasn't called!\n",
-                        self.__class__.__name__)
-            return orig(self, *args, **kwargs)
+            try:
+                if "_must_close" in self.__dict__:
+                    fprintf(errorfile, "error: %s.close() wasn't called!\n",
+                            self.__class__.__name__)
+                return orig(self, *args, **kwargs)
+            except:
+                pass
 
         @wrap_class_method
         def close(orig, self, *args, **kwargs):
@@ -46,16 +55,17 @@ def must_close(errorfile = sys.stderr, wrap_verify = False):
                 raise AssertionError("called " + str(orig) + " after close")
             return orig(self, *args, **kwargs)
         if wrap_verify:
-            for (name, method) in inspect.getmembers(cls, inspect.ismethod):
-                # Skip class methods
-                if method.__self__ is not None:
-                    continue
+            for (name, method) in inspect.getmembers(cls,
+                                                     is_method_or_function):
                 # Skip some methods
-                if name in [ "__del__", "__init__" ]:
+                if name in ["__del__", "__init__"]:
                     continue
                 # Set up wrapper
-                setattr(cls, name, decorator.decorator(verifier,
-                                                       method.im_func))
+                if inspect.ismethod(method):
+                    func = method.__func__
+                else:
+                    func = method
+                setattr(cls, name, decorator.decorator(verifier, func))
 
         return cls
     return class_decorator
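For orientation, a sketch of the decorator's runtime behavior. It assumes must_close is re-exported from nilmdb.utils, and that the __init__ wrapper (not shown in this hunk) is what sets the _must_close flag that __del__ checks above.

    import nilmdb.utils   # assumed export location

    @nilmdb.utils.must_close()
    class Resource:
        def close(self):
            pass

    r = Resource()
    del r        # prints "error: Resource.close() wasn't called!" on stderr

    r = Resource()
    r.close()    # clears the flag
    del r        # silent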
@@ -1,9 +1,13 @@
 | 
				
			|||||||
"""printf, fprintf, sprintf"""
 | 
					"""printf, fprintf, sprintf"""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from __future__ import print_function
 | 
					
 | 
				
			||||||
def printf(_str, *args):
 | 
					def printf(_str, *args):
 | 
				
			||||||
    print(_str % args, end='')
 | 
					    print(_str % args, end='')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def fprintf(_file, _str, *args):
 | 
					def fprintf(_file, _str, *args):
 | 
				
			||||||
    print(_str % args, end='', file=_file)
 | 
					    print(_str % args, end='', file=_file)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def sprintf(_str, *args):
 | 
					def sprintf(_str, *args):
 | 
				
			||||||
    return (_str % args)
 | 
					    return (_str % args)
 | 
				
			||||||
 
 | 
				
			|||||||
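These are thin C-style wrappers over print() and %-formatting; under Python 3 the __future__ import is redundant, which is all this hunk removes beyond spacing. For reference:

    import sys
    from nilmdb.utils.printf import printf, fprintf, sprintf

    printf("%d items\n", 5)                    # "5 items" on stdout, no implicit newline
    fprintf(sys.stderr, "oops: %s\n", "disk")  # same, to a chosen file
    label = sprintf("%08.3f", 3.14159)         # returns "0003.142"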
nilmdb/utils/serializer.py
@@ -1,10 +1,6 @@
-import Queue
+import queue
 import threading
 import sys
-import decorator
-import inspect
-import types
-import functools
 
 # This file provides a class that will wrap an object and serialize
 # all calls to its methods.  All calls to that object will be queued
@@ -13,6 +9,7 @@ import functools
 
 # Based partially on http://stackoverflow.com/questions/2642515/
 
+
 class SerializerThread(threading.Thread):
     """Thread that retrieves call information from the queue, makes the
     call, and returns the results."""
@@ -40,6 +37,7 @@ class SerializerThread(threading.Thread):
             result_queue.put((exception, result))
             del exception, result
 
+
 def serializer_proxy(obj_or_type):
     """Wrap the given object or type in a SerializerObjectProxy.
 
@@ -49,61 +47,88 @@ def serializer_proxy(obj_or_type):
     The proxied requests, including instantiation, are performed in a
     single thread and serialized between caller threads.
     """
-    class SerializerCallProxy(object):
+    class SerializerCallProxy():
         def __init__(self, call_queue, func, objectproxy):
             self.call_queue = call_queue
             self.func = func
             # Need to hold a reference to object proxy so it doesn't
             # go away (and kill the thread) until after get called.
             self.objectproxy = objectproxy
 
         def __call__(self, *args, **kwargs):
-            result_queue = Queue.Queue()
+            result_queue = queue.Queue()
             self.call_queue.put((result_queue, self.func, args, kwargs))
-            ( exc_info, result ) = result_queue.get()
+            (exc_info, result) = result_queue.get()
             if exc_info is None:
                 return result
             else:
-                raise exc_info[0], exc_info[1], exc_info[2]
+                raise exc_info[1].with_traceback(exc_info[2])
 
-    class SerializerObjectProxy(object):
+    class SerializerObjectProxy():
         def __init__(self, obj_or_type, *args, **kwargs):
             self.__object = obj_or_type
-            try:
-                if type(obj_or_type) in (types.TypeType, types.ClassType):
-                    classname = obj_or_type.__name__
-                else:
-                    classname = obj_or_type.__class__.__name__
-            except AttributeError: # pragma: no cover
-                classname = "???"
-            self.__call_queue = Queue.Queue()
+            if isinstance(obj_or_type, type):
+                classname = obj_or_type.__name__
+            else:
+                classname = obj_or_type.__class__.__name__
+            self.__call_queue = queue.Queue()
             self.__thread = SerializerThread(classname, self.__call_queue)
             self.__thread.daemon = True
             self.__thread.start()
             self._thread_safe = True
 
         def __getattr__(self, key):
-            if key.startswith("_SerializerObjectProxy__"): # pragma: no cover
-                raise AttributeError
+            # If the attribute is a function, we want to return a
+            # proxy that will perform the call through the serializer
+            # when called.  Otherwise, we want to return the value
+            # directly.  This means we need to grab the attribute once,
+            # and therefore self.__object.__getattr__ may be called
+            # in an unsafe way, from the caller's thread.
             attr = getattr(self.__object, key)
             if not callable(attr):
+                # It's not callable, so perform the getattr from within
+                # the serializer thread, then return its value.
+                # That may differ from the "attr" value we just grabbed
+                # from here, due to forced ordering in the serializer.
                 getter = SerializerCallProxy(self.__call_queue, getattr, self)
                 return getter(self.__object, key)
-            r = SerializerCallProxy(self.__call_queue, attr, self)
-            return r
+            else:
+                # It is callable, so return an object that will proxy through
+                # the serializer when called.
+                r = SerializerCallProxy(self.__call_queue, attr, self)
+                return r
+
+        # For an iterable object, on __iter__(), save the object's
+        # iterator and return this proxy.  On next(), call the object's
+        # iterator through this proxy.
+        def __iter__(self):
+            attr = getattr(self.__object, "__iter__")
+            self.__iter = SerializerCallProxy(self.__call_queue, attr, self)()
+            return self
+
+        def __next__(self):
+            return SerializerCallProxy(self.__call_queue,
+                                       self.__iter.__next__, self)()
+
+        def __getitem__(self, key):
+            return self.__getattr__("__getitem__")(key)
 
         def __call__(self, *args, **kwargs):
             """Call this to instantiate the type, if a type was passed
             to serializer_proxy.  Otherwise, pass the call through."""
             ret = SerializerCallProxy(self.__call_queue,
                                       self.__object, self)(*args, **kwargs)
-            if type(self.__object) in (types.TypeType, types.ClassType):
+            if isinstance(self.__object, type):
                 # Instantiation
                 self.__object = ret
                 return self
             return ret
 
         def __del__(self):
-            self.__call_queue.put((None, None, None, None))
-            self.__thread.join()
+            try:
+                # Signal thread to exit, but don't wait for it.
+                self.__call_queue.put((None, None, None, None))
+            except:
+                pass
 
     return SerializerObjectProxy(obj_or_type)
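A sketch of the intended use: a class that is not thread-safe has every call funneled through the single worker thread, so concurrent callers can no longer interleave inside its methods. The import location is an assumption; elsewhere the tree refers to this as nilmdb.utils.serializer_proxy.

    import threading
    from nilmdb.utils import serializer_proxy   # assumed export

    class Counter:
        def __init__(self):
            self.n = 0
        def bump(self):
            self.n += 1   # read-modify-write; racy if called directly from threads

    proxy = serializer_proxy(Counter)()   # instantiation also runs in the worker

    def work():
        for _ in range(1000):
            proxy.bump()

    threads = [threading.Thread(target=work) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print(proxy.n)   # 4000: every bump() ran one at a time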
							
								
								
									
nilmdb/utils/sort.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+import re
+
+
+def sort_human(items, key=None):
+    """Human-friendly sort (/stream/2 before /stream/10)"""
+    def to_num(val):
+        try:
+            return int(val)
+        except Exception:
+            return val
+
+    def human_key(text):
+        if key:
+            text = key(text)
+        # Break into character and numeric chunks.
+        chunks = re.split(r'([0-9]+)', text)
+        return [to_num(c) for c in chunks]
+
+    return sorted(items, key=human_key)
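Its effect, next to plain lexicographic sorting:

    from nilmdb.utils.sort import sort_human

    paths = ["/stream/10", "/stream/2", "/stream/1"]
    print(sorted(paths))      # ['/stream/1', '/stream/10', '/stream/2']
    print(sort_human(paths))  # ['/stream/1', '/stream/2', '/stream/10']

    # key= extracts the text to sort on from richer items:
    rows = [("/a/10", 1), ("/a/9", 2)]
    print(sort_human(rows, key=lambda r: r[0]))  # [('/a/9', 2), ('/a/10', 1)]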
nilmdb/utils/threadsafety.py
@@ -1,26 +1,25 @@
-from nilmdb.utils.printf import *
 import threading
-import warnings
-import types
+from nilmdb.utils.printf import sprintf
 
-def verify_proxy(obj_or_type, exception = False, check_thread = True,
-                 check_concurrent = True):
+
+def verify_proxy(obj_or_type, check_thread=True,
+                 check_concurrent=True):
     """Wrap the given object or type in a VerifyObjectProxy.
 
     Returns a VerifyObjectProxy that proxies all method calls to the
     given object, as well as attribute retrievals.
 
-    When calling methods, the following checks are performed.  If
-    exception is True, an exception is raised.  Otherwise, a warning
-    is printed.
+    When calling methods, the following checks are performed.  On
+    failure, an exception is raised.
 
-    check_thread = True     # Warn/fail if two different threads call methods.
-    check_concurrent = True # Warn/fail if two functions are concurrently
+    check_thread = True     # Fail if two different threads call methods.
+    check_concurrent = True # Fail if two functions are concurrently
                             # run through this proxy
     """
-    class Namespace(object):
+    class Namespace():
         pass
-    class VerifyCallProxy(object):
+
+    class VerifyCallProxy():
         def __init__(self, func, parent_namespace):
             self.func = func
             self.parent_namespace = parent_namespace
@@ -42,22 +41,16 @@ def verify_proxy(obj_or_type, exception = False, check_thread = True,
                               " but %s called %s.%s",
                               p.thread.name, p.classname, p.thread_callee,
                               this.name, p.classname, callee)
-                if exception:
-                    raise AssertionError(err)
-                else: # pragma: no cover
-                    warnings.warn(err)
+                raise AssertionError(err)
 
             need_concur_unlock = False
             if check_concurrent:
-                if p.concur_lock.acquire(False) == False:
+                if not p.concur_lock.acquire(False):
                     err = sprintf("unsafe concurrency: %s called %s.%s "
                                   "while %s is still in %s.%s",
                                   this.name, p.classname, callee,
                                   p.concur_tname, p.classname, p.concur_callee)
-                    if exception:
-                        raise AssertionError(err)
-                    else: # pragma: no cover
-                        warnings.warn(err)
+                    raise AssertionError(err)
                 else:
                     p.concur_tname = this.name
                     p.concur_callee = callee
@@ -70,7 +63,7 @@ def verify_proxy(obj_or_type, exception = False, check_thread = True,
                     p.concur_lock.release()
             return ret
 
-    class VerifyObjectProxy(object):
+    class VerifyObjectProxy():
         def __init__(self, obj_or_type, *args, **kwargs):
             p = Namespace()
             self.__ns = p
@@ -80,17 +73,12 @@ def verify_proxy(obj_or_type, exception = False, check_thread = True,
             p.concur_tname = None
             p.concur_callee = None
             self.__obj = obj_or_type
-            try:
-                if type(obj_or_type) in (types.TypeType, types.ClassType):
-                    p.classname = self.__obj.__name__
-                else:
-                    p.classname = self.__obj.__class__.__name__
-            except AttributeError: # pragma: no cover
-                p.classname = "???"
+            if isinstance(obj_or_type, type):
+                p.classname = self.__obj.__name__
+            else:
+                p.classname = self.__obj.__class__.__name__
 
         def __getattr__(self, key):
-            if key.startswith("_VerifyObjectProxy__"): # pragma: no cover
-                raise AttributeError
             attr = getattr(self.__obj, key)
             if not callable(attr):
                 return VerifyCallProxy(getattr, self.__ns)(self.__obj, key)
@@ -100,7 +88,7 @@ def verify_proxy(obj_or_type, exception = False, check_thread = True,
             """Call this to instantiate the type, if a type was passed
             to verify_proxy.  Otherwise, pass the call through."""
             ret = VerifyCallProxy(self.__obj, self.__ns)(*args, **kwargs)
-            if type(self.__obj) in (types.TypeType, types.ClassType):
+            if isinstance(self.__obj, type):
                 # Instantiation
                 self.__obj = ret
                 return self
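A sketch of what the proxy now catches, with the import path assumed as in the other utils modules. The AssertionError is raised in whichever thread misbehaves, so here it surfaces as a traceback from the second thread.

    import threading
    from nilmdb.utils import verify_proxy   # assumed export

    class Unsafe:
        def work(self):
            return 42

    obj = verify_proxy(Unsafe)()
    obj.work()          # fine: always called from the creating thread so far

    def elsewhere():
        obj.work()      # raises AssertionError: unsafe threading

    t = threading.Thread(target=elsewhere)
    t.start()
    t.join()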
nilmdb/utils/time.py
@@ -1,23 +1,25 @@
-from nilmdb.utils import datetime_tz
 import re
 import time
+import datetime_tz
+
 
 # Range
 min_timestamp = (-2**63)
-max_timestamp = (2**62 - 1)
+max_timestamp = (2**63 - 1)
 
 # Smallest representable step
 epsilon = 1
 
-def string_to_timestamp(str):
+
+def string_to_timestamp(string):
     """Convert a string that represents an integer number of microseconds
     since epoch."""
     try:
         # Parse a string like "1234567890123456" and return an integer
-        return int(str)
+        return int(string)
     except ValueError:
         # Try parsing as a float, in case it's "1234567890123456.0"
-        return int(round(float(str)))
+        return int(round(float(string)))
@@ -26,38 +28,68 @@ def timestamp_to_string(timestamp):
     else:
         return str(timestamp)
 
+
+def timestamp_to_bytes(timestamp):
+    """Convert a timestamp (integer microseconds since epoch) to a Python
+    bytes object"""
+    return timestamp_to_string(timestamp).encode('utf-8')
+
+
 def timestamp_to_human(timestamp):
     """Convert a timestamp (integer microseconds since epoch) to a
     human-readable string, using the local timezone for display
     (e.g. from the TZ env var)."""
+    if timestamp == min_timestamp:
+        return "(minimum)"
+    if timestamp == max_timestamp:
+        return "(maximum)"
     dt = datetime_tz.datetime_tz.fromtimestamp(timestamp_to_unix(timestamp))
     return dt.strftime("%a, %d %b %Y %H:%M:%S.%f %z")
 
+
 def unix_to_timestamp(unix):
     """Convert a Unix timestamp (floating point seconds since epoch)
     into a NILM timestamp (integer microseconds since epoch)"""
     return int(round(unix * 1e6))
-seconds_to_timestamp = unix_to_timestamp
 
+
 def timestamp_to_unix(timestamp):
     """Convert a NILM timestamp (integer microseconds since epoch)
     into a Unix timestamp (floating point seconds since epoch)"""
     return timestamp / 1e6
+
+
+seconds_to_timestamp = unix_to_timestamp
 timestamp_to_seconds = timestamp_to_unix
 
-def rate_to_period(hz, cycles = 1):
+
+def rate_to_period(hz, cycles=1):
     """Convert a rate (in Hz) to a period (in timestamp units).
     Returns an integer."""
     period = unix_to_timestamp(cycles) / float(hz)
     return int(round(period))
 
+
 def parse_time(toparse):
     """
     Parse a free-form time string and return a nilmdb timestamp
-    (integer seconds since epoch).  If the string doesn't contain a
+    (integer microseconds since epoch).  If the string doesn't contain a
     timestamp, the current local timezone is assumed (e.g. from the TZ
     env var).
     """
+    if toparse == "min":
+        return min_timestamp
+    if toparse == "max":
+        return max_timestamp
+
+    # If it starts with @, treat it as a NILM timestamp
+    # (integer microseconds since epoch)
+    try:
+        if toparse[0] == '@':
+            return int(toparse[1:])
+    except (ValueError, KeyError, IndexError):
+        pass
+
     # If string isn't "now" and doesn't contain at least 4 digits,
     # consider it invalid.  smartparse might otherwise accept
     # empty strings and strings with just separators.
@@ -68,15 +100,7 @@ def parse_time(toparse):
     try:
         return unix_to_timestamp(datetime_tz.datetime_tz.
                                  smartparse(toparse).totimestamp())
-    except (ValueError, OverflowError):
-        pass
-
-    # If it starts with @, treat it as a NILM timestamp
-    # (integer microseconds since epoch)
-    try:
-        if toparse[0] == '@':
-            return int(toparse[1:])
-    except (ValueError, KeyError):
+    except (ValueError, OverflowError, TypeError):
         pass
 
     # If it's parseable as a float, treat it as a Unix or NILM
@@ -84,9 +108,9 @@ def parse_time(toparse):
     try:
         val = float(toparse)
         # range is from about year 2001 - 2128
-        if val > 1e9 and val < 5e9:
+        if 1e9 < val < 5e9:
             return unix_to_timestamp(val)
-        if val > 1e15 and val < 5e15:
+        if 1e15 < val < 5e15:
             return val
     except ValueError:
         pass
@@ -118,6 +142,7 @@ def parse_time(toparse):
     # just give up for now.
     raise ValueError("unable to parse timestamp")
 
+
 def now():
     """Return current timestamp"""
     return unix_to_timestamp(time.time())
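A few round-trips through the module as it stands after this change; the accepted parse_time spellings shown here follow directly from the hunks above.

    import nilmdb.utils.time as nilmtime

    # NILM timestamps are integer microseconds since the Unix epoch:
    t = nilmtime.unix_to_timestamp(1234567890.0)
    print(t)                               # 1234567890000000
    print(nilmtime.timestamp_to_bytes(t))  # b'1234567890000000'

    assert nilmtime.parse_time("@1234567890000000") == t  # literal NILM timestamp
    assert nilmtime.parse_time("1234567890.0") == t       # Unix seconds (2001-2128 range)
    assert nilmtime.parse_time("min") == nilmtime.min_timestamp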
nilmdb/utils/timer.py
@@ -5,18 +5,17 @@
 #   with nilmdb.utils.Timer("flush"):
 #       foo.flush()
 
-from __future__ import print_function
-from __future__ import absolute_import
 import contextlib
 import time
 
+
 @contextlib.contextmanager
-def Timer(name = None, tosyslog = False):
+def Timer(name=None, tosyslog=False):
     start = time.time()
     yield
     elapsed = int((time.time() - start) * 1000)
     msg = (name or 'elapsed') + ": " + str(elapsed) + " ms"
-    if tosyslog: # pragma: no cover
+    if tosyslog:
         import syslog
         syslog.syslog(msg)
     else:
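Usage, per the module's own header comment:

    import time
    import nilmdb.utils

    # Prints something like "sort: 12 ms" when the block exits.
    with nilmdb.utils.Timer("sort"):
        time.sleep(0.012)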
@@ -1,16 +1,17 @@
 | 
				
			|||||||
"""File-like objects that add timestamps to the input lines"""
 | 
					"""File-like objects that add timestamps to the input lines"""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from nilmdb.utils.printf import *
 | 
					from nilmdb.utils.printf import sprintf
 | 
				
			||||||
import nilmdb.utils.time
 | 
					import nilmdb.utils.time
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class Timestamper(object):
 | 
					
 | 
				
			||||||
 | 
					class Timestamper():
 | 
				
			||||||
    """A file-like object that adds timestamps to lines of an input file."""
 | 
					    """A file-like object that adds timestamps to lines of an input file."""
 | 
				
			||||||
    def __init__(self, infile, ts_iter):
 | 
					    def __init__(self, infile, ts_iter):
 | 
				
			||||||
        """file: filename, or another file-like object
 | 
					        """file: filename, or another file-like object
 | 
				
			||||||
           ts_iter: iterator that returns a timestamp string for
 | 
					           ts_iter: iterator that returns a timestamp string for
 | 
				
			||||||
           each line of the file"""
 | 
					           each line of the file"""
 | 
				
			||||||
        if isinstance(infile, basestring):
 | 
					        if isinstance(infile, str):
 | 
				
			||||||
            self.file = open(infile, "r")
 | 
					            self.file = open(infile, "rb")
 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
            self.file = infile
 | 
					            self.file = infile
 | 
				
			||||||
        self.ts_iter = ts_iter
 | 
					        self.ts_iter = ts_iter
 | 
				
			||||||
@@ -22,17 +23,19 @@ class Timestamper(object):
 | 
				
			|||||||
        while True:
 | 
					        while True:
 | 
				
			||||||
            line = self.file.readline(*args)
 | 
					            line = self.file.readline(*args)
 | 
				
			||||||
            if not line:
 | 
					            if not line:
 | 
				
			||||||
                return ""
 | 
					                return b""
 | 
				
			||||||
            if line[0] == '#':
 | 
					            if line[0:1] == b'#':
 | 
				
			||||||
                continue
 | 
					                continue
 | 
				
			||||||
            break
 | 
					            # For some reason, coverage on python 3.8 reports that
 | 
				
			||||||
 | 
					            # we never hit this break, even though we definitely do.
 | 
				
			||||||
 | 
					            break  # pragma: no cover
 | 
				
			||||||
        try:
 | 
					        try:
 | 
				
			||||||
            return self.ts_iter.next() + line
 | 
					            return next(self.ts_iter) + line
 | 
				
			||||||
        except StopIteration:
 | 
					        except StopIteration:
 | 
				
			||||||
            return ""
 | 
					            return b""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def readlines(self, size = None):
 | 
					    def readlines(self, size=None):
 | 
				
			||||||
        out = ""
 | 
					        out = b""
 | 
				
			||||||
        while True:
 | 
					        while True:
 | 
				
			||||||
            line = self.readline()
 | 
					            line = self.readline()
 | 
				
			||||||
            out += line
 | 
					            out += line
 | 
				
			||||||
@@ -43,15 +46,16 @@ class Timestamper(object):
 | 
				
			|||||||
    def __iter__(self):
 | 
					    def __iter__(self):
 | 
				
			||||||
        return self
 | 
					        return self
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def next(self):
 | 
					    def __next__(self):
 | 
				
			||||||
        result = self.readline()
 | 
					        result = self.readline()
 | 
				
			||||||
        if not result:
 | 
					        if not result:
 | 
				
			||||||
            raise StopIteration
 | 
					            raise StopIteration
 | 
				
			||||||
        return result
 | 
					        return result
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class TimestamperRate(Timestamper):
 | 
					class TimestamperRate(Timestamper):
 | 
				
			||||||
    """Timestamper that uses a start time and a fixed rate"""
 | 
					    """Timestamper that uses a start time and a fixed rate"""
 | 
				
			||||||
    def __init__(self, infile, start, rate, end = None):
 | 
					    def __init__(self, infile, start, rate, end=None):
 | 
				
			||||||
        """
 | 
					        """
 | 
				
			||||||
        file: file name or object
 | 
					        file: file name or object
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -61,33 +65,39 @@ class TimestamperRate(Timestamper):
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
        end: If specified, raise StopIteration before outputting a value
 | 
					        end: If specified, raise StopIteration before outputting a value
 | 
				
			||||||
             greater than this."""
 | 
					             greater than this."""
 | 
				
			||||||
        timestamp_to_string = nilmdb.utils.time.timestamp_to_string
 | 
					        timestamp_to_bytes = nilmdb.utils.time.timestamp_to_bytes
 | 
				
			||||||
        rate_to_period = nilmdb.utils.time.rate_to_period
 | 
					        rate_to_period = nilmdb.utils.time.rate_to_period
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        def iterator(start, rate, end):
 | 
					        def iterator(start, rate, end):
 | 
				
			||||||
            n = 0
 | 
					            n = 0
 | 
				
			||||||
            rate = float(rate)
 | 
					            rate = float(rate)
 | 
				
			||||||
            while True:
 | 
					            while True:
 | 
				
			||||||
                now = start + rate_to_period(rate, n)
 | 
					                now = start + rate_to_period(rate, n)
 | 
				
			||||||
                if end and now >= end:
 | 
					                if end and now >= end:
 | 
				
			||||||
                    raise StopIteration
 | 
					                    return
 | 
				
			||||||
                yield timestamp_to_string(now) + " "
 | 
					                yield timestamp_to_bytes(now) + b" "
 | 
				
			||||||
                n += 1
 | 
					                n += 1
 | 
				
			||||||
        Timestamper.__init__(self, infile, iterator(start, rate, end))
 | 
					        Timestamper.__init__(self, infile, iterator(start, rate, end))
 | 
				
			||||||
        self.start = start
 | 
					        self.start = start
 | 
				
			||||||
        self.rate = rate
 | 
					        self.rate = rate
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def __str__(self):
 | 
					    def __str__(self):
 | 
				
			||||||
        return sprintf("TimestamperRate(..., start=\"%s\", rate=%g)",
 | 
					        return sprintf("TimestamperRate(..., start=\"%s\", rate=%g)",
 | 
				
			||||||
                       nilmdb.utils.time.timestamp_to_human(self.start),
 | 
					                       nilmdb.utils.time.timestamp_to_human(self.start),
 | 
				
			||||||
                       self.rate)
 | 
					                       self.rate)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class TimestamperNow(Timestamper):
 | 
					class TimestamperNow(Timestamper):
 | 
				
			||||||
    """Timestamper that uses current time"""
 | 
					    """Timestamper that uses current time"""
 | 
				
			||||||
    def __init__(self, infile):
 | 
					    def __init__(self, infile):
 | 
				
			||||||
        timestamp_to_string = nilmdb.utils.time.timestamp_to_string
 | 
					        timestamp_to_bytes = nilmdb.utils.time.timestamp_to_bytes
 | 
				
			||||||
        get_now = nilmdb.utils.time.now
 | 
					        get_now = nilmdb.utils.time.now
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        def iterator():
 | 
					        def iterator():
 | 
				
			||||||
            while True:
 | 
					            while True:
 | 
				
			||||||
                yield timestamp_to_string(get_now()) + " "
 | 
					                yield timestamp_to_bytes(get_now()) + b" "
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        Timestamper.__init__(self, infile, iterator())
 | 
					        Timestamper.__init__(self, infile, iterator())
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def __str__(self):
 | 
					    def __str__(self):
 | 
				
			||||||
        return "TimestamperNow(...)"
 | 
					        return "TimestamperNow(...)"
 | 
				
			||||||
 
 | 
				
			|||||||
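A sketch of the bytes-in, bytes-out behavior after this change, using an in-memory file; the module path is assumed. At rate=10 Hz the timestamps step by 100000 microseconds, and the leading comment line is skipped without consuming a timestamp.

    import io
    from nilmdb.utils.timestamper import TimestamperRate   # assumed path

    raw = io.BytesIO(b"# header\n1.0\n2.0\n3.0\n")
    ts = TimestamperRate(raw, start=0, rate=10)
    for line in ts:
        print(line)
    # b'0 1.0\n'
    # b'100000 2.0\n'
    # b'200000 3.0\n'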
							
								
								
									
requirements.txt (new file, 41 lines)
@@ -0,0 +1,41 @@
+argcomplete==1.12.0
+CherryPy==18.6.0
+coverage==5.2.1
+Cython==0.29.21
+decorator==4.4.2
+fallocate==1.6.4
+flake8==3.8.3
+nose==1.3.7
+numpy==1.19.1
+progressbar==2.5
+psutil==5.7.2
+python-datetime-tz==0.5.4
+python-dateutil==2.8.1
+requests==2.24.0
+tz==0.2.2
+yappi==1.2.5
+
+## The following requirements were added by pip freeze:
+beautifulsoup4==4.9.1
+certifi==2020.6.20
+chardet==3.0.4
+cheroot==8.4.2
+idna==2.10
+jaraco.classes==3.1.0
+jaraco.collections==3.0.0
+jaraco.functools==3.0.1
+jaraco.text==3.2.0
+mccabe==0.6.1
+more-itertools==8.4.0
+portend==2.6
+pycodestyle==2.6.0
+pyflakes==2.2.0
+pytz==2020.1
+six==1.15.0
+soupsieve==2.0.1
+tempora==4.0.0
+urllib3==1.25.10
+waitress==1.4.4
+WebOb==1.8.6
+WebTest==2.0.35
+zc.lockfile==2.0
setup.cfg
@@ -13,8 +13,6 @@ cover-package=nilmdb
 cover-erase=1
 # this works, puts html output in cover/ dir:
 # cover-html=1
-# need nose 1.1.3 for this:
-# cover-branches=1
 #debug=nose
 #debug-log=nose.log
 stop=1
@@ -39,3 +37,23 @@ tests=tests
 #with-profile=1
 #profile-sort=time
 ##profile-restrict=10  # doesn't work right, treated as string or something
+
+[versioneer]
+VCS=git
+style=pep440
+versionfile_source=nilmdb/_version.py
+versionfile_build=nilmdb/_version.py
+tag_prefix=nilmdb-
+parentdir_prefix=nilmdb-
+
+[flake8]
+exclude=_version.py
+extend-ignore=E731
+per-file-ignores=__init__.py:F401,E402 \
+        serializer.py:E722 \
+        mustclose.py:E722 \
+        fsck.py:E266
+
+[pylint]
+ignore=_version.py
+disable=C0103,C0111,R0913,R0914
										114
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										114
									
								
								setup.py
									
									
									
									
									
								
							@@ -1,136 +1,62 @@
 | 
				
			|||||||
#!/usr/bin/python
 | 
					#!/usr/bin/env python3
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# To release a new version, tag it:
 | 
					# To release a new version, tag it:
 | 
				
			||||||
#   git tag -a nilmdb-1.1 -m "Version 1.1"
 | 
					#   git tag -a nilmdb-1.1 -m "Version 1.1"
 | 
				
			||||||
#   git push --tags
 | 
					#   git push --tags
 | 
				
			||||||
# Then just package it up:
 | 
					# Then just package it up:
 | 
				
			||||||
#   python setup.py sdist
 | 
					#   python3 setup.py sdist
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# This is supposed to be using Distribute:
 | 
					 | 
				
			||||||
#
 | 
					 | 
				
			||||||
#   distutils provides a "setup" method.
 | 
					 | 
				
			||||||
#   setuptools is a set of monkeypatches on top of that.
 | 
					 | 
				
			||||||
#   distribute is a particular version/implementation of setuptools.
 | 
					 | 
				
			||||||
#
 | 
					 | 
				
			||||||
# So we don't really know if this is using the old setuptools or the
 | 
					 | 
				
			||||||
# Distribute-provided version of setuptools.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
import traceback
 | 
					 | 
				
			||||||
import sys
 | 
					import sys
 | 
				
			||||||
import os
 | 
					import os
 | 
				
			||||||
 | 
					from setuptools import setup
 | 
				
			||||||
try:
 | 
					from distutils.extension import Extension
 | 
				
			||||||
    from setuptools import setup, find_packages
 | 
					 | 
				
			||||||
    from distutils.extension import Extension
 | 
					 | 
				
			||||||
    import distutils.version
 | 
					 | 
				
			||||||
except ImportError:
 | 
					 | 
				
			||||||
    traceback.print_exc()
 | 
					 | 
				
			||||||
    print "Please install the prerequisites listed in README.txt"
 | 
					 | 
				
			||||||
    sys.exit(1)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
# Versioneer manages version numbers from git tags.
 | 
					# Versioneer manages version numbers from git tags.
 | 
				
			||||||
# https://github.com/warner/python-versioneer
 | 
					# https://github.com/warner/python-versioneer
 | 
				
			||||||
import versioneer
 | 
					import versioneer
 | 
				
			||||||
versioneer.versionfile_source = 'nilmdb/_version.py'
 | 
					 | 
				
			||||||
versioneer.versionfile_build = 'nilmdb/_version.py'
 | 
					 | 
				
			||||||
versioneer.tag_prefix = 'nilmdb-'
 | 
					 | 
				
			||||||
versioneer.parentdir_prefix = 'nilmdb-'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# Hack to workaround logging/multiprocessing issue:
 | 
					 | 
				
			||||||
# https://groups.google.com/d/msg/nose-users/fnJ-kAUbYHQ/_UsLN786ygcJ
 | 
					 | 
				
			||||||
try: import multiprocessing
 | 
					 | 
				
			||||||
except: pass
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
-# Use Cython if it's new enough, otherwise use preexisting C files.
-cython_modules = [ 'nilmdb.server.interval',
-                   'nilmdb.server.rbtree' ]
-try:
-    import Cython
-    from Cython.Build import cythonize
-    if (distutils.version.LooseVersion(Cython.__version__) <
-        distutils.version.LooseVersion("0.16")):
-        print "Cython version", Cython.__version__, "is too old; not using it."
-        raise ImportError()
-    use_cython = True
-except ImportError:
-    use_cython = False
 
+# External modules that need to be built
 ext_modules = [ Extension('nilmdb.server.rocket', ['nilmdb/server/rocket.c' ]) ]
 
+# Use Cython.
+cython_modules = [ 'nilmdb.server.interval', 'nilmdb.server.rbtree' ]
+import Cython
+from Cython.Build import cythonize
 for modulename in cython_modules:
     filename = modulename.replace('.','/')
-    if use_cython:
-        ext_modules.extend(cythonize(filename + ".pyx"))
-    else:
-        cfile = filename + ".c"
-        if not os.path.exists(cfile):
-            raise Exception("Missing source file " + cfile + ".  "
-                            "Try installing cython >= 0.16.")
-        ext_modules.append(Extension(modulename, [ cfile ]))
+    ext_modules.extend(cythonize(filename + ".pyx"))
 
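
With the fallback gone, Cython becomes a hard build dependency (the old ">= 0.16" floor check disappears too), and the .pyx sources are recompiled on every build instead of shipping pre-generated C files. One way to sanity-check the result after an in-place build (python3 setup.py build_ext --inplace) is to import the three compiled extensions:

    # Assumes the extensions were just built in-place from the repo root.
    from nilmdb.server import interval, rbtree, rocket
    for mod in (interval, rbtree, rocket):
        print(mod.__name__, "->", mod.__file__)
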
-# We need a MANIFEST.in.  Generate it here rather than polluting the
-# repository with yet another setup-related file.
-with open("MANIFEST.in", "w") as m:
-    m.write("""
-# Root
-include README.txt
-include setup.cfg
-include setup.py
-include versioneer.py
-include Makefile
-include .coveragerc
-include .pylintrc
-
-# Cython files -- include source.
-recursive-include nilmdb/server *.pyx *.pyxdep *.pxd
-
-# Tests
-recursive-include tests *.py
-recursive-include tests/data *
-include tests/test.order
-
-# Docs
-recursive-include docs Makefile *.md
-
-# Extras
-recursive-include extras *
-""")
+# Get list of requirements to use in `install_requires` below.  Note
+# that we don't make a distinction between things that are actually
+# required for end-users vs developers (or use `test_requires` or
+# anything else) -- just install everything for simplicity.
+install_requires = open('requirements.txt').readlines()
 
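
Note that readlines() keeps the trailing newlines and would also pass blank or comment lines straight through to install_requires; setuptools tolerates this, but a more defensive reading (a sketch, not what this commit does) would strip and filter:

    # Defensive variant: strip whitespace, skip comments and blank lines.
    with open('requirements.txt') as f:
        install_requires = [line.strip() for line in f
                            if line.strip() and not line.startswith('#')]
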
 # Run setup
 setup(name='nilmdb',
       version = versioneer.get_version(),
       cmdclass = versioneer.get_cmdclass(),
-      url = 'https://git.jim.sh/jim/lees/nilmdb.git',
+      url = 'https://git.jim.sh/nilm/nilmdb.git',
       author = 'Jim Paris',
       description = "NILM Database",
       long_description = "NILM Database",
       license = "Proprietary",
       author_email = 'jim@jtan.com',
-      tests_require = [ 'nose',
-                        'coverage',
-                        ],
-      setup_requires = [ 'distribute',
-                         ],
-      install_requires = [ 'decorator',
-                           'cherrypy >= 3.2',
-                           'simplejson',
-                           'pycurl',
-                           'python-dateutil',
-                           'pytz',
-                           'psutil >= 0.3.0',
-                           'requests >= 1.1.0, < 2.0.0',
-                           ],
+      setup_requires = [ 'setuptools' ],
+      install_requires = install_requires,
       packages = [ 'nilmdb',
                    'nilmdb.utils',
-                   'nilmdb.utils.datetime_tz',
                    'nilmdb.server',
                    'nilmdb.client',
                    'nilmdb.cmdline',
                    'nilmdb.scripts',
+                   'nilmdb.fsck',
                    ],
       entry_points = {
           'console_scripts': [
               'nilmtool = nilmdb.scripts.nilmtool:main',
               'nilmdb-server = nilmdb.scripts.nilmdb_server:main',
+              'nilmdb-fsck = nilmdb.scripts.nilmdb_fsck:main',
               ],
           },
       ext_modules = ext_modules,
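
Each console_scripts entry above makes the installer generate a wrapper executable that imports the named module and calls its main(). Running the new nilmdb-fsck command is therefore roughly equivalent to:

    # Rough equivalent of invoking "nilmdb-fsck" from a shell; the real
    # wrapper also passes main()'s return value to sys.exit().
    import sys
    from nilmdb.scripts import nilmdb_fsck
    sys.exit(nilmdb_fsck.main())
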
				
28  tests/data/extract-8  (new file)
@@ -0,0 +1,28 @@
+# interval-start 1332496919900000
+1332496919900000 2.523050e+05 2.254020e+05 4.779410e+03 3.638030e+03 8.138070e+03 4.334460e+03 1.083780e+03 3.743730e+03
+1332496919908333 2.551190e+05 2.237870e+05 5.965640e+03 2.076350e+03 9.468790e+03 3.693880e+03 1.247860e+03 3.393680e+03
+1332496919916667 2.616370e+05 2.247980e+05 4.848970e+03 2.315620e+03 9.323300e+03 4.225460e+03 1.805780e+03 2.593050e+03
+1332496919925000 2.606460e+05 2.251300e+05 3.061360e+03 3.951840e+03 7.662910e+03 5.341410e+03 1.986520e+03 2.276780e+03
+1332496919933333 2.559710e+05 2.235030e+05 4.096030e+03 3.296970e+03 7.827080e+03 5.452120e+03 2.492520e+03 2.929450e+03
+1332496919941667 2.579260e+05 2.217080e+05 5.472320e+03 1.555700e+03 8.495760e+03 4.491140e+03 2.379780e+03 3.741710e+03
+1332496919950000 2.610180e+05 2.242350e+05 4.669770e+03 1.876190e+03 8.366680e+03 3.677510e+03 9.021690e+02 3.549040e+03
+1332496919958333 2.569150e+05 2.274650e+05 2.785070e+03 3.751930e+03 7.440320e+03 3.964860e+03 -3.227860e+02 2.460890e+03
+1332496919966667 2.509510e+05 2.262000e+05 3.772710e+03 3.131950e+03 8.159860e+03 4.539860e+03 7.375190e+02 2.126750e+03
+1332496919975000 2.556710e+05 2.223720e+05 5.826200e+03 8.715560e+02 9.120240e+03 4.545110e+03 2.804310e+03 2.721000e+03
+1332496919983333 2.649730e+05 2.214860e+05 5.839130e+03 4.659180e+02 8.628300e+03 3.934870e+03 2.972490e+03 3.773730e+03
+1332496919991667 2.652170e+05 2.233920e+05 3.718770e+03 2.834970e+03 7.209900e+03 3.460260e+03 1.324930e+03 4.075960e+03
+# interval-end 1332496919991668
+# interval-start 1332496920000000
+1332496920000000 2.564370e+05 2.244300e+05 4.011610e+03 3.475340e+03 7.495890e+03 3.388940e+03 2.613970e+02 3.731260e+03
+1332496920008333 2.539630e+05 2.241670e+05 5.621070e+03 1.548010e+03 9.165170e+03 3.522930e+03 1.058930e+03 2.996960e+03
+1332496920016667 2.585080e+05 2.249300e+05 6.011400e+03 8.188660e+02 9.039950e+03 4.482440e+03 2.490390e+03 2.679340e+03
+1332496920025000 2.596270e+05 2.260220e+05 4.474500e+03 2.423020e+03 7.414190e+03 5.071970e+03 2.439380e+03 2.962960e+03
+1332496920033333 2.551870e+05 2.246320e+05 4.738570e+03 3.398040e+03 7.395120e+03 4.726450e+03 1.839030e+03 3.393530e+03
+1332496920041667 2.571020e+05 2.216230e+05 6.144130e+03 1.441090e+03 8.756480e+03 3.495320e+03 1.869940e+03 3.752530e+03
+1332496920050000 2.636530e+05 2.217700e+05 6.221770e+03 7.389620e+02 9.547600e+03 2.666820e+03 1.462660e+03 3.332570e+03
+1332496920058333 2.636130e+05 2.252560e+05 4.477120e+03 2.437450e+03 8.510210e+03 3.855630e+03 9.594420e+02 2.387180e+03
+1332496920066667 2.553500e+05 2.262640e+05 4.283720e+03 3.923940e+03 7.912470e+03 5.466520e+03 1.284990e+03 2.093720e+03
+1332496920075000 2.527270e+05 2.246090e+05 5.851930e+03 2.491980e+03 8.540630e+03 5.623050e+03 2.339780e+03 3.007140e+03
+1332496920083333 2.584750e+05 2.235780e+05 5.924870e+03 1.394480e+03 8.779620e+03 4.544180e+03 2.132030e+03 3.849760e+03
+1332496920091667 2.615630e+05 2.246090e+05 4.336140e+03 2.455750e+03 8.055380e+03 3.469110e+03 6.278730e+02 3.664200e+03
+# interval-end 1332496920100000
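
This fixture interleaves data rows with interval markup in comment lines. A minimal parser for the format shown above (a hypothetical helper, not part of the test suite) makes the structure explicit:

    # Parse "# interval-start"/"# interval-end" markup into a list of
    # {'start': ..., 'end': ..., 'rows': [(timestamp, [values...]), ...]}.
    def parse_extract(lines):
        intervals = []
        for line in lines:
            line = line.strip()
            if line.startswith('# interval-start'):
                intervals.append({'start': int(line.split()[-1]), 'rows': []})
            elif line.startswith('# interval-end'):
                intervals[-1]['end'] = int(line.split()[-1])
            elif line and not line.startswith('#'):
                fields = line.split()
                intervals[-1]['rows'].append(
                    (int(fields[0]), [float(x) for x in fields[1:]]))
        return intervals
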
@@ -1,4 +1,4 @@
-# comments are cool?
+# comments are cool?  what if they contain →UNICODE← or invalid utf-8 like Ã(
 2.66568e+05  2.24029e+05  5.16140e+03  2.52517e+03  8.35084e+03  3.72470e+03  1.35534e+03  2.03900e+03
 2.57914e+05  2.27183e+05  4.30368e+03  4.13080e+03  7.25535e+03  4.89047e+03  1.63859e+03  1.93496e+03
 2.51717e+05  2.26047e+05  5.99445e+03  3.49363e+03  8.07250e+03  5.08267e+03  2.26917e+03  2.86231e+03
8  tests/data/timestamped  (new file)
@@ -0,0 +1,8 @@
+-10000000000 2.61246e+05  2.22735e+05  4.60340e+03  2.58221e+03  8.42804e+03  3.41890e+03  9.57898e+02  4.00585e+03
+-100000000 2.61246e+05  2.22735e+05  4.60340e+03  2.58221e+03  8.42804e+03  3.41890e+03  9.57898e+02  4.00585e+03
+-100000 2.61246e+05  2.22735e+05  4.60340e+03  2.58221e+03  8.42804e+03  3.41890e+03  9.57898e+02  4.00585e+03
+-1000 2.61246e+05  2.22735e+05  4.60340e+03  2.58221e+03  8.42804e+03  3.41890e+03  9.57898e+02  4.00585e+03
+1 2.61246e+05  2.22735e+05  4.60340e+03  2.58221e+03  8.42804e+03  3.41890e+03  9.57898e+02  4.00585e+03
+1000 2.61246e+05  2.22735e+05  4.60340e+03  2.58221e+03  8.42804e+03  3.41890e+03  9.57898e+02  4.00585e+03
+1000000 2.61246e+05  2.22735e+05  4.60340e+03  2.58221e+03  8.42804e+03  3.41890e+03  9.57898e+02  4.00585e+03
+1000000000 2.61246e+05  2.22735e+05  4.60340e+03  2.58221e+03  8.42804e+03  3.41890e+03  9.57898e+02  4.00585e+03
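
The data columns in this fixture are identical on every row; only the timestamp varies, from large negative to large positive values, so any parsing failure has to come from the timestamp handling. A quick structural check (assuming the path is relative to the repository root):

    # Every line should be one integer timestamp plus eight float columns.
    with open('tests/data/timestamped') as f:
        for line in f:
            fields = line.split()
            assert len(fields) == 9
            ts, values = int(fields[0]), [float(x) for x in fields[1:]]
            print(ts, values[0])
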
BIN  tests/fsck-data/test1/data.sql  (new file; binary file not shown)
1  tests/fsck-data/test1/data/git-empty-dir-placeholder  (new file)
@@ -0,0 +1 @@
+hi
1  tests/fsck-data/test1a/data/git-empty-dir-placeholder  (new file)
@@ -0,0 +1 @@
+hi
BIN  tests/fsck-data/test1b/data.sql  (new file; binary file not shown)
BIN  tests/fsck-data/test1c/data.sql  (new file; binary file not shown)
1  tests/fsck-data/test1c/data/git-empty-dir-placeholder  (new file)
@@ -0,0 +1 @@
+hi
BIN  tests/fsck-data/test2/data.sql  (new file; binary file not shown)
BIN  tests/fsck-data/test2/data/a/b/0000/0000  (new file; binary file not shown)
BIN  tests/fsck-data/test2/data/a/b/_format  (new file; binary file not shown)
BIN  tests/fsck-data/test2a/data.sql  (new file; binary file not shown)
BIN  tests/fsck-data/test2a/data/a/b/0000/0000  (new file; binary file not shown)
BIN  tests/fsck-data/test2a/data/a/b/_format  (new file; binary file not shown)
BIN  tests/fsck-data/test2b/data.sql  (new file; binary file not shown)
BIN  tests/fsck-data/test2b/data/a/b/0000/0000  (new file; binary file not shown)
BIN  tests/fsck-data/test2b/data/a/b/_format  (new file; binary file not shown)
BIN  tests/fsck-data/test2c/data.sql  (new file; binary file not shown)
BIN  tests/fsck-data/test2c/data/a/b/0000/0000  (new file; binary file not shown)
BIN  tests/fsck-data/test2c/data/a/b/_format  (new file; binary file not shown)
BIN  tests/fsck-data/test2d/data.sql  (new file; binary file not shown)
BIN  tests/fsck-data/test2d/data/a/b/0000/0000  (new file; binary file not shown)
BIN  tests/fsck-data/test2d/data/a/b/_format  (new file; binary file not shown)
BIN  tests/fsck-data/test2e/data.sql  (new file; binary file not shown)
BIN  tests/fsck-data/test2e/data/a/b/0000/0000  (new file; binary file not shown)
BIN  tests/fsck-data/test2e/data/a/b/_format  (new file; binary file not shown)
BIN  tests/fsck-data/test2f/data.sql  (new file; binary file not shown)
BIN  tests/fsck-data/test2f/data/a/b/0000/0000  (new file; binary file not shown)
BIN  tests/fsck-data/test2f/data/a/b/_format  (new file; binary file not shown)
BIN  tests/fsck-data/test2g/data.sql  (new file; binary file not shown)
BIN  tests/fsck-data/test2g/data/a/b/0000/0000  (new file; binary file not shown)
BIN  tests/fsck-data/test2g/data/a/b/_format  (new file; binary file not shown)
BIN  tests/fsck-data/test2h/data.sql  (new file; binary file not shown)

Some files were not shown because too many files have changed in this diff.
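
The fsck fixtures above all share one shape: a data.sql file (presumably the SQLite interval index) next to a data/ tree of bulk-storage files (_format plus numbered blocks), with the test1..test2h variants presumably differing in which corruption nilmdb-fsck must detect. To inspect one fixture's layout (a sketch, run from the repository root):

    # Print every file in one fixture tree.
    import os
    for root, dirs, files in os.walk('tests/fsck-data/test2'):
        for name in sorted(files):
            print(os.path.join(root, name))
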