Compare commits

...

21 Commits

Author SHA1 Message Date
5292319802 server: consolidate time processing and checks 2013-03-30 21:16:40 -04:00
173121ca87 Switch URL to one that should definitely not resolve 2013-03-30 17:31:35 -04:00
26bab031bd Add StreamInserter.send() to trigger intermediate block send 2013-03-30 17:30:43 -04:00
b5fefffa09 Use a global cached server object for WSGI app
This is instead of caching it inside nilmdb.server.wsgi_application.
Might make things work a bit better in case the web server decides
to call wsgi_application multiple times.
2013-03-30 15:56:57 -04:00
dccb3e370a WSGI config needs to specify application group
This ensures that the same Python sub-instance handles the request,
even if it's coming in from two different virtual hosts.
2013-03-30 15:56:02 -04:00
95ca55aa7e Print out WSGI environment on DB init failure 2013-03-30 15:55:41 -04:00
e01813f29d Fix wsgi documentation 2013-03-25 13:52:32 -04:00
7f41e117a2 Fix tabs 2013-03-25 13:44:03 -04:00
dd5fc806e5 Restructure WSGI app to regenerate error on each call, if needed
This way, errors like "database already locked" can be fixed and the
page reloaded, without needing to restart Apache.
2013-03-24 21:52:11 -04:00
f8ca8d31e6 Remove Iteratorizer, as it's no longer needed 2013-03-24 21:31:03 -04:00
ed89d803f0 Remove aplotter code 2013-03-24 21:29:09 -04:00
3d24092cd2 Replace bare 'except:' with 'except Exception:'
Otherwise we might inadvertently catch SystemExit or KeyboardInterrupt or
something we don't want to catch.
2013-03-24 21:28:01 -04:00
304bb43d85 Move lockfile out of data dir, to avoid stream tree conflicts 2013-03-24 21:23:45 -04:00
59a79a30a5 Remove lockfile when done.
This isn't necessary for correct behavior: if the database is killed,
the old flock() will go away when the file descriptor gets closed.
2013-03-24 21:20:47 -04:00
c0d450d39e Add locking mechanism to avoid multiple servers on one DB 2013-03-24 21:20:20 -04:00
6f14d609b2 Fix issue where bulkdata was accidentally closed 2013-03-24 21:16:18 -04:00
77ef87456f Improve WSGI application support, fix docs 2013-03-24 21:16:03 -04:00
32d6af935c Improve wsgi docs 2013-03-22 19:17:36 -04:00
6af3a6fc41 Add WSGI application support and documentation 2013-03-22 19:14:34 -04:00
f8a06fb3b7 Clarify default DB path in nilmdb_server.py help text 2013-03-22 15:09:37 -04:00
e790bb9e8a Fix test failure when tests are run as root 2013-03-21 14:33:02 -04:00
23 changed files with 194 additions and 636 deletions


@@ -24,3 +24,5 @@ Usage:
    nilmdb-server --help
    nilmtool --help
+See docs/wsgi.md for info on setting up a WSGI application in Apache.

docs/wsgi.md Normal file

@@ -0,0 +1,32 @@
WSGI Application in Apache
--------------------------

Install `apache2` and `libapache2-mod-wsgi`.

We'll set up the database server at URL `http://myhost.com/nilmdb`.
The database will be stored in `/home/nilm/db`, and the process will
run as user `nilm`, group `nilm`.

First, create a WSGI script `/home/nilm/nilmdb.wsgi` containing:

    import nilmdb.server
    application = nilmdb.server.wsgi_application("/home/nilm/db", "/nilmdb")

The first parameter is the local filesystem path, and the second
parameter is the path part of the URL.

Then, set up Apache with a configuration like:

    <VirtualHost>
        WSGIScriptAlias /nilmdb /home/nilm/nilmdb.wsgi
        WSGIApplicationGroup nilmdb-appgroup
        WSGIProcessGroup nilmdb-procgroup
        WSGIDaemonProcess nilmdb-procgroup threads=32 user=nilm group=nilm

        # Access control example:
        <Location /nilmdb>
            Order deny,allow
            Deny from all
            Allow from 1.2.3.4
        </Location>
    </VirtualHost>
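A quick way to check such a deployment from the client side is a sketch like the following; it assumes the nilmdb Python client is installed on the checking machine and that the URL matches the WSGIScriptAlias above:

    import nilmdb.client

    # Hypothetical URL matching the example deployment above; adjust as needed.
    client = nilmdb.client.Client("http://myhost.com/nilmdb/")
    print(client.version())      # server version string, proves the WSGI app is up
    print(client.stream_list())  # empty list for a fresh database
    client.close()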


@@ -313,6 +313,11 @@ class StreamInserter(object):
        part of a new interval and there may be a gap left in-between."""
        self._send_block(final = True)

+    def send(self):
+        """Send any data that we might have buffered up.  Does not affect
+        any other treatment of timestamps or endpoints."""
+        self._send_block(final = False)
+
    def _get_first_noncomment(self, block):
        """Return the (start, end) indices of the first full line in
        block that isn't a comment, or raise IndexError if
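A sketch of how the new send() call might be used from the client side (the stream path, timestamps, and values below are made up for illustration; stream_insert_context is the existing client-side insert helper):

    import nilmdb.client

    client = nilmdb.client.Client("http://localhost:12380/")
    # Hypothetical stream; assumes /test/raw already exists with a matching layout.
    with client.stream_insert_context("/test/raw", 100, 200) as ctx:
        ctx.insert("110 1\n")
        ctx.insert("111 1\n")
        ctx.send()             # flush buffered rows now; the interval stays open
        ctx.insert("112 1\n")  # later rows continue the same interval
    client.close()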


@@ -16,7 +16,7 @@ class HTTPClient(object):
        reparsed = urlparse.urlparse(baseurl).geturl()
        if '://' not in reparsed:
            reparsed = urlparse.urlparse("http://" + baseurl).geturl()
-       self.baseurl = reparsed
+       self.baseurl = reparsed.rstrip('/') + '/'

        # Build Requests session object, enable SSL verification
        self.session = requests.Session()
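For illustration, the effect of the new normalization on hypothetical base URLs:

    # Both spellings end up with exactly one trailing slash, so relative
    # request paths always join cleanly onto the base URL.
    for baseurl in ("http://localhost:12380", "http://localhost:12380/"):
        print(baseurl.rstrip('/') + '/')   # -> http://localhost:12380/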


@@ -22,7 +22,7 @@ def main():
    group.add_argument('-p', '--port', help = 'Listen on the given port',
                       type = int, default = 12380)
    group.add_argument('-d', '--database', help = 'Database directory',
-                      default = os.path.join(os.getcwd(), "db"))
+                      default = "./db")
    group.add_argument('-q', '--quiet', help = 'Silence output',
                       action = 'store_true')
    group.add_argument('-t', '--traceback',


@@ -17,5 +17,5 @@ except (ImportError, TypeError): # pragma: no cover
    pass

from nilmdb.server.nilmdb import NilmDB
-from nilmdb.server.server import Server
+from nilmdb.server.server import Server, wsgi_application
from nilmdb.server.errors import NilmDBError, StreamError, OverlapError


@@ -14,6 +14,7 @@ import re
import sys
import tempfile
+import nilmdb.utils.lock

from . import rocket

# Up to 256 open file descriptors at any given time.
@@ -26,6 +27,8 @@ class BulkData(object):
    def __init__(self, basepath, **kwargs):
        self.basepath = basepath
        self.root = os.path.join(self.basepath, "data")
+       self.lock = self.root + ".lock"
+       self.lockfile = None

        # Tuneables
        if "file_size" in kwargs:
@@ -44,8 +47,22 @@ class BulkData(object):
        if not os.path.isdir(self.root):
            os.mkdir(self.root)

+       # Create the lock
+       self.lockfile = open(self.lock, "w")
+       if not nilmdb.utils.lock.exclusive_lock(self.lockfile):
+           raise IOError('database at "' + self.basepath +
+                         '" is already locked by another process')
+
    def close(self):
        self.getnode.cache_remove_all()
+       if self.lockfile:
+           nilmdb.utils.lock.exclusive_unlock(self.lockfile)
+           self.lockfile.close()
+           try:
+               os.unlink(self.lock)
+           except OSError: # pragma: no cover
+               pass
+           self.lockfile = None

    def _encode_filename(self, path):
        # Encode all paths to UTF-8, regardless of sys.getfilesystemencoding(),
@@ -134,7 +151,7 @@ class BulkData(object):
            # Open and cache it
            self.getnode(unicodepath)
-       except:
+       except Exception:
            exc_info = sys.exc_info()
            try:
                os.rmdir(ospath)
@@ -371,7 +388,7 @@ class Table(object):
            # Try deleting subdir, too
            try:
                os.rmdir(os.path.join(self.root, subdir))
-           except:
+           except Exception:
                pass

    # Cache open files
@@ -504,7 +521,7 @@ class Table(object):
            with open(cachefile, "rb") as f:
                ranges = pickle.load(f)
            cachefile_present = True
-       except:
+       except Exception:
            ranges = []
            cachefile_present = False


@@ -106,7 +106,9 @@ class NilmDB(object):
        try:
            os.makedirs(self.basepath)
        except OSError as e:
-           if e.errno != errno.EEXIST:
+           if e.errno != errno.EEXIST: # pragma: no cover
+               # (no coverage, because it's hard to trigger this case
+               #  if tests are run as root)
                raise IOError("can't create tree " + self.basepath)

        # Our data goes inside it
@@ -117,8 +119,9 @@ class NilmDB(object):
        self.con = sqlite3.connect(sqlfilename, check_same_thread = True)
        try:
            self._sql_schema_update()
-       finally: # pragma: no cover
+       except Exception: # pragma: no cover
            self.data.close()
+           raise

        # See big comment at top about the performance implications of this
        self.con.execute("PRAGMA synchronous=NORMAL")


@@ -11,9 +11,11 @@ from nilmdb.utils.time import string_to_timestamp
import cherrypy
import sys
import os
+import socket
import simplejson as json
import decorator
import psutil
+import traceback

class NilmApp(object):
    def __init__(self, db):
@@ -172,6 +174,21 @@ class Root(NilmApp):
class Stream(NilmApp):
    """Stream-specific operations"""

+    # Helpers
+    def _get_times(self, start_param, end_param):
+        (start, end) = (None, None)
+        if start_param is not None:
+            start = string_to_timestamp(start_param)
+        if end_param is not None:
+            end = string_to_timestamp(end_param)
+        if start is not None and end is not None:
+            if start >= end:
+                raise cherrypy.HTTPError(
+                    "400 Bad Request",
+                    sprintf("start must precede end (%s >= %s)",
+                            start_param, end_param))
+        return (start, end)
+
    # /stream/list
    # /stream/list?layout=float32_8
    # /stream/list?path=/newton/prep&extended=1
@@ -305,11 +322,7 @@ class Stream(NilmApp):
            raise cherrypy.HTTPError("404 Not Found", "No such stream")

        # Check limits
-       start = string_to_timestamp(start)
-       end = string_to_timestamp(end)
-       if start >= end:
-           raise cherrypy.HTTPError("400 Bad Request",
-                                    "start must precede end")
+       (start, end) = self._get_times(start, end)

        # Pass the data directly to nilmdb, which will parse it and
        # raise a ValueError if there are any problems.
@@ -331,14 +344,7 @@ class Stream(NilmApp):
        the interval [start, end).  Returns the number of data points
        removed.
        """
-       if start is not None:
-           start = string_to_timestamp(start)
-       if end is not None:
-           end = string_to_timestamp(end)
-       if start is not None and end is not None:
-           if start >= end:
-               raise cherrypy.HTTPError("400 Bad Request",
-                                        "start must precede end")
+       (start, end) = self._get_times(start, end)
        total_removed = 0
        while True:
            (removed, restart) = self.db.stream_remove(path, start, end)
@@ -369,15 +375,7 @@ class Stream(NilmApp):
        Note that the response type is the non-standard
        'application/x-json-stream' for lack of a better option.
        """
-       if start is not None:
-           start = string_to_timestamp(start)
-       if end is not None:
-           end = string_to_timestamp(end)
-       if start is not None and end is not None:
-           if start >= end:
-               raise cherrypy.HTTPError("400 Bad Request",
-                                        "start must precede end")
+       (start, end) = self._get_times(start, end)

        if len(self.db.stream_list(path = path)) != 1:
            raise cherrypy.HTTPError("404", "No such stream: " + path)
@@ -415,16 +413,7 @@ class Stream(NilmApp):
        If 'markup' is True, adds comments to the stream denoting each
        interval's start and end timestamp.
        """
-       if start is not None:
-           start = string_to_timestamp(start)
-       if end is not None:
-           end = string_to_timestamp(end)
-
-       # Check parameters
-       if start is not None and end is not None:
-           if start >= end:
-               raise cherrypy.HTTPError("400 Bad Request",
-                                        "start must precede end")
+       (start, end) = self._get_times(start, end)

        # Check path and get layout
        streams = self.db.stream_list(path = path)
@@ -466,7 +455,8 @@ class Server(object):
                 stoppable = False,       # whether /exit URL exists
                 embedded = True,         # hide diagnostics and output, etc
                 fast_shutdown = False,   # don't wait for clients to disconn.
-                force_traceback = False  # include traceback in all errors
+                force_traceback = False, # include traceback in all errors
+                basepath = '',           # base URL path for cherrypy.tree
                 ):
        # Save server version, just for verification during tests
        self.version = nilmdb.__version__
@@ -526,7 +516,7 @@ class Server(object):
        if stoppable:
            root.exit = Exiter()
        cherrypy.tree.apps = {}
-       cherrypy.tree.mount(root, "/", config = { "/" : app_config })
+       cherrypy.tree.mount(root, basepath, config = { "/" : app_config })

        # Shutdowns normally wait for clients to disconnect.  To speed
        # up tests, set fast_shutdown = True
@@ -536,6 +526,9 @@ class Server(object):
        else:
            cherrypy.server.shutdown_timeout = 5

+       # Set up the WSGI application pointer for external programs
+       self.wsgi_application = cherrypy.tree
+
    def json_error_page(self, status, message, traceback, version):
        """Return a custom error page in JSON so the client can parse it"""
        errordata = { "status" : status,
@@ -602,3 +595,55 @@ class Server(object):
    def stop(self):
        cherrypy.engine.exit()

+
+# Use a single global nilmdb.server.NilmDB and nilmdb.server.Server
+# instance since the database can only be opened once.  For this to
+# work, the web server must use only a single process and single
+# Python interpreter.  Multiple threads are OK.
+_wsgi_server = None
+def wsgi_application(dbpath, basepath): # pragma: no cover
+    """Return a WSGI application object with a database at the
+    specified path.
+
+    'dbpath' is a filesystem location, e.g. /home/nilm/db
+
+    'basepath' is the URL path of the application base, which
+    is the same as the first argument to Apache's WSGIScriptAlias
+    directive.
+    """
+    def application(environ, start_response):
+        global _wsgi_server
+        if _wsgi_server is None:
+            # Try to start the server
+            try:
+                db = nilmdb.utils.serializer_proxy(nilmdb.server.NilmDB)(dbpath)
+                _wsgi_server = nilmdb.server.Server(
+                    db, embedded = True,
+                    basepath = basepath.rstrip('/'))
+            except Exception:
+                # Build an error message on failure
+                import pprint
+                err = sprintf("Initializing database at path '%s' failed:\n\n",
+                              dbpath)
+                err += traceback.format_exc()
+                try:
+                    import pwd
+                    import grp
+                    err += sprintf("\nRunning as: uid=%d (%s), gid=%d (%s) "
+                                   "on host %s, pid %d\n",
+                                   os.getuid(), pwd.getpwuid(os.getuid())[0],
+                                   os.getgid(), grp.getgrgid(os.getgid())[0],
+                                   socket.gethostname(), os.getpid())
+                except ImportError:
+                    pass
+                err += sprintf("\nEnvironment:\n%s\n", pprint.pformat(environ))
+        if _wsgi_server is None:
+            # Serve up the error with our own mini WSGI app.
+            headers = [ ('Content-type', 'text/plain'),
+                        ('Content-length', str(len(err))) ]
+            start_response("500 Internal Server Error", headers)
+            return [err]
+        # Call the normal application
+        return _wsgi_server.wsgi_application(environ, start_response)
+    return application
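Outside Apache, the object returned by wsgi_application() can be handed to any WSGI server. A minimal sketch using Python's built-in wsgiref follows; the database path and port are arbitrary choices for local testing, and this standalone use is an assumption, not the documented mod_wsgi deployment:

    from wsgiref.simple_server import make_server
    import nilmdb.server

    # "/tmp/test-db" and port 8080 are hypothetical; basepath "" serves at the root.
    application = nilmdb.server.wsgi_application("/tmp/test-db", "")
    httpd = make_server("127.0.0.1", 8080, application)
    print("Serving on http://127.0.0.1:8080/")
    httpd.serve_forever()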


@@ -1,7 +1,7 @@
"""NilmDB utilities"""

+from __future__ import absolute_import
from nilmdb.utils.timer import Timer
-from nilmdb.utils.iteratorizer import Iteratorizer
from nilmdb.utils.serializer import serializer_proxy
from nilmdb.utils.lrucache import lru_cache
from nilmdb.utils.diskusage import du, human_size
@@ -12,3 +12,4 @@ import nilmdb.utils.fallocate
import nilmdb.utils.time
import nilmdb.utils.iterator
import nilmdb.utils.interval
+import nilmdb.utils.lock


@@ -1,100 +0,0 @@
import Queue
import threading
import sys
import contextlib
# This file provides a context manager that converts a function
# that takes a callback into a generator that returns an iterable.
# This is done by running the function in a new thread.
# Based partially on http://stackoverflow.com/questions/9968592/
class IteratorizerThread(threading.Thread):
def __init__(self, queue, function, curl_hack):
"""
function: function to execute, which takes the
callback (provided by this class) as an argument
"""
threading.Thread.__init__(self)
self.name = "Iteratorizer-" + function.__name__ + "-" + self.name
self.function = function
self.queue = queue
self.die = False
self.curl_hack = curl_hack
def callback(self, data):
try:
if self.die:
raise Exception() # trigger termination
self.queue.put((1, data))
except:
if self.curl_hack:
# We can't raise exceptions, because the pycurl
# extension module will unconditionally print the
# exception itself, and not pass it up to the caller.
# Instead, just return a value that tells curl to
# abort. (-1 would be best, in case we were given 0
# bytes, but the extension doesn't support that).
self.queue.put((2, sys.exc_info()))
return 0
raise
def run(self):
try:
result = self.function(self.callback)
except:
self.queue.put((2, sys.exc_info()))
else:
self.queue.put((0, result))
@contextlib.contextmanager
def Iteratorizer(function, curl_hack = False):
"""
Context manager that takes a function expecting a callback,
and provides an iterable that yields the values passed to that
callback instead.
function: function to execute, which takes a callback
(provided by this context manager) as an argument
with iteratorizer(func) as it:
for i in it:
print 'callback was passed:', i
print 'function returned:', it.retval
"""
queue = Queue.Queue(maxsize = 1)
thread = IteratorizerThread(queue, function, curl_hack)
thread.daemon = True
thread.start()
class iteratorizer_gen(object):
def __init__(self, queue):
self.queue = queue
self.retval = None
def __iter__(self):
return self
def next(self):
(typ, data) = self.queue.get()
if typ == 0:
# function has returned
self.retval = data
raise StopIteration
elif typ == 1:
# data is available
return data
else:
# callback raised an exception
raise data[0], data[1], data[2]
try:
yield iteratorizer_gen(queue)
finally:
# Ask the thread to die, if it's still running.
thread.die = True
while thread.isAlive():
try:
queue.get(True, 0.01)
except: # pragma: no cover
pass

nilmdb/utils/lock.py Normal file

@@ -0,0 +1,33 @@
# File locking

import warnings

try:
    import fcntl
    import errno

    def exclusive_lock(f):
        """Acquire an exclusive lock.  Returns True on successful
        lock, or False on error."""
        try:
            fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError as e:
            if e.errno in (errno.EACCES, errno.EAGAIN):
                return False
            else: # pragma: no cover
                raise
        return True

    def exclusive_unlock(f):
        """Release an exclusive lock."""
        fcntl.flock(f.fileno(), fcntl.LOCK_UN)

except ImportError: # pragma: no cover
    def exclusive_lock(f):
        """Dummy lock function -- does not lock!"""
        warnings.warn("Pretending to lock " + str(f))
        return True

    def exclusive_unlock(f):
        """Release an exclusive lock."""
        return
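For illustration, flock() locks belong to the open file description, so a second open of the same path cannot take the lock while the first holds it. A small sketch (hypothetical lock path; requires a platform with fcntl):

    import nilmdb.utils.lock

    a = open("/tmp/example.lock", "w")
    b = open("/tmp/example.lock", "w")
    print(nilmdb.utils.lock.exclusive_lock(a))   # True  -- lock acquired
    print(nilmdb.utils.lock.exclusive_lock(b))   # False -- already held
    nilmdb.utils.lock.exclusive_unlock(a)
    print(nilmdb.utils.lock.exclusive_lock(b))   # True  -- free again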


@@ -15,7 +15,7 @@ def must_close(errorfile = sys.stderr, wrap_verify = False):
    def wrap_class_method(wrapper):
        try:
            orig = getattr(cls, wrapper.__name__).im_func
-       except:
+       except Exception:
            orig = lambda x: None
        setattr(cls, wrapper.__name__, decorator.decorator(wrapper, orig))


@@ -39,7 +39,7 @@ versioneer.parentdir_prefix = 'nilmdb-'
# Hack to workaround logging/multiprocessing issue:
# https://groups.google.com/d/msg/nose-users/fnJ-kAUbYHQ/_UsLN786ygcJ
try: import multiprocessing
-except: pass
+except Exception: pass

# Use Cython if it's new enough, otherwise use preexisting C files.
cython_modules = [ 'nilmdb.server.interval',


@@ -24,7 +24,7 @@ class JimOrderPlugin(nose.plugins.Plugin):
                                            name, workingDir=loader.workingDir)
        try:
            order = os.path.join(addr.filename, "test.order")
-       except:
+       except Exception:
            order = None
        if order and os.path.exists(order):
            files = []


@@ -4,7 +4,6 @@ test_lrucache.py
test_mustclose.py
test_serializer.py
-test_iteratorizer.py
test_timestamper.py
test_rbtree.py


@@ -30,6 +30,11 @@ class TestBulkData(object):
        else:
            data = BulkData(db, file_size = size, files_per_dir = files)

+       # Try opening it again (should result in locking error)
+       with assert_raises(IOError) as e:
+           data2 = BulkData(db)
+       in_("already locked by another process", str(e.exception))
+
        # create empty
        with assert_raises(ValueError):
            data.create("/foo", "uint16_8")


@@ -311,11 +311,11 @@ class TestClient(object):
        # Trigger a curl error in generator
        with assert_raises(ServerError) as e:
-           client.http.get_gen("http://nosuchurl/").next()
+           client.http.get_gen("http://nosuchurl.example.com./").next()

        # Trigger a curl error in generator
        with assert_raises(ServerError) as e:
-           client.http.get_gen("http://nosuchurl/").next()
+           client.http.get_gen("http://nosuchurl.example.com./").next()

        # Check 404 for missing streams
        for function in [ client.stream_intervals, client.stream_extract ]:
@@ -460,6 +460,7 @@ class TestClient(object):
            ctx.update_start(109)
            ctx.insert("110 1\n")
            ctx.insert("111 1\n")
+           ctx.send()
            ctx.insert("112 1\n")
            ctx.insert("113 1\n")
            ctx.insert("114 1\n")
@@ -619,7 +620,7 @@ class TestClient(object):
                poolmanager = c.http._last_response.connection.poolmanager
                pool = poolmanager.pools[('http','localhost',32180)]
                return (pool.num_connections, pool.num_requests)
-           except:
+           except Exception:
                raise SkipTest("can't get connection info")

        # First request makes a connection
# First request makes a connection # First request makes a connection


@@ -385,7 +385,6 @@ class TestIntervalSpeed:
    def test_interval_speed(self):
        import yappi
        import time
-       import testutil.aplotter as aplotter
        import random
        import math
@@ -406,6 +405,5 @@ class TestIntervalSpeed:
                    speed/j,
                    speed / (j*math.log(j))) # should be constant
            speeds[j] = speed
-       aplotter.plot(speeds.keys(), speeds.values(), plot_slope=True)
        yappi.stop()
        yappi.print_stats(sort_type=yappi.SORTTYPE_TTOT, limit=10)


@@ -1,61 +0,0 @@
import nilmdb
from nilmdb.utils.printf import *
import nose
from nose.tools import *
from nose.tools import assert_raises
import threading
import time
from testutil.helpers import *
def func_with_callback(a, b, callback):
callback(a)
callback(b)
callback(a+b)
return "return value"
class TestIteratorizer(object):
def test(self):
# First try it with a normal callback
self.result = ""
def cb(x):
self.result += str(x)
func_with_callback(1, 2, cb)
eq_(self.result, "123")
# Now make it an iterator
result = ""
f = lambda x: func_with_callback(1, 2, x)
with nilmdb.utils.Iteratorizer(f) as it:
for i in it:
result += str(i)
eq_(result, "123")
eq_(it.retval, "return value")
# Make sure things work when an exception occurs
result = ""
with nilmdb.utils.Iteratorizer(
lambda x: func_with_callback(1, "a", x)) as it:
with assert_raises(TypeError) as e:
for i in it:
result += str(i)
eq_(result, "1a")
# Now try to trigger the case where we stop iterating
# mid-generator, and expect the iteratorizer to clean up after
# itself. This doesn't have a particular result in the test,
# but gains coverage.
def foo():
with nilmdb.utils.Iteratorizer(f) as it:
it.next()
foo()
eq_(it.retval, None)
# Do the same thing when the curl hack is applied
def foo():
with nilmdb.utils.Iteratorizer(f, curl_hack = True) as it:
it.next()
foo()
eq_(it.retval, None)


@@ -28,9 +28,6 @@ class Test00Nilmdb(object): # named 00 so it runs first
    def test_NilmDB(self):
        recursive_unlink(testdb)

-       with assert_raises(IOError):
-           nilmdb.server.NilmDB("/nonexistant-db/foo")
-
        db = nilmdb.server.NilmDB(testdb)
        db.close()
        db = nilmdb.server.NilmDB(testdb)


@@ -18,7 +18,7 @@ class TestPrintf(object):
            printf("hello, world: %d", 123)
            fprintf(test2, "hello too: %d", 123)
            test3 = sprintf("hello three: %d", 123)
-       except:
+       except Exception:
            sys.stdout = old_stdout
            raise
        sys.stdout = old_stdout


@@ -1,419 +0,0 @@
#-----------------------------------------------
#aplotter.py - ascii art function plotter
#Copyright (c) 2006, Imri Goldberg
#All rights reserved.
#
#Redistribution and use in source and binary forms,
#with or without modification, are permitted provided
#that the following conditions are met:
#
# * Redistributions of source code must retain the
# above copyright notice, this list of conditions
# and the following disclaimer.
# * Redistributions in binary form must reproduce the
# above copyright notice, this list of conditions
# and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the <ORGANIZATION> nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
#AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
#IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
#ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
#LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
#DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
#SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
#OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
#OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#-----------------------------------------------
import math
EPSILON = 0.000001
def transposed(mat):
result = []
for i in xrange(len(mat[0])):
result.append([x[i] for x in mat])
return result
def y_reversed(mat):
result = []
for i in range(len(mat)):
result.append(list(reversed(mat[i])))
return result
def sign(x):
if 0<x:
return 1
if 0 == x:
return 0
return -1
class Plotter(object):
class PlotData(object):
def __init__(self, x_size, y_size, min_x, max_x, min_y, max_y, x_mod, y_mod):
self.x_size = x_size
self.y_size = y_size
self.min_x = min_x
self.max_x = max_x
self.min_y = min_y
self.max_y = max_y
self.x_mod = x_mod
self.y_mod = y_mod
self.x_step = float(max_x - min_x)/float(self.x_size)
self.y_step = float(max_y - min_y)/float(self.y_size)
self.inv_x_step = 1/self.x_step
self.inv_y_step = 1/self.y_step
self.ratio = self.y_step / self.x_step
def __repr__(self):
s = "size: %s, bl: %s, tr: %s, step: %s" % ((self.x_size, self.y_size), (self.min_x, self.min_y), (self.max_x, self.max_y),
(self.x_step, self.y_step))
return s
def __init__(self, **kwargs):
self.x_size = kwargs.get("x_size", 80)
self.y_size = kwargs.get("y_size", 20)
self.will_draw_axes = kwargs.get("draw_axes", True)
self.new_line = kwargs.get("newline", "\n")
self.dot = kwargs.get("dot", "*")
self.plot_slope = kwargs.get("plot_slope", True)
self.x_margin = kwargs.get("x_margin", 0.05)
self.y_margin = kwargs.get("y_margin", 0.1)
self.will_plot_labels = kwargs.get("plot_labels", True)
@staticmethod
def get_symbol_by_slope(slope, default_symbol):
draw_symbol = default_symbol
if slope > math.tan(3*math.pi/8):
draw_symbol = "|"
elif slope > math.tan(math.pi/8) and slope < math.tan(3*math.pi/8):
draw_symbol = "/"
elif abs(slope) < math.tan(math.pi/8):
draw_symbol = "-"
elif slope < math.tan(-math.pi/8) and slope > math.tan(-3*math.pi/8):
draw_symbol = "\\"
elif slope < math.tan(-3*math.pi/8):
draw_symbol = "|"
return draw_symbol
def plot_labels(self, output_buffer, plot_data):
if plot_data.y_size < 2:
return
margin_factor = 1
do_plot_x_label = True
do_plot_y_label = True
x_str = "%+g"
if plot_data.x_size < 16:
do_plot_x_label = False
elif plot_data.x_size < 23:
x_str = "%+.2g"
y_str = "%+g"
if plot_data.x_size < 8:
do_plot_y_label = False
elif plot_data.x_size < 11:
y_str = "%+.2g"
act_min_x = (plot_data.min_x + plot_data.x_mod*margin_factor)
act_max_x = (plot_data.max_x - plot_data.x_mod*margin_factor)
act_min_y = (plot_data.min_y + plot_data.y_mod*margin_factor)
act_max_y = (plot_data.max_y - plot_data.y_mod*margin_factor)
if abs(act_min_x) < 1:
min_x_str = "%+.2g" % act_min_x
else:
min_x_str = x_str % act_min_x
if abs(act_max_x) < 1:
max_x_str = "%+.2g" % act_max_x
else:
max_x_str = x_str % act_max_x
if abs(act_min_y) < 1:
min_y_str = "%+.2g" % act_min_y
else:
min_y_str = y_str % act_min_y
if abs(act_max_y) < 1:
max_y_str = "%+.2g" % act_max_y
else:
max_y_str = y_str % act_max_y
min_x_coord = self.get_coord(act_min_x,plot_data.min_x,plot_data.x_step)
max_x_coord = self.get_coord(act_max_x,plot_data.min_x,plot_data.x_step)
min_y_coord = self.get_coord(act_min_y,plot_data.min_y,plot_data.y_step)
max_y_coord = self.get_coord(act_max_y,plot_data.min_y,plot_data.y_step)
#print plot_data
y_zero_coord = self.get_coord(0, plot_data.min_y, plot_data.y_step)
#if plot_data.min_x < 0 and plot_data.max_x > 0:
x_zero_coord = self.get_coord(0, plot_data.min_x, plot_data.x_step)
#else:
#pass
output_buffer[x_zero_coord][min_y_coord] = "+"
output_buffer[x_zero_coord][max_y_coord] = "+"
output_buffer[min_x_coord][y_zero_coord] = "+"
output_buffer[max_x_coord][y_zero_coord] = "+"
if do_plot_x_label:
for i,c in enumerate(min_x_str):
output_buffer[min_x_coord+i][y_zero_coord-1] = c
for i,c in enumerate(max_x_str):
output_buffer[max_x_coord+i-len(max_x_str)][y_zero_coord-1] = c
if do_plot_y_label:
for i,c in enumerate(max_y_str):
output_buffer[x_zero_coord+i][max_y_coord] = c
for i,c in enumerate(min_y_str):
output_buffer[x_zero_coord+i][min_y_coord] = c
def plot_data(self, xy_seq, output_buffer, plot_data):
if self.plot_slope:
xy_seq = list(xy_seq)
#sort according to the x coord
xy_seq.sort(key = lambda c: c[0])
prev_p = xy_seq[0]
e_xy_seq = enumerate(xy_seq)
e_xy_seq.next()
for i,(x,y) in e_xy_seq:
draw_symbol = self.dot
line_drawn = self.plot_line(prev_p, (x,y), output_buffer, plot_data)
prev_p = (x,y)
if not line_drawn:
if i > 0 and i < len(xy_seq)-1:
px,py = xy_seq[i-1]
nx,ny = xy_seq[i+1]
if abs(nx-px) > EPSILON:
slope = (1.0/plot_data.ratio)*(ny-py)/(nx-px)
draw_symbol = self.get_symbol_by_slope(slope, draw_symbol)
if x < plot_data.min_x or x >= plot_data.max_x or y < plot_data.min_y or y >= plot_data.max_y:
continue
x_coord = self.get_coord(x, plot_data.min_x, plot_data.x_step)
y_coord = self.get_coord(y, plot_data.min_y, plot_data.y_step)
if x_coord >= 0 and x_coord < len(output_buffer) and y_coord >= 0 and y_coord < len(output_buffer[0]):
if self.draw_axes:
if y_coord == self.get_coord(0, plot_data.min_y, plot_data.y_step) and draw_symbol == "-":
draw_symbol = "="
output_buffer[x_coord][y_coord] = draw_symbol
else:
for x,y in xy_seq:
if x < plot_data.min_x or x >= plot_data.max_x or y < plot_data.min_y or y >= plot_data.max_y:
continue
x_coord = self.get_coord(x, plot_data.min_x, plot_data.x_step)
y_coord = self.get_coord(y, plot_data.min_y, plot_data.y_step)
if x_coord >= 0 and x_coord < len(output_buffer) and y_coord > 0 and y_coord < len(output_buffer[0]):
output_buffer[x_coord][y_coord] = self.dot
def plot_line(self, start, end, output_buffer, plot_data):
start_coord = self.get_coord(start[0], plot_data.min_x, plot_data.x_step), self.get_coord(start[1], plot_data.min_y, plot_data.y_step)
end_coord = self.get_coord(end[0], plot_data.min_x, plot_data.x_step), self.get_coord(end[1], plot_data.min_y, plot_data.y_step)
x0,y0 = start_coord
x1,y1 = end_coord
if (x0,y0) == (x1,y1):
return True
clipped_line = clip_line(start, end, (plot_data.min_x, plot_data.min_y), (plot_data.max_x, plot_data.max_y))
if clipped_line != None:
start,end = clipped_line
else:
return False
start_coord = self.get_coord(start[0], plot_data.min_x, plot_data.x_step), self.get_coord(start[1], plot_data.min_y, plot_data.y_step)
end_coord = self.get_coord(end[0], plot_data.min_x, plot_data.x_step), self.get_coord(end[1], plot_data.min_y, plot_data.y_step)
x0,y0 = start_coord
x1,y1 = end_coord
if (x0,y0) == (x1,y1):
return True
x_zero_coord = self.get_coord(0, plot_data.min_x, plot_data.x_step)
y_zero_coord = self.get_coord(0, plot_data.min_y, plot_data.y_step)
if start[0]-end[0] == 0:
draw_symbol = "|"
else:
slope = (1.0/plot_data.ratio)*(end[1]-start[1])/(end[0]-start[0])
draw_symbol = self.get_symbol_by_slope(slope, self.dot)
try:
delta = x1-x0, y1-y0
if abs(delta[0])>abs(delta[1]):
s = sign(delta[0])
slope = float(delta[1])/delta[0]
for i in range(0,abs(int(delta[0]))):
cur_draw_symbol = draw_symbol
x = i*s
cur_y = int(y0+slope*x)
if self.draw_axes and cur_y == y_zero_coord and draw_symbol == "-":
cur_draw_symbol = "="
output_buffer[x0+x][cur_y] = cur_draw_symbol
else:
s = sign(delta[1])
slope = float(delta[0])/delta[1]
for i in range(0,abs(int(delta[1]))):
y = i*s
cur_draw_symbol = draw_symbol
cur_y = y0+y
if self.draw_axes and cur_y == y_zero_coord and draw_symbol == "-":
cur_draw_symbol = "="
output_buffer[int(x0+slope*y)][cur_y] = cur_draw_symbol
except:
print start, end
print start_coord, end_coord
print plot_data
raise
return False
def plot_single(self, seq, min_x = None, max_x = None, min_y = None, max_y = None):
return self.plot_double(range(len(seq)),seq, min_x, max_x, min_y, max_y)
def plot_double(self, x_seq, y_seq, min_x = None, max_x = None, min_y = None, max_y = None):
if min_x == None:
min_x = min(x_seq)
if max_x == None:
max_x = max(x_seq)
if min_y == None:
min_y = min(y_seq)
if max_y == None:
max_y = max(y_seq)
if max_y == min_y:
max_y += 1
x_mod = (max_x-min_x)*self.x_margin
y_mod = (max_y-min_y)*self.y_margin
min_x-=x_mod
max_x+=x_mod
min_y-=y_mod
max_y+=y_mod
plot_data = self.PlotData(self.x_size, self.y_size, min_x, max_x, min_y, max_y, x_mod, y_mod)
output_buffer = [[" "]*self.y_size for i in range(self.x_size)]
if self.will_draw_axes:
self.draw_axes(output_buffer, plot_data)
self.plot_data(zip(x_seq, y_seq), output_buffer, plot_data)
if self.will_plot_labels:
self.plot_labels(output_buffer, plot_data)
trans_result = transposed(y_reversed(output_buffer))
result = self.new_line.join(["".join(row) for row in trans_result])
return result
def draw_axes(self, output_buffer, plot_data):
draw_x = False
draw_y = False
if plot_data.min_x <= 0 and plot_data.max_x > 0:
draw_y = True
zero_x = self.get_coord(0, plot_data.min_x, plot_data.x_step)
for y in xrange(plot_data.y_size):
output_buffer[zero_x][y] = "|"
if plot_data.min_y <= 0 and plot_data.max_y > 0:
draw_x = True
zero_y = self.get_coord(0, plot_data.min_y, plot_data.y_step)
for x in xrange(plot_data.x_size):
output_buffer[x][zero_y] = "-"
if draw_x and draw_y:
output_buffer[zero_x][zero_y] = "+"
@staticmethod
def get_coord(val, min, step):
result = int((val - min)/step)
return result
def clip_line(line_pt_1, line_pt_2, rect_bottom_left, rect_top_right):
ts = [0.0,1.0]
if line_pt_1[0] == line_pt_2[0]:
return ((line_pt_1[0], max(min(line_pt_1[1], line_pt_2[1]), rect_bottom_left[1])),
(line_pt_1[0], min(max(line_pt_1[1], line_pt_2[1]), rect_top_right[1])))
if line_pt_1[1] == line_pt_2[1]:
return ((max(min(line_pt_1[0], line_pt_2[0]), rect_bottom_left[0]), line_pt_1[1]),
(min(max(line_pt_1[0], line_pt_2[0]), rect_top_right[0]), line_pt_1[1]))
if ((rect_bottom_left[0] <= line_pt_1[0] and line_pt_1[0] < rect_top_right[0]) and
(rect_bottom_left[1] <= line_pt_1[1] and line_pt_1[1] < rect_top_right[1]) and
(rect_bottom_left[0] <= line_pt_2[0] and line_pt_2[0] < rect_top_right[0]) and
(rect_bottom_left[1] <= line_pt_2[1] and line_pt_2[1] < rect_top_right[1])):
return line_pt_1, line_pt_2
ts.append( float(rect_bottom_left[0]-line_pt_1[0])/(line_pt_2[0]-line_pt_1[0]) )
ts.append( float(rect_top_right[0]-line_pt_1[0])/(line_pt_2[0]-line_pt_1[0]) )
ts.append( float(rect_bottom_left[1]-line_pt_1[1])/(line_pt_2[1]-line_pt_1[1]) )
ts.append( float(rect_top_right[1]-line_pt_1[1])/(line_pt_2[1]-line_pt_1[1]) )
ts.sort()
if ts[2] < 0 or ts[2] >= 1 or ts[3] < 0 or ts[2]>= 1:
return None
result = [(pt_1 + t*(pt_2-pt_1)) for t in (ts[2],ts[3]) for (pt_1, pt_2) in zip(line_pt_1, line_pt_2)]
return (result[0],result[1]), (result[2], result[3])
def plot(*args,**flags):
limit_flags_names = set(["min_x","min_y","max_x","max_y"])
limit_flags = dict([(n,flags[n]) for n in limit_flags_names & set(flags)])
settting_flags = dict([(n,flags[n]) for n in set(flags) - limit_flags_names])
if len(args) == 1:
p = Plotter(**settting_flags)
print p.plot_single(args[0],**limit_flags)
elif len(args) == 2:
p = Plotter(**settting_flags)
print p.plot_double(args[0],args[1],**limit_flags)
else:
raise NotImplementedError("can't draw multiple graphs yet")
__all__ = ["Plotter","plot"]