Browse Source

Add locking mechanism to avoid multiple servers on one DB

tags/nilmdb-1.4.4
Jim Paris 11 years ago
parent
commit
c0d450d39e
4 changed files with 53 additions and 0 deletions
  1. +13
    -0
      nilmdb/server/bulkdata.py
  2. +2
    -0
      nilmdb/utils/__init__.py
  3. +33
    -0
      nilmdb/utils/lock.py
  4. +5
    -0
      tests/test_bulkdata.py

+ 13
- 0
nilmdb/server/bulkdata.py View File

@@ -14,6 +14,7 @@ import re
import sys
import tempfile

import nilmdb.utils.lock
from . import rocket

# Up to 256 open file descriptors at any given time.
@@ -26,6 +27,8 @@ class BulkData(object):
def __init__(self, basepath, **kwargs):
self.basepath = basepath
self.root = os.path.join(self.basepath, "data")
self.lock = os.path.join(self.root, "lock")
self.lockfile = None

# Tuneables
if "file_size" in kwargs:
@@ -44,8 +47,18 @@ class BulkData(object):
if not os.path.isdir(self.root):
os.mkdir(self.root)

# Create the lock
self.lockfile = open(self.lock, "w")
if not nilmdb.utils.lock.exclusive_lock(self.lockfile):
raise IOError('database at "' + self.basepath +
'" is already locked by another process')

def close(self):
    """Shut down this BulkData instance.

    Empties the getnode cache and, if a database lock is held,
    releases it and closes the lock file so another process can
    open the database."""
    self.getnode.cache_remove_all()
    if self.lockfile is None:
        return
    # Drop the advisory lock before closing the underlying file,
    # then forget the handle so a second close() is a no-op.
    nilmdb.utils.lock.exclusive_unlock(self.lockfile)
    self.lockfile.close()
    self.lockfile = None

def _encode_filename(self, path):
# Encode all paths to UTF-8, regardless of sys.getfilesystemencoding(),


+ 2
- 0
nilmdb/utils/__init__.py View File

@@ -1,5 +1,6 @@
"""NilmDB utilities"""

from __future__ import absolute_import
from nilmdb.utils.timer import Timer
from nilmdb.utils.iteratorizer import Iteratorizer
from nilmdb.utils.serializer import serializer_proxy
@@ -12,3 +13,4 @@ import nilmdb.utils.fallocate
import nilmdb.utils.time
import nilmdb.utils.iterator
import nilmdb.utils.interval
import nilmdb.utils.lock

+ 33
- 0
nilmdb/utils/lock.py View File

@@ -0,0 +1,33 @@
# File locking
#
# Thin wrapper around flock(2)-style advisory locks, with no-op
# fallbacks on platforms that lack the fcntl module.

import warnings

try:
    import fcntl
    import errno

    def exclusive_lock(f):
        """Try to acquire a non-blocking exclusive advisory lock on the
        open file object *f*.

        Returns True on successful lock, or False if another process
        already holds the lock.  Any other flock() failure is
        re-raised."""
        try:
            fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError as e:
            # flock(2) reports a held lock as EWOULDBLOCK (equal to
            # EAGAIN on Linux, but not guaranteed everywhere) or, on
            # some systems, EACCES.
            if e.errno in (errno.EACCES, errno.EAGAIN, errno.EWOULDBLOCK):
                return False
            else:  # pragma: no cover
                raise
        return True

    def exclusive_unlock(f):
        """Release the exclusive lock held on the open file object *f*."""
        fcntl.flock(f.fileno(), fcntl.LOCK_UN)

except ImportError:  # pragma: no cover
    # No fcntl (e.g. Windows): provide no-op stand-ins so callers keep
    # working, just without any real inter-process locking.

    def exclusive_lock(f):
        """Dummy lock function -- does not lock!"""
        warnings.warn("Pretending to lock " + str(f))
        return True

    def exclusive_unlock(f):
        """Release an exclusive lock (no-op on this platform)."""
        return

+ 5
- 0
tests/test_bulkdata.py View File

@@ -30,6 +30,11 @@ class TestBulkData(object):
else:
data = BulkData(db, file_size = size, files_per_dir = files)

# Try opening it again (should result in locking error)
with assert_raises(IOError) as e:
data2 = BulkData(db)
in_("already locked by another process", str(e.exception))

# create empty
with assert_raises(ValueError):
data.create("/foo", "uint16_8")


Loading…
Cancel
Save