Compare commits


9 Commits

SHA1        Date                        Message

ccf1f695af  2013-08-05 12:25:36 -04:00  Prevent negative numbers in dbinfo output.
                                        This might occur if things change while we're
                                        calculating the sizes.
06f7390c9e  2013-08-05 12:25:10 -04:00  Fix disk usage block size
6de77a08f1  2013-08-05 12:16:56 -04:00  Report actual disk size, not apparent size
8db9771c20  2013-08-05 12:16:47 -04:00  Remove leftover fsck test
04f815a24b  2013-08-04 19:51:13 -04:00  Reorder nilmtool commands
6868f5f126  2013-08-03 22:34:30 -04:00  fsck: limit max retries so we don't get stuck in a loop forever
ca0943ec19  2013-08-03 22:31:45 -04:00  fsck: add --no-data option to do a quicker fsck
                                        This makes it fast enough to run at startup
                                        with -f, if it's expected that a system will
                                        frequently need to be fixed.
68addb4e4a  2013-08-03 21:58:24 -04:00  Clarify output when fsck database is locked
68c33b1f14  2013-08-03 21:50:33 -04:00  fsck: add comma separator on big numbers
6 changed files with 26 additions and 20 deletions

View File

@@ -1,5 +1,5 @@
 # By default, run the tests.
-all: fscktest
+all: test
 
 version:
 	python setup.py version
@@ -23,10 +23,6 @@ docs:
 lint:
 	pylint --rcfile=.pylintrc nilmdb
 
-fscktest:
-	python -c "import nilmdb.fsck; nilmdb.fsck.Fsck('/home/jim/wsgi/db').check()"
-#	python -c "import nilmdb.fsck; nilmdb.fsck.Fsck('/home/jim/mnt/bucket/mnt/sharon/data/db', True).check()"
-
 test:
 ifeq ($(INSIDE_EMACS), t)
 	# Use the slightly more flexible script

View File

@@ -19,9 +19,8 @@ except ImportError: # pragma: no cover
 
 # Valid subcommands. Defined in separate files just to break
 # things up -- they're still called with Cmdline as self.
-subcommands = [ "help", "info", "create", "list", "metadata",
-                "insert", "extract", "remove", "destroy",
-                "intervals", "rename" ]
+subcommands = [ "help", "info", "create", "rename", "list", "intervals",
+                "metadata", "insert", "extract", "remove", "destroy" ]
 
 # Import the subcommand modules
 subcmd_mods = {}

View File

@@ -44,15 +44,16 @@ def err(format, *args):
     fprintf(sys.stderr, format, *args)
 
 # Decorator that retries a function if it returns a specific value
-def retry_if_raised(exc, message = None):
+def retry_if_raised(exc, message = None, max_retries = 100):
     def f1(func):
         def f2(*args, **kwargs):
-            while True:
+            for n in range(max_retries):
                 try:
                     return func(*args, **kwargs)
                 except exc as e:
                     if message:
                         log("%s\n\n", message)
+            raise Exception("Max number of retries (%d) exceeded; giving up")
         return f2
     return f1
 
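The hunk above bounds a previously endless retry loop: instead of looping forever on the given exception, the wrapped function is re-run at most max_retries times and then the decorator gives up. As a self-contained sketch of that pattern (illustrative names, message, and exception only, not the project's code), with the retry limit formatted into the final error:

    import random

    # Bounded-retry decorator: re-run the wrapped function each time `exc`
    # is raised, up to `max_retries` attempts, then give up loudly.
    def retry_if_raised(exc, message=None, max_retries=100):
        def f1(func):
            def f2(*args, **kwargs):
                for n in range(max_retries):
                    try:
                        return func(*args, **kwargs)
                    except exc:
                        if message:
                            print(message)
                raise Exception("Max number of retries (%d) exceeded; "
                                "giving up" % max_retries)
            return f2
        return f1

    class Flaky(Exception):
        pass

    @retry_if_raised(Flaky, message="fixed something; retrying", max_retries=5)
    def sometimes_works():
        if random.random() < 0.5:        # hypothetical flaky operation
            raise Flaky()
        return "ok"

    print(sometimes_works())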
@@ -89,7 +90,7 @@ class Fsck(object):
     ### Main checks
 
     @retry_if_raised(RetryFsck, "Something was fixed: restarting fsck")
-    def check(self):
+    def check(self, skip_data = False):
         self.bulk = None
         self.sql = None
         try:
@@ -97,6 +98,9 @@ class Fsck(object):
             self.check_sql()
             self.check_streams()
             self.check_intervals()
-            self.check_data()
+            if skip_data:
+                log("skipped data check\n")
+            else:
+                self.check_data()
         finally:
             if self.bulk:
@@ -118,7 +122,11 @@ class Fsck(object):
             raise FsckError("Bulk data directory missing (%s)", self.bulkpath)
         with open(self.bulklock, "w") as lockfile:
             if not nilmdb.utils.lock.exclusive_lock(lockfile):
-                raise FsckError('database already locked by another process')
+                raise FsckError('Database already locked by another process\n'
+                                'Make sure all other processes that might be '
+                                'using the database are stopped.\n'
+                                'Restarting apache will cause it to unlock '
+                                'the db until a request is received.')
             # unlocked immediately
         self.bulk = nilmdb.server.bulkdata.BulkData(self.basepath)
@@ -170,7 +178,7 @@ class Fsck(object):
 
     def check_streams(self):
         ids = self.stream_path.keys()
-        log("checking %d streams\n", len(ids))
+        log("checking %s streams\n", "{:,d}".format(len(ids)))
         with Progress(len(ids)) as pbar:
             for i, sid in enumerate(ids):
                 pbar.update(i)
@@ -306,7 +314,7 @@ class Fsck(object):
 
     def check_intervals(self):
         total_ints = sum(len(x) for x in self.stream_interval.values())
-        log("checking %d intervals\n", total_ints)
+        log("checking %s intervals\n", "{:,d}".format(total_ints))
         done = 0
         with Progress(total_ints) as pbar:
             for sid in self.stream_interval:
@@ -389,7 +397,7 @@ class Fsck(object):
     def check_data(self):
         total_rows = sum(sum((y[3] - y[2]) for y in x)
                          for x in self.stream_interval.values())
-        log("checking %d rows of data\n", total_rows)
+        log("checking %s rows of data\n", "{:,d}".format(total_rows))
         done = 0
         with Progress(total_rows) as pbar:
             for sid in self.stream_interval:
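The three log() changes above use the "," option of Python's format mini-language (available since 2.7) to group big counts with thousands separators, for example:

    # Thousands separators via the format mini-language:
    print("checking %s rows of data" % "{:,d}".format(1234567))
    # prints: checking 1,234,567 rows of data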

View File

@@ -16,10 +16,12 @@ def main():
     parser.add_argument("-f", "--fix", action="store_true",
                         default=False, help = 'Fix errors when possible '
                         '(which may involve removing data)')
+    parser.add_argument("-n", "--no-data", action="store_true",
+                        default=False, help = 'Skip the slow full-data check')
     parser.add_argument('database', help = 'Database directory')
 
     args = parser.parse_args()
-    nilmdb.fsck.Fsck(args.database, args.fix).check()
+    nilmdb.fsck.Fsck(args.database, args.fix).check(skip_data = args.no_data)
 
 if __name__ == "__main__":
     main()
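Together with the new check(skip_data=...) parameter in fsck.py above, the -n/--no-data flag skips only the slow per-row data check while the SQL, stream, and interval checks still run. A hedged usage sketch (the database path is a placeholder):

    # Programmatic equivalent of running the fsck script with --fix --no-data;
    # the path below is made up.
    import nilmdb.fsck

    f = nilmdb.fsck.Fsck("/path/to/db", True)   # second arg: fix errors when possible
    f.check(skip_data=True)                     # what -n / --no-data passes through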

View File

@@ -74,8 +74,8 @@ class Root(NilmApp):
         dbsize = nilmdb.utils.du(path)
         return { "path": path,
                  "size": dbsize,
-                 "other": usage.used - dbsize,
-                 "reserved": usage.total - usage.used - usage.free,
+                 "other": max(usage.used - dbsize, 0),
+                 "reserved": max(usage.total - usage.used - usage.free, 0),
                  "free": usage.free }
 
 class Stream(NilmApp):
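These max(..., 0) clamps are the "prevent negative numbers" fix from commit ccf1f695af: the filesystem usage figures and the du(path) walk are sampled at slightly different moments, so if data is written or removed in between, the derived "other" or "reserved" values could briefly go negative. A minimal sketch of the same arithmetic with made-up numbers (the Usage tuple here just stands in for whatever disk-usage call the server uses):

    from collections import namedtuple

    # Stand-in for the server's disk-usage query; values are fabricated.
    Usage = namedtuple("Usage", "total used free")
    usage = Usage(total=100 * 10**9, used=60 * 10**9, free=35 * 10**9)
    dbsize = 61 * 10**9      # e.g. du() ran after another GB was written

    info = {
        "size": dbsize,
        "other": max(usage.used - dbsize, 0),                     # clamped to 0
        "reserved": max(usage.total - usage.used - usage.free, 0),
        "free": usage.free,
    }
    print(info)   # "other" is 0 instead of -1000000000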

View File

@@ -21,7 +21,8 @@ def du(path):
     errors that might occur if we encounter broken symlinks or
     files in the process of being removed."""
     try:
-        size = os.path.getsize(path)
+        st = os.stat(path)
+        size = st.st_blocks * 512
         if os.path.isdir(path):
             for thisfile in os.listdir(path):
                 filepath = os.path.join(path, thisfile)
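The final change makes du() count allocated blocks rather than apparent length: on Linux, st_blocks is reported in 512-byte units, so sparse files and unwritten holes no longer inflate the reported database size. A small self-contained comparison (creates and removes a sparse temporary file):

    import os
    import tempfile

    # Make a 10 MiB sparse file: large apparent size, little or nothing on disk.
    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.truncate(10 * 1024 * 1024)
        path = f.name

    st = os.stat(path)
    print("apparent size:", os.path.getsize(path))   # 10485760
    print("disk usage:   ", st.st_blocks * 512)      # typically 0 or a few KiB
    os.unlink(path)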