|
|
@@ -667,7 +667,7 @@ class TestCmdline(object): |
|
|
|
# Test BulkData's ability to split into multiple files, |
|
|
|
# by forcing the file size to be really small. |
|
|
|
server_stop() |
|
|
|
server_start(bulkdata_args = { "file_size" : 999, |
|
|
|
server_start(bulkdata_args = { "file_size" : 920, # 23 rows per file |
|
|
|
"files_per_dir" : 3 }) |
|
|
|
|
|
|
|
# Fill data |
|
|
@@ -702,6 +702,7 @@ class TestCmdline(object): |
|
|
|
# Now recreate the data one more time and make sure there are |
|
|
|
# fewer files. |
|
|
|
self.ok("destroy /newton/prep") |
|
|
|
self.fail("destroy /newton/prep") # already destroyed |
|
|
|
self.ok("create /newton/prep float32_8") |
|
|
|
os.environ['TZ'] = "UTC" |
|
|
|
with open("tests/data/prep-20120323T1004-timestamped") as input: |
|
|
@@ -709,4 +710,96 @@ class TestCmdline(object): |
|
|
|
nfiles = 0 |
|
|
|
for (dirpath, dirnames, filenames) in os.walk(testdb): |
|
|
|
nfiles += len(filenames) |
|
|
|
assert(nfiles < 50) |
|
|
|
lt_(nfiles, 50) |
|
|
|
self.ok("destroy /newton/prep") # destroy again |
|
|
|
|
|
|
|
def test_14_remove_files(self):
    """Remove data that spans multiple bulkdata files.

    Restarts the server with a tiny file_size so inserted data is
    forced across many files (and several subdirectories), then
    removes various time ranges and checks row counts, interval
    listings, and on-disk size.  Should be a fairly comprehensive
    test of remove functionality.
    """
    # Test BulkData's ability to remove when data is split into
    # multiple files.  Should be a fairly comprehensive test of
    # remove functionality.

    # Restart server with a very small file size so the data below
    # spans many bulkdata files; 3 files per directory also forces
    # multiple subdirectories to be created and cleaned up.
    server_stop()
    server_start(bulkdata_args = { "file_size" : 920, # 23 rows per file
                                   "files_per_dir" : 3 })

    # Insert data.  Just for fun, insert out of order
    self.ok("create /newton/prep PrepData")
    os.environ['TZ'] = "UTC"
    self.ok("insert --rate 120 /newton/prep "
            "tests/data/prep-20120323T1002 "
            "tests/data/prep-20120323T1000")

    # Should take up about 2.8 MB here (including directory entries)
    du_before = nilmdb.utils.diskusage.du_bytes(testdb)

    # Make sure we have the data we expect: two 2-minute intervals,
    # one per inserted file.
    self.ok("list --detail")
    self.match("/newton/prep PrepData\n" +
               " [ Fri, 23 Mar 2012 10:00:00.000000 +0000"
               " -> Fri, 23 Mar 2012 10:01:59.991668 +0000 ]\n"
               " [ Fri, 23 Mar 2012 10:02:00.000000 +0000"
               " -> Fri, 23 Mar 2012 10:03:59.991668 +0000 ]\n")

    # Remove various chunks of prep data and make sure
    # they're gone.  (-c prints a count of rows affected.)
    # 4 minutes at 120 rows/sec = 28800 rows total to start.
    self.ok("extract -c /newton/prep --start 2000-01-01 --end 2020-01-01")
    self.match("28800\n")

    # Remove a 3-minute span from the middle (180 s * 120 = 21600 rows).
    self.ok("remove -c /newton/prep " +
            "--start '23 Mar 2012 10:00:30' " +
            "--end '23 Mar 2012 10:03:30'")
    self.match("21600\n")

    # Remove a 10-second span inside remaining data (1200 rows).
    self.ok("remove -c /newton/prep " +
            "--start '23 Mar 2012 10:00:10' " +
            "--end '23 Mar 2012 10:00:20'")
    self.match("1200\n")

    # This range overlaps the hole just created, so only the 10
    # remaining seconds are removed (1200 rows again).
    self.ok("remove -c /newton/prep " +
            "--start '23 Mar 2012 10:00:05' " +
            "--end '23 Mar 2012 10:00:25'")
    self.match("1200\n")

    # Range extends past the end of the data; only 10 s exist (1200 rows).
    self.ok("remove -c /newton/prep " +
            "--start '23 Mar 2012 10:03:50' " +
            "--end '23 Mar 2012 10:06:50'")
    self.match("1200\n")

    # 28800 - 21600 - 1200 - 1200 - 1200 = 3600 rows remain.
    self.ok("extract -c /newton/prep --start 2000-01-01 --end 2020-01-01")
    self.match("3600\n")

    # See the missing chunks in list output
    self.ok("list --detail")
    self.match("/newton/prep PrepData\n" +
               " [ Fri, 23 Mar 2012 10:00:00.000000 +0000"
               " -> Fri, 23 Mar 2012 10:00:05.000000 +0000 ]\n"
               " [ Fri, 23 Mar 2012 10:00:25.000000 +0000"
               " -> Fri, 23 Mar 2012 10:00:30.000000 +0000 ]\n"
               " [ Fri, 23 Mar 2012 10:03:30.000000 +0000"
               " -> Fri, 23 Mar 2012 10:03:50.000000 +0000 ]\n")

    # We have 1/8 of the data that we had before, so the file size
    # should have dropped below 1/4 of what it used to be
    du_after = nilmdb.utils.diskusage.du_bytes(testdb)
    lt_(du_after, (du_before / 4))

    # Remove anything that came from the 10:02 data file
    self.ok("remove /newton/prep " +
            "--start '23 Mar 2012 10:02:00' --end '2020-01-01'")

    # Shut down and restart server, to force nrows to get refreshed
    # global test_server, test_db
    # raise Exception()
    # print test_db.data.getnode("/newton/prep")
    server_stop()
    server_start()
    # print test_db.data.getnode("/newton/prep")

    # Re-add the full 10:02 data file.  This tests adding new data once
    # we removed data near the end.
    self.ok("insert --rate 120 /newton/prep tests/data/prep-20120323T1002")

    # See if we can extract it all
    # (3600 remaining + 12000 re-inserted = 15600 output lines;
    # presumably one line per row — verify against extract format.)
    self.ok("extract /newton/prep --start 2000-01-01 --end 2020-01-01")
    lines_(self.captured, 15600)

    # raise Exception()