You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

test_bulkdata.py 3.1 KiB

(line-number gutter from the original page view omitted)
  1. # -*- coding: utf-8 -*-
  2. import nilmdb
  3. from nilmdb.utils.printf import *
  4. from nose.tools import *
  5. from nose.tools import assert_raises
  6. import itertools
  7. from testutil.helpers import *
  8. testdb = "tests/bulkdata-testdb"
  9. import nilmdb.server.bulkdata
  10. from nilmdb.server.bulkdata import BulkData
  11. class TestBulkData(object):
  12. def test_bulkdata(self):
  13. for (size, files, db) in [ ( 0, 0, testdb ),
  14. ( 25, 1000, testdb ),
  15. ( 1000, 3, testdb.decode("utf-8") ) ]:
  16. recursive_unlink(db)
  17. os.mkdir(db)
  18. self.do_basic(db, size, files)
  19. def do_basic(self, db, size, files):
  20. """Do the basic test with variable file_size and files_per_dir"""
  21. if not size or not files:
  22. data = BulkData(db)
  23. else:
  24. data = BulkData(db, file_size = size, files_per_dir = files)
  25. # create empty
  26. with assert_raises(ValueError):
  27. data.create("/foo", "uint16_8")
  28. with assert_raises(ValueError):
  29. data.create("foo/bar", "uint16_8")
  30. with assert_raises(ValueError):
  31. data.create("/foo/bar", "uint8_8")
  32. data.create("/foo/bar", "uint16_8")
  33. data.create(u"/foo/baz/quux", "float64_16")
  34. with assert_raises(ValueError):
  35. data.create("/foo/bar/baz", "uint16_8")
  36. with assert_raises(ValueError):
  37. data.create("/foo/baz", "float64_16")
  38. # get node -- see if caching works
  39. nodes = []
  40. for i in range(5000):
  41. nodes.append(data.getnode("/foo/bar"))
  42. nodes.append(data.getnode("/foo/baz/quux"))
  43. del nodes
  44. # Test node
  45. node = data.getnode("/foo/bar")
  46. with assert_raises(IndexError):
  47. x = node[0]
  48. raw = []
  49. for i in range(1000):
  50. raw.append([10000+i, 1, 2, 3, 4, 5, 6, 7, 8 ])
  51. node.append(raw[0:1])
  52. node.append(raw[1:100])
  53. node.append(raw[100:])
  54. misc_slices = [ 0, 100, slice(None), slice(0), slice(10),
  55. slice(5,10), slice(3,None), slice(3,-3),
  56. slice(20,10), slice(200,100,-1), slice(None,0,-1),
  57. slice(100,500,5) ]
  58. # Extract slices
  59. for s in misc_slices:
  60. eq_(node[s], raw[s])
  61. # Get some coverage of remove; remove is more fully tested
  62. # in cmdline
  63. with assert_raises(IndexError):
  64. node.remove(9999,9998)
  65. # close, reopen
  66. # reopen
  67. data.close()
  68. if not size or not files:
  69. data = BulkData(db)
  70. else:
  71. data = BulkData(db, file_size = size, files_per_dir = files)
  72. node = data.getnode("/foo/bar")
  73. # Extract slices
  74. for s in misc_slices:
  75. eq_(node[s], raw[s])
  76. # destroy
  77. with assert_raises(ValueError):
  78. data.destroy("/foo")
  79. with assert_raises(ValueError):
  80. data.destroy("/foo/baz")
  81. with assert_raises(ValueError):
  82. data.destroy("/foo/qwerty")
  83. data.destroy("/foo/baz/quux")
  84. data.destroy("/foo/bar")
  85. # close
  86. data.close()