# Benchmark: timings for creating and re-opening wide (flat) PyTables trees.
import cProfile
import pstats
import tempfile
import unittest
from pathlib import Path
from time import perf_counter as clock

import hotshot  # NOTE(review): Python 2-only profiler; unavailable on Python 3
import hotshot.stats

import tables as tb
verbose = 0
class WideTreeTestCase(unittest.TestCase):
"""Checks for maximum number of childs for a Group."""
def test00_leafs(self):
"""Checking creation of large number of leafs (1024) per group.
Variable 'maxchilds' controls this check. PyTables support up to
4096 childs per group, but this would take too much memory (up
to 64 MB) for testing purposes (may be we can add a test for big
platforms). A 1024 childs run takes up to 30 MB. A 512 childs
test takes around 25 MB.
"""
maxchilds = 1000
if verbose:
print("\n", "-=" * 30)
print("Running %s.test00_wideTree..." % self.__class__.__name__)
print("Maximum number of childs tested :", maxchilds)
# Open a new empty HDF5 file
# file = tempfile.mktemp(".h5")
file = "test_widetree.h5"
fileh = tb.open_file(file, mode="w")
if verbose:
print("Children writing progress: ", end=" ")
for child in range(maxchilds):
if verbose:
print("%3d," % (child), end=" ")
a = [1, 1]
fileh.create_group(
fileh.root, "group" + str(child), "child: %d" % child
)
fileh.create_array(
"/group" + str(child),
"array" + str(child),
a,
"child: %d" % child,
)
if verbose:
print()
# Close the file
fileh.close()
t1 = clock()
# Open the previous HDF5 file in read-only mode
fileh = tb.open_file(file, mode="r")
print(
"\nTime spent opening a file with %d groups + %d arrays: "
"%s s" % (maxchilds, maxchilds, clock() - t1)
)
if verbose:
print("\nChildren reading progress: ", end=" ")
# Close the file
fileh.close()
# Then, delete the file
# os.remove(file)
def test01_wide_tree(self):
"""Checking creation of large number of groups (1024) per group.
Variable 'maxchilds' controls this check. PyTables support up to
4096 childs per group, but this would take too much memory (up
to 64 MB) for testing purposes (may be we can add a test for big
platforms). A 1024 childs run takes up to 30 MB. A 512 childs
test takes around 25 MB.
"""
maxchilds = 1000
if verbose:
print("\n", "-=" * 30)
print("Running %s.test00_wideTree..." % self.__class__.__name__)
print("Maximum number of childs tested :", maxchilds)
# Open a new empty HDF5 file
file = tempfile.mktemp(".h5")
# file = "test_widetree.h5"
fileh = tb.open_file(file, mode="w")
if verbose:
print("Children writing progress: ", end=" ")
for child in range(maxchilds):
if verbose:
print("%3d," % (child), end=" ")
fileh.create_group(
fileh.root, "group" + str(child), "child: %d" % child
)
if verbose:
print()
# Close the file
fileh.close()
t1 = clock()
# Open the previous HDF5 file in read-only mode
fileh = tb.open_file(file, mode="r")
print(
"\nTime spent opening a file with %d groups: %s s"
% (maxchilds, clock() - t1)
)
# Close the file
fileh.close()
# Then, delete the file
Path(file).unlink()
# ----------------------------------------------------------------------
def suite():
suite_ = unittest.TestSuite()
from tables.tests.common import make_suite
suite_.addTest(make_suite(WideTreeTestCase))
return suite_
if __name__ == "__main__":
prof = hotshot.Profile("widetree.prof")
benchtime, stones = prof.runcall(unittest.main(defaultTest="suite"))
prof.close()
stats = hotshot.stats.load("widetree.prof")
stats.strip_dirs()
stats.sort_stats("time", "calls")
stats.print_stats(20)
|