
Commit 41f92e3

Add regression testing
1 parent ac85071 commit 41f92e3

4 files changed: +186 −1 lines

04_Testing.ipynb (+11)

@@ -378,6 +378,17 @@
     "```"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## TDD and tests first\n",
+    "\n",
+    "Have you ever heard of test-driven development (TDD)? It is a commonly used practice in which you write the tests for your scripts before, or at the same time as, the actual code.\n",
+    "\n",
+    "Its advantages include early bug detection, better test coverage, and generally higher code quality. It also helps you stay sure of what your code is doing at all times and makes it easier to add new features without a major risk of breaking existing behaviour."
+   ]
+  },
   {
    "cell_type": "markdown",
    "metadata": {
05_SharingAnalysis.ipynb (+1 −1)

@@ -88,7 +88,7 @@
     " - source activate testenv\n",
     " \n",
     "script:\n",
-    " - pytest\n",
+    " - python3 -m pytest tests/\n",
     " - pytest --nbval notebooks/00_explore-data.ipynb\n",
     "```"
    ]
solutions/tests/regression_test.py (new file, +50)

import unittest
import os
import sys


class FixturesTest(unittest.TestCase):

    # ------------------- setup and teardown ---------------------------
    @classmethod
    def setUpClass(cls):
        print('\nin set up - this takes about 80 secs')

        from tests.write_fixtures import generate_fixture_hashes, unpickle_hash
        cls.hash_dict_new = generate_fixture_hashes()
        cls.hash_dict_original = unpickle_hash()
        # define dictionary keys for the individual files to check
        folder = 'temporary_test_fixtures'
        cls.corrmat = folder + '/corrmat_file.txt'
        cls.gm = folder + '/network-analysis/GlobalMeasures_corrmat_file_COST010.csv'
        cls.lm = folder + '/network-analysis/NodalMeasures_corrmat_file_COST010.csv'
        cls.rich = folder + '/network-analysis/RICH_CLUB_corrmat_file_COST010.csv'

    # --------------------------- Tests --------------------------------
    # Each of these tests checks that our newly generated version of
    # file_x matches the fixture version

    def test_corrmat_matches_fixture(self):
        # test new correlation matrix against fixture
        print('\ntesting new correlation matrix against fixture')
        self.assertEqual(self.hash_dict_new[self.corrmat],
                         self.hash_dict_original[self.corrmat])

    def test_lm_against_fixture(self):
        # test new nodal (local) measures against fixture
        print('\ntesting new nodal measures against fixture')
        self.assertEqual(self.hash_dict_new[self.lm],
                         self.hash_dict_original[self.lm])

    def test_gm_against_fixture(self):
        # test new global measures against fixture
        print('\ntesting new global measures against fixture')
        self.assertEqual(self.hash_dict_new[self.gm],
                         self.hash_dict_original[self.gm])

    def test_rich_against_fixture(self):
        # test rich club against fixture
        print('\ntesting rich club against fixture')
        self.assertEqual(self.hash_dict_new[self.rich],
                         self.hash_dict_original[self.rich])


if __name__ == '__main__':
    unittest.main()
solutions/tests/write_fixtures.py (new file, +124)

# --------------------------- Write fixtures ---------------------------
# To regression test our wrappers we need examples. This script
# generates those files. We save them once, and regression_test.py
# re-generates them to check that they are identical to the
# pre-saved examples (fixtures). If they are not identical it
# raises an error.
#
# The point of this is to check that, throughout the changes we make
# to our package, the functionality of this script stays the same.
# -----------------------------------------------------------------------

import os
import sys
import networkx as nx


def recreate_correlation_matrix_fixture(folder):
    ##### generate a correlation matrix in the given folder using #####
    ##### the Whitaker_Vertes dataset                              #####
    import BrainNetworksInPython.datasets.NSPN_WhitakerVertes_PNAS2016.data as data
    centroids, regionalmeasures, names, covars, names_308_style = data._get_data()
    from BrainNetworksInPython.wrappers.corrmat_from_regionalmeasures import corrmat_from_regionalmeasures
    corrmat_path = os.getcwd() + folder + '/corrmat_file.txt'
    corrmat_from_regionalmeasures(
        regionalmeasures,
        names,
        corrmat_path,
        names_308_style=names_308_style)


def recreate_network_analysis_fixture(folder, corrmat_path):
    ##### generate network analysis in the given folder using the #####
    ##### data in example_data and the correlation matrix given   #####
    ##### by corrmat_path                                          #####
    import BrainNetworksInPython.datasets.NSPN_WhitakerVertes_PNAS2016.data as data
    centroids, regionalmeasures, names, covars, names_308_style = data._get_data()
    # It is necessary to specify a random seed because
    # network_analysis_from_corrmat generates random graphs to
    # calculate global measures
    import random
    random.seed(2984)
    from BrainNetworksInPython.wrappers.network_analysis_from_corrmat import network_analysis_from_corrmat
    network_analysis_from_corrmat(corrmat_path,
                                  names,
                                  centroids,
                                  os.getcwd() + folder + '/network-analysis',
                                  cost=10,
                                  n_rand=10,  # this is not a reasonable value for n;
                                              # we generate only 10 random graphs to save time
                                  names_308_style=names_308_style)


def write_fixtures(folder='/temporary_test_fixtures'):
    ## Run corrmat_from_regionalmeasures and network_analysis_from_corrmat ##
    ## to save the correlation matrix and analysis in the given folder     ##
    ## --------------------------------------------------------------------##
    # if the folder does not exist, create it
    if not os.path.isdir(os.getcwd() + folder):
        os.makedirs(os.getcwd() + folder)
    # generate and save the correlation matrix
    print("generating new correlation matrix")
    recreate_correlation_matrix_fixture(folder)
    # generate and save the network analysis
    print("generating new network analysis")
    corrmat_path = 'temporary_test_fixtures/corrmat_file.txt'
    recreate_network_analysis_fixture(folder, corrmat_path)


def delete_fixtures(folder):
    import shutil
    print('\ndeleting temporary files')
    shutil.rmtree(os.getcwd() + folder)


def hash_folder(folder='temporary_test_fixtures'):
    hashes = {}
    for path, directories, files in os.walk(folder):
        for file in sorted(files):
            hashes[os.path.join(path, file)] = hash_file(os.path.join(path, file))
        for dir in sorted(directories):
            hashes.update(hash_folder(os.path.join(path, dir)))
        break
    return hashes


def hash_file(filename):
    import hashlib
    m = hashlib.sha256()
    with open(filename, 'rb') as f:
        while True:
            b = f.read(2**10)
            if not b:
                break
            m.update(b)
    return m.hexdigest()


def generate_fixture_hashes(folder='temporary_test_fixtures'):
    # generate the fixtures
    write_fixtures("/" + folder)
    # calculate the hashes
    hash_dict = hash_folder(folder)
    # delete the new files
    delete_fixtures("/" + folder)
    # return the hashes
    return hash_dict


def current_fixture_name():
    # returns the fixture name appropriate to the current versions
    # of python and networkx
    return "tests/.fixture_hash" + str(sys.version_info[:2]) + 'networkx_version' + str(nx.__version__)


def pickle_hash(hash_dict):
    import pickle
    # when we save we record the python and networkx versions
    with open(current_fixture_name(), 'wb') as f:
        pickle.dump(hash_dict, f)


def unpickle_hash():
    import pickle
    # load the fixture relevant to the current python and networkx versions
    print('loading fixtures for python version {}, networkx version {}'.format(
        sys.version_info[:2], nx.__version__))
    with open(current_fixture_name(), "rb") as f:
        pickle_file = pickle.load(f)
    return pickle_file


if __name__ == '__main__':
    if input("Are you sure you want to update Brain Networks In Python's test fixtures? (y/n) ") == 'y':
        hash_dict = generate_fixture_hashes()
        pickle_hash(hash_dict)
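
A sketch of the intended fixture workflow, pieced together from the header comment above (the exact calls are an assumption, not shown in the commit): the reference hashes are written once by running this script and answering 'y', and regression_test.py later regenerates the outputs and compares per-file hashes against them.

# assumed one-off workflow for (re)creating and checking the reference fixtures
from tests.write_fixtures import generate_fixture_hashes, pickle_hash, unpickle_hash

hash_dict = generate_fixture_hashes()   # regenerate outputs, hash them, delete the temporary files
pickle_hash(hash_dict)                  # save the hashes as the version-specific fixture file
assert unpickle_hash() == hash_dict     # the per-file comparison that regression_test.py performs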
