#--------------------------- Write fixtures ---------------------------
# To regression test our wrappers we need examples. This script
# generates those example files. We save these files once, and
# regression_test.py re-generates them to test that they are identical
# to the pre-saved examples (fixtures). If they are found not to be
# identical it raises an error.
#
# The point of this is to check that, throughout the changes we make to
# our package, the functionality of this script stays the same.
#------------------------------------------------------------------------
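
# A minimal sketch of the check regression_test.py is assumed to perform
# with the helpers defined below (the exact call sequence there is an
# assumption, not taken from regression_test.py itself):
#
#     fresh = generate_fixture_hashes()   # re-generate fixtures and hash them
#     saved = unpickle_hash()             # load the pre-saved fixture hashes
#     assert fresh == saved               # a mismatch means behaviour changed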

import os
import sys
import networkx as nx

def recreate_correlation_matrix_fixture(folder):
    ##### generate a correlation matrix in the given folder using #####
    ##### the Whitaker_Vertes dataset                              #####
    import BrainNetworksInPython.datasets.NSPN_WhitakerVertes_PNAS2016.data as data
    centroids, regionalmeasures, names, covars, names_308_style = data._get_data()
    from BrainNetworksInPython.wrappers.corrmat_from_regionalmeasures import corrmat_from_regionalmeasures
    corrmat_path = os.getcwd() + folder + '/corrmat_file.txt'
    corrmat_from_regionalmeasures(
        regionalmeasures,
        names,
        corrmat_path,
        names_308_style=names_308_style)

def recreate_network_analysis_fixture(folder, corrmat_path):
    ##### generate network analysis in the given folder using the #####
    ##### data in example_data and the correlation matrix given   #####
    ##### by corrmat_path                                         #####
    import BrainNetworksInPython.datasets.NSPN_WhitakerVertes_PNAS2016.data as data
    centroids, regionalmeasures, names, covars, names_308_style = data._get_data()
    # It is necessary to specify a random seed because
    # network_analysis_from_corrmat generates random graphs to
    # calculate global measures
    import random
    random.seed(2984)
    from BrainNetworksInPython.wrappers.network_analysis_from_corrmat import network_analysis_from_corrmat
    network_analysis_from_corrmat(corrmat_path,
                                  names,
                                  centroids,
                                  os.getcwd() + folder + '/network-analysis',
                                  cost=10,
                                  # n_rand=10 is not a reasonable value for
                                  # real use; we generate only 10 random
                                  # graphs to save time
                                  n_rand=10,
                                  names_308_style=names_308_style)

def write_fixtures(folder='/temporary_test_fixtures'):
    ## Run corrmat_from_regionalmeasures and                          ##
    ## network_analysis_from_corrmat to save their outputs in the     ##
    ## given folder                                                   ##
    ##-----------------------------------------------------------------##
    # if the folder does not exist, create it
    if not os.path.isdir(os.getcwd() + folder):
        os.makedirs(os.getcwd() + folder)
    # generate and save the correlation matrix
    print("generating new correlation matrix")
    recreate_correlation_matrix_fixture(folder)
    # generate and save the network analysis
    print("generating new network analysis")
    # derive the corrmat location from the folder argument rather than
    # hardcoding it, so that non-default folders also work
    corrmat_path = os.getcwd() + folder + '/corrmat_file.txt'
    recreate_network_analysis_fixture(folder, corrmat_path)

def delete_fixtures(folder):
    import shutil
    print('\ndeleting temporary files')
    shutil.rmtree(os.getcwd() + folder)

def hash_folder(folder='temporary_test_fixtures'):
    # hash every file in the folder; os.walk already descends into
    # subdirectories, so no manual recursion is needed
    hashes = {}
    for path, directories, files in os.walk(folder):
        for file in sorted(files):
            filepath = os.path.join(path, file)
            hashes[filepath] = hash_file(filepath)
    return hashes
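
# Illustrative example of the mapping hash_folder returns (the path and
# digest below are hypothetical):
#     {'temporary_test_fixtures/corrmat_file.txt': '9b2c...<sha256 hexdigest>'}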

def hash_file(filename):
    # compute the SHA-256 digest of a file, reading it in 1 KiB chunks
    import hashlib
    m = hashlib.sha256()
    with open(filename, 'rb') as f:
        while True:
            b = f.read(2**10)
            if not b:
                break
            m.update(b)
    return m.hexdigest()

def generate_fixture_hashes(folder='temporary_test_fixtures'):
    # generate the fixtures
    write_fixtures("/" + folder)
    # calculate the hashes
    hash_dict = hash_folder(folder)
    # delete the new files
    delete_fixtures("/" + folder)
    # return the hashes
    return hash_dict

def current_fixture_name():
    # returns the fixture name appropriate to the current versions
    # of python and networkx
    return "tests/.fixture_hash" + str(sys.version_info[:2]) + 'networkx_version' + str(nx.__version__)

def pickle_hash(hash_dict):
    import pickle
    # when we save we record the python and networkx versions
    with open(current_fixture_name(), 'wb') as f:
        pickle.dump(hash_dict, f)

def unpickle_hash():
    import pickle
    # import the fixture relevant to the current python, networkx versions
    print('loading fixtures for python version {}, networkx version {}'
          .format(sys.version_info[:2], nx.__version__))
    with open(current_fixture_name(), "rb") as f:
        pickle_file = pickle.load(f)
    return pickle_file

if __name__ == '__main__':
    if input("Are you sure you want to update Brain Networks In Python's test fixtures? (y/n)") == 'y':
        hash_dict = generate_fixture_hashes()
        pickle_hash(hash_dict)