
Commit d596e9a

Revert "Update ConMod.m file"
This reverts commit d6188f9.
1 parent d6188f9 commit d596e9a


8 files changed, +25 -106 lines changed


ConMod.m

Lines changed: 7 additions & 5 deletions
@@ -3,21 +3,23 @@
 % modules in multiple networks
 %
 % INPUT:
-% multiNetworks : a cell contains multiple networks, each of which is
-% presented by edgelist format or a full matrix
-% with N nodes
+% multiNetworks: a cell contains multiple networks, each is
+% presented by a sparse matrix or a full matrix with N nodes
 % N: the number of all nodes
 % K: the number of hidden factors
-% lambda: a vector which contains the parameters for balancing the relative
+% lambda: a vector containing the parameters for balancing the relative
 % weight among different views
 % xita: the parameter for selecting nodes
 % maxIter: the maximum number of iterations for multi-view NMF
 %
 % OUTPUT:
-% modulesfinal: a cell which contains the final conserved modules
+% modulesfinal: a cell contains the final conserved modules
 %
 % Peizhuo Wang ([email protected])
 
+%% parameters
+
+
 %% Calculting the feature matrices
 disp('Calculating the strengh matrix and the uniformity matrix...')
 [Strength, Distribution] = featureNets(multiNetworks, N);
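
The reverted header documents ConMod's inputs and output. For orientation, a minimal usage sketch built only from that documented interface; the argument order, the toy full-matrix networks, and the parameter values are illustrative assumptions, not taken from the commit:

% Hypothetical call, assuming the arguments follow the INPUT block above and
% that the function returns the cell of conserved modules described under OUTPUT.
N = 100;                                 % number of nodes
A1 = rand(N); A1 = (A1 + A1')/2;         % two toy symmetric networks in full-matrix form
A2 = rand(N); A2 = (A2 + A2')/2;
multiNetworks = {A1, A2};
K = 5;                                   % number of hidden factors
lambda = [0.01, 0.05];                   % relative weights of the two views
xita = 1.5;                              % node-selection parameter
maxIter = 100;                           % iterations for multi-view NMF
modules_final = ConMod(multiNetworks, N, K, lambda, xita, maxIter);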

README.md

Lines changed: 0 additions & 83 deletions
This file was deleted.

SNMFforView.m

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@
 % X: the adjacency matrix of a network
 % Hc: initialization for consensus factor matrix
 % H: initialization for factor matrix of each view
-% lambda: a vector which contains the parameters for balancing the relative
+% lambda: a vector containing the parameters for balancing the relative
 % weight among different views
 % MaxIter: the maximal number of iterations for alternating minimization
 % epsilon: the convergence parameter
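
SNMFforView is the per-view solver whose header this hunk touches. A hedged sketch of how it would typically be invoked from the multi-view loop, assuming the arguments follow the documented INPUT order; the loop itself is illustrative and not part of the commit:

% Illustrative only: one alternating-minimization call per view, each pulled
% toward the shared consensus factor Hc.
for i = 1:numel(X)
    H{i} = SNMFforView(X{i}, Hc, H{i}, lambda, MaxIter, epsilon);
end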

evaluation.m

Lines changed: 1 addition & 0 deletions
@@ -10,6 +10,7 @@
 % FPR: the False Positive Rate
 % Accuracy:
 % MCC: the Matthews Correlation Coefficient
+% I: Confusion Matrix
 %
 % Peizhuo Wang ([email protected])
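
The added output line documents a confusion matrix alongside the scalar metrics. As a reminder of how MCC and the other listed metrics fall out of a 2x2 confusion matrix, a small self-contained sketch; the matrix layout and numbers are assumptions for illustration, not code from evaluation.m:

% Assumed layout: I = [TP, FN; FP, TN], with made-up counts.
I = [40, 10; 5, 45];
TP = I(1,1); FN = I(1,2); FP = I(2,1); TN = I(2,2);
TPR = TP / (TP + FN);                    % True Positive Rate
FPR = FP / (FP + TN);                    % False Positive Rate
Accuracy = (TP + TN) / sum(I(:));
MCC = (TP*TN - FP*FN) / sqrt((TP+FP)*(TP+FN)*(TN+FP)*(TN+FN));   % Matthews Correlation Coefficient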

featureNets.m

Lines changed: 5 additions & 5 deletions
@@ -2,8 +2,8 @@
 % Compute two feature metrices from multiple networks
 %
 % INPUT:
-% multiNetworks : a cell contains multiple networks, each of which is
-% presented by edgelist format or a full matrix
+% multiNetworks : a cell contains multiple networks, each is
+% presented by a sparse matrix or a full matrix
 % with N nodes
 % N : the number of all nodes
 %
@@ -19,7 +19,7 @@
 Strength = zeros(N);
 temp = zeros(N);
 A = zeros(N);
-if (m <= 3) % Edgelist format
+if (m <= 3) % Sparse matrix format
 for k = 1:network_count
 theMatrix = multiNetworks{k};
 [edge_count, col_count] = size(theMatrix);
@@ -73,9 +73,9 @@
 Participation = (network_count/(network_count-1)) * (1-(temp./(A.^2)));
 Participation(isinf(Participation)) = 0;
 Participation(isnan(Participation)) = 0;
-Participation = Participation - diag(diag(Participation)); % The diagonal is 0
+Participation = Participation - diag(diag(Participation));
 
 Strength = A./network_count;
-Strength = Strength - diag(diag(Strength)); % The diagonal is 0
+Strength = Strength - diag(diag(Strength));
 
 end
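
The last hunk carries the formulas that produce the two feature matrices. As a plain restatement of what those context lines compute for one node pair, assuming A accumulates the edge weight over networks and temp accumulates its square (variable roles inferred from the surrounding file, not restated in the commit):

% Toy check for a single node pair (i,j) across m = 3 networks (made-up weights).
w = [0.8, 0.2, 0.5];                     % edge weight of (i,j) in each network
m = numel(w);
A_ij    = sum(w);                        % assumed meaning of A(i,j): summed weight
temp_ij = sum(w.^2);                     % assumed meaning of temp(i,j): summed squared weight
Strength_ij      = A_ij / m;                              % Strength = A./network_count
Participation_ij = (m/(m-1)) * (1 - temp_ij / A_ij^2);    % uniformity formula from the hunk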

main_run.m

Lines changed: 1 addition & 1 deletion
@@ -44,7 +44,7 @@
 %
 % % Selecting nodes from the consensus factors
 % xita = 1.5;
-% modules_final = moduleNodesSelection( Hc, xita );
+% modules_final = modulesTruing( Hc, xita );
 % runtime = toc;
 % disp(['Running time: ', num2str(runtime), ' sec.'])

moduleNodesSelection.m

Lines changed: 4 additions & 5 deletions
@@ -1,13 +1,13 @@
 function [ modulesFinal ] = moduleNodesSelection( Hc, xita )
-% Assigning the module members by a soft node selection procedure
-% and then truing the modules to obtain more accurate results
+% A soft node selection procedure from the consensus factors to assign the module members
+% and then truing the modules to obtain more accurate results
 %
 % INPUT:
 % Hc: the consensus factor matrix
 % xita: the parameter for selecting nodes
 %
 % OUTPUT:
-% modulesFinal: a cell which contains the final result modules
+% modulesFinal: a cell contains the final result modules
 %
 % Peizhuo Wang ([email protected])
 
@@ -26,7 +26,7 @@
 
 for i = 1:size(HPI, 1)-1
 for j = (i+1):size(HPI, 2)
-if HPI(i,j)>0.5 % merge these two modules
+if HPI(i,j)>0.5
 [Y, I] = max([moduleSignal(i), moduleSignal(j)]);
 if I == 1
 modulesFinal{j} = [];
@@ -43,7 +43,6 @@
 end
 end
 
-% Only modules with no less than 5 nodes are kept
 i = 1;
 while i ~= length(modulesFinal)+1
 if isempty(modulesFinal{i}) || (length(modulesFinal{i})<5)
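
The final hunk removes the comment explaining the size filter but keeps the loop whose first lines appear as context. A hedged reconstruction of what that filter plausibly does, completing the visible condition; the removal-and-continue structure is an assumption, not the file's exact code:

% Assumed shape of the small-module filter: drop empty modules and modules
% with fewer than 5 nodes; only advance i when the current entry is kept.
i = 1;
while i ~= length(modulesFinal)+1
    if isempty(modulesFinal{i}) || (length(modulesFinal{i}) < 5)
        modulesFinal(i) = [];            % delete this cell entry, do not advance i
    else
        i = i + 1;
    end
end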

multiViewNMF.m

Lines changed: 6 additions & 6 deletions
@@ -2,14 +2,14 @@
 % Multi-View Non-negative symmetric Matrix Factorization
 %
 % INPUT:
-% X: a cell which contains symmetric matrices
+% X: a cell containing symmetric matrices
 % K: the number of hidden factors
-% lambda: a vector which contains the parameters for balancing the relative
+% lambda: a vector containing the parameters for balancing the relative
 % weight among different views
 % maxiter: the maximum number of iterations
 %
 % OUTPUT:
-% H: a cell which contains factor matrices for all views
+% H: a cell containing factor matrices for all views
 % Hc: the result consensus factor matrix
 % objValue: the value of objective function
 %
@@ -86,9 +86,9 @@
 obj_consensus = norm(H{i} - Hc, 'fro')^2;
 obj = obj + obj_body + lambda(i)*obj_consensus;
 
-% errX = mean(mean(abs(obj_body)))/mean(mean(X{i}));
-% errH = mean(mean(abs(obj_consensus)))/mean(mean(H{i}));
-% err = errX + errH;
+errX = mean(mean(abs(obj_body)))/mean(mean(X{i}));
+errH = mean(mean(abs(obj_consensus)))/mean(mean(H{i}));
+err = errX + errH;
 end
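
The second hunk restores the per-view relative-error terms next to the objective accumulation. Written out, the objective that loop accumulates looks like the sketch below; obj_body does not appear in the hunk, so the symmetric-factorization term here is an assumption consistent with the function's name, not the file's confirmed code:

% Hypothetical restatement of the accumulated objective over all views:
%   obj = sum_i ( ||X{i} - H{i}*H{i}'||_F^2 + lambda(i) * ||H{i} - Hc||_F^2 )
obj = 0;
for i = 1:numel(X)
    obj_body      = norm(X{i} - H{i}*H{i}', 'fro')^2;    % assumed form of obj_body
    obj_consensus = norm(H{i} - Hc, 'fro')^2;            % as shown in the hunk
    obj = obj + obj_body + lambda(i)*obj_consensus;
end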
