|
/* PX-Bayesian Low-rank Graph Regression Model
 * main.cpp
 * Eunjuee Lee
 */
| 6 | + |
#include <cmath>
#include <fstream>
#include <iostream>
#include <stdexcept>
#include <string>

#include <armadillo>

#include "px_blgrm.h"
#include "px_blgrm_helper.h"
#include "mcmc_para.h"
#include "random_generator.h"
| 13 | + |
| 14 | + |
| 15 | + |
| 16 | +using namespace std; |
| 17 | +using namespace arma; |
| 18 | + |
| 19 | + |
| 20 | +int main(int argc, char *argv[]){ |
| 21 | + |
| 22 | + try{ |
| 23 | + |
| 24 | + mat L_temp; L_temp.load(argv[1], csv_ascii); |
| 25 | + mat X; X.load(argv[2], csv_ascii); |
| 26 | + |
| 27 | + string R_in = argv[3]; |
| 28 | + string V_in = argv[4]; |
| 29 | + string niter_in = argv[5]; |
| 30 | + string burnin_in = argv[6]; |
| 31 | + string slice_niter_in = argv[7]; |
| 32 | + string slice_burnin_in = argv[8]; |
| 33 | + |
| 34 | + int R = stoi(R_in); |
| 35 | + int V = stoi(V_in); |
| 36 | + int n = X.n_rows; int p = X.n_cols; |
| 37 | + |
| 38 | + // load in L |
| 39 | + cube L; L.zeros(V, V, n); |
| 40 | + for(int i = 0; i < n; ++i){ |
| 41 | + L.slice(i) = reshape(L_temp.row(i), V, V); |
| 42 | + } |
| 43 | + |
| 44 | + // set parameters |
| 45 | + mcmc_para parameters; |
| 46 | + parameters.set_niter(stoi(niter_in)); |
| 47 | + parameters.set_burnin(stoi(burnin_in)); |
| 48 | + parameters.set_B(V, R); |
| 49 | + parameters.set_Lambda(n, R); |
| 50 | + parameters.set_Gamma(p, R); |
| 51 | + parameters.set_sigma(1); |
| 52 | + parameters.set_sig_gam(1); |
| 53 | + parameters.set_b1(.01); |
| 54 | + parameters.set_b2(.01); |
| 55 | + parameters.set_c1(.01); |
| 56 | + parameters.set_c2(.01); |
| 57 | + parameters.set_va(.5); |
| 58 | + parameters.set_vb(.5); |
| 59 | + |
| 60 | + parameters.set_slice_niter(stoi(slice_niter_in)); |
| 61 | + parameters.set_slice_burnin(stoi(slice_burnin_in)); |
| 62 | + parameters.set_slice_width(10); |
| 63 | + cout << size(L) << "\n"; |
| 64 | + cout << size(X) << "\n"; |
| 65 | + |
| 66 | + px_blgrm(L, X, R, parameters); |
| 67 | + }catch(const std::exception& e){ |
| 68 | + cout << "Error in inputs. Please correct inputs, or uncomment line 164 in main.cpp to run simulation\n"; |
| 69 | + |
| 70 | + srand(1); // fix random seed |
| 71 | + |
| 72 | + // initialize all parameters, according to coni3_test.m |
| 73 | + int R = 10; |
| 74 | + int V = 50; |
| 75 | + int n = 100; |
| 76 | + int p = 2; |
| 77 | + |
| 78 | + double sigma_0 = 1.0; // Fix variance of Lambda prior for identifiability |
| 79 | + |
| 80 | + mcmc_para parameters; |
| 81 | + parameters.set_niter(5500); |
| 82 | + parameters.set_B(V, R); |
| 83 | + parameters.set_burnin(500); |
| 84 | + parameters.set_Lambda(n, R); |
| 85 | + |
| 86 | + |
| 87 | + parameters.set_Gamma(p, R); // p as in the simulation |
| 88 | + // init to defaults |
| 89 | + parameters.set_sigma(1); |
| 90 | + parameters.set_sig_gam(1); |
| 91 | + parameters.set_b1(.01); |
| 92 | + parameters.set_b2(.01); |
| 93 | + parameters.set_c1(.01); |
| 94 | + parameters.set_c2(.01); |
| 95 | + parameters.set_va(.5); |
| 96 | + parameters.set_vb(.5); |
| 97 | + |
| 98 | + // iterations for metrop/slice |
| 99 | + parameters.set_slice_niter(25); |
| 100 | + parameters.set_slice_burnin(10); |
| 101 | + parameters.set_slice_width(10); |
| 102 | + |
| 103 | + // now simulate data, L and X |
| 104 | + double sigma_error = 1.0; |
| 105 | + |
| 106 | + mat B(V, R); |
| 107 | + |
| 108 | + |
| 109 | + for(int i = 0; i < R; ++i){ |
| 110 | + B.col(i) = n_norm(V); |
| 111 | + } |
| 112 | + |
| 113 | + |
| 114 | + // simulate data |
| 115 | + cube L; L.zeros(V, V, n); |
| 116 | + cube tL; tL.zeros(V, V, n); |
| 117 | + cube Lambda; Lambda.zeros(R, R, n); |
| 118 | + |
| 119 | + |
| 120 | + vec x; x = n_norm(n) + 0.5; // init x to 100 std norm, mean .5 |
| 121 | + |
| 122 | + mat A; vec temp(R*(R+1)/2); temp = n_norm(R*(R+1)/2); |
| 123 | + mat Lamb; Lamb.zeros(R, R); |
| 124 | + mat AA; AA.zeros(V, V); vec a; |
| 125 | + mat noise; noise.zeros(50, 50); |
| 126 | + |
| 127 | + for(int i = 0; i < n; ++i){ |
| 128 | + A.zeros(R, R); |
| 129 | + vec_2_uptri(A, n_norm(R * (R + 1) / 2) * (1 / sqrt(1)) + 1, R); |
| 130 | + A(0, 1) = A(0, 1) + x(i) * 4; |
| 131 | + A(1, 2) = A(1, 2) + x(i) * 4; |
| 132 | + Lamb = A + A.t() - 2 * diagmat(A.diag()); |
| 133 | + Lamb.diag() = n_norm(R) + 1; // 3 norm(1,1) RV |
| 134 | + |
| 135 | + Lambda.slice(i) = Lamb; |
| 136 | + |
| 137 | + // need V*(V+1)/2 RB |
| 138 | + a = n_norm_musig(V * (V + 1) / 2, 1, sigma_error / sqrt(2)); |
| 139 | + vec_2_uptri(AA, a, V); |
| 140 | + |
| 141 | + noise = AA + AA.t() - 2 * diagmat(AA.diag()); |
| 142 | + |
| 143 | + noise.diag() = n_norm_musig(V, 0, sigma_error); |
| 144 | + |
| 145 | + tL.slice(i) = B * Lambda.slice(i) * B.t(); |
| 146 | + L.slice(i) = B * Lambda.slice(i) * B.t() + noise; |
| 147 | + |
| 148 | + } |
| 149 | + |
| 150 | + // set up design matrix |
| 151 | + mat X; X.ones(n, 2); |
| 152 | + X.col(1) = x; |
| 153 | + |
| 154 | + |
| 155 | + a = n_norm_musig(V * (V + 1) / 2, 1, sigma_error); |
| 156 | + |
| 157 | + // write the actual B, Lambda, for testing |
| 158 | + ofstream outf; |
| 159 | + outf.open("B_true.txt"); |
| 160 | + outf << B; |
| 161 | + outf.close(); |
| 162 | + |
| 163 | + // begin loop and testing helper functions |
| 164 | + //px_blgrm(L, X, R, parameters); |
| 165 | + |
| 166 | + } |
| 167 | + |
| 168 | + return 0; |
| 169 | +} |
0 commit comments