-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathiir_class.py
152 lines (140 loc) · 6.85 KB
/
iir_class.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
# -*- coding: utf_8 -*-
import numpy as np
import chainer
from chainer import cuda, Function, gradient_check, utils, Variable
from chainer import optimizers, serializers
from chainer import Link, Chain, ChainList
import chainer.functions as F
import chainer.links as L
'''
Model-name digit code (4 digits):
1. Filter type: FIR 0, IIR 1, LSTM 2
2. Filter shared between EEG and ECoG 0, separate filters 1
3. Prediction target: EEG 0, ECoG 1
4. Prediction network: shared 0, separate per target (filter characteristics differ) 1
'''
# Run models 1000 and 1011
# IIR: filter shared between EEG and ECoG, predicts EEG, shared network
class TimeSpacePerceptron1000(Chain):
    """IIR model with a single learned filter shared by EEG and ECoG.

    Per channel, a linear "IIR step" maps 5 delayed input samples plus the
    5 most recent filter outputs to one new output. The filtered ECoG
    channels are then projected into EEG space by a linear layer.
    """

    def __init__(self, time_range):
        # time_range: number of time samples processed per forward pass
        self.tt = time_range
        self.eeg_num = 16
        self.ecog_num = 256
        super(TimeSpacePerceptron1000, self).__init__(
            # ECoG -> EEG spatial transform
            transformECoG=L.Linear(self.ecog_num, self.eeg_num)
        )
        # shared IIR step: 10 inputs = 5 delayed samples + 5 previous outputs
        self.add_link('IIR', L.Linear(10, 1))

    def _iir_filter(self, signal_t, filtered_prev, ch_num):
        """Apply the shared IIR link channel-by-channel.

        signal_t: delayed input samples, indexed as signal_t[:, ch, :]
            (time on axis 0, channel on axis 1 — the original inline
            comment's "16 * time * time_delay" order looks transposed;
            confirm against the caller).
        filtered_prev: per-channel previous outputs (list of Variables),
            mutated in place so filter state carries into the next call.
        Returns the filtered signal concatenated over channels.
        """
        filtered = []
        for ch in range(ch_num):
            # y[t-1]..y[t-5]: zero-padded shifted copies of the previous output
            pre_y = [np.zeros((self.tt, 1)).astype('float32') for _ in range(5)]
            for t in range(5):
                pre_y[t][t:] = filtered_prev[ch].data[:len(pre_y[t]) - t]
                pre_y[t] = Variable(pre_y[t])
            x = F.concat((signal_t[:, ch, :],
                          pre_y[0], pre_y[1], pre_y[2], pre_y[3], pre_y[4]))
            y = self.IIR(x)
            filtered.append(y)
            filtered_prev[ch] = y
        return F.concat(tuple(filtered))

    def __call__(self, EEG_t, ECoG_t, EEG_filtered_prev, ECoG_filtered_prev, train=False):
        """Filter both signals with the shared IIR step and map ECoG to EEG space."""
        EEG_filtered = self._iir_filter(EEG_t, EEG_filtered_prev, self.eeg_num)
        ECoG_filtered = self._iir_filter(ECoG_t, ECoG_filtered_prev, self.ecog_num)
        # Spatial mapping of the filtered ECoG into EEG space, with dropout.
        ecog_space = self.transformECoG(ECoG_filtered)
        ecog_space_dropout = F.dropout(ecog_space, train=train, ratio=0.5)
        # BUG FIX: the original computed the dropout result but returned the
        # un-dropped `ecog_space`; the sibling models (1100, 1011) both return
        # the dropout output, so return it here as well.
        return ecog_space_dropout, EEG_filtered, EEG_filtered_prev, ECoG_filtered_prev
# IIR: separate filters for EEG and ECoG, predicts EEG, shared network
class TimeSpacePerceptron1100(Chain):
    """IIR model with separate learned filters for EEG and ECoG.

    Each signal type has its own linear "IIR step" (5 delayed input
    samples + 5 previous outputs -> 1 output, applied per channel).
    The filtered ECoG channels are then projected into EEG space.
    """

    def __init__(self, time_range):
        # time_range: number of time samples processed per forward pass
        self.tt = time_range
        self.eeg_num = 16
        self.ecog_num = 256
        super(TimeSpacePerceptron1100, self).__init__(
            # ECoG -> EEG spatial transform
            transformECoG=L.Linear(self.ecog_num, self.eeg_num)
        )
        # one IIR step per signal type (10 inputs = 5 delays + 5 past outputs)
        self.add_link('IIR_EEG', L.Linear(10, 1))
        self.add_link('IIR_ECoG', L.Linear(10, 1))

    def _apply_iir(self, iir, signal_t, filtered_prev, ch_num):
        """Apply the given IIR link channel-by-channel.

        iir: the L.Linear(10, 1) link to use (IIR_EEG or IIR_ECoG).
        signal_t: delayed input samples, indexed as signal_t[:, ch, :].
        filtered_prev: per-channel previous outputs (list of Variables),
            mutated in place so filter state carries into the next call.
        Returns the filtered signal concatenated over channels.
        """
        filtered = []
        for ch in range(ch_num):
            # y[t-1]..y[t-5]: zero-padded shifted copies of the previous output
            pre_y = [np.zeros((self.tt, 1)).astype('float32') for _ in range(5)]
            for t in range(5):
                pre_y[t][t:] = filtered_prev[ch].data[:len(pre_y[t]) - t]
                pre_y[t] = Variable(pre_y[t])
            x = F.concat((signal_t[:, ch, :],
                          pre_y[0], pre_y[1], pre_y[2], pre_y[3], pre_y[4]))
            y = iir(x)
            filtered.append(y)
            filtered_prev[ch] = y
        return F.concat(tuple(filtered))

    def __call__(self, EEG_t, ECoG_t, EEG_filtered_prev, ECoG_filtered_prev, train=False):
        """Filter each signal with its own IIR step and map ECoG to EEG space."""
        EEG_filtered = self._apply_iir(self.IIR_EEG, EEG_t,
                                       EEG_filtered_prev, self.eeg_num)
        ECoG_filtered = self._apply_iir(self.IIR_ECoG, ECoG_t,
                                        ECoG_filtered_prev, self.ecog_num)
        # Spatial mapping of the filtered ECoG into EEG space, with dropout.
        ecog_space = self.transformECoG(ECoG_filtered)
        ecog_space_dropout = F.dropout(ecog_space, train=train, ratio=0.5)
        eeg = EEG_filtered
        return ecog_space_dropout, eeg, EEG_filtered_prev, ECoG_filtered_prev
# IIR: shared filter, predicts ECoG, separate networks
class TimeSpacePerceptron1011(Chain):
    """IIR model with a shared filter that predicts ECoG (single channel).

    Both signals pass through one learned linear "IIR step" (5 delayed
    input samples + 5 previous outputs -> 1 output); the filtered EEG
    channels are projected into ECoG space by a linear layer.
    """

    def __init__(self, time_range):
        # time_range: number of time samples processed per forward pass
        self.tt = time_range
        self.eeg_num = 16
        self.ecog_num = 1  # 256
        super(TimeSpacePerceptron1011, self).__init__(
            # EEG -> ECoG spatial transform
            transformECoG=L.Linear(self.eeg_num, self.ecog_num)
        )
        # shared IIR step: 10 inputs = 5 delayed samples + 5 previous outputs
        self.add_link('IIR', L.Linear(10, 1))

    def __call__(self, EEG_t, ECoG_t, EEG_filtered_prev, ECoG_filtered_prev, train=False):
        """Filter both signals with the shared IIR step, then map EEG to ECoG space."""

        def delayed_outputs(prev_var):
            # Build y[t-1]..y[t-5] as zero-padded shifted copies of the
            # previous filter output for one channel.
            taps = []
            for lag in range(5):
                buf = np.zeros((self.tt, 1)).astype('float32')
                buf[lag:] = prev_var.data[:len(buf) - lag]
                taps.append(Variable(buf))
            return taps

        # EEG side: one IIR step per channel, carrying state in-place.
        eeg_outputs = []
        for channel in range(self.eeg_num):
            taps = delayed_outputs(EEG_filtered_prev[channel])
            stacked = F.concat((EEG_t[:, channel, :],
                                taps[0], taps[1], taps[2], taps[3], taps[4]))
            out = self.IIR(stacked)
            eeg_outputs.append(out)
            EEG_filtered_prev[channel] = out
        EEG_filtered = F.concat(tuple(eeg_outputs))

        # ECoG side: ecog_num is 1, so ECoG_t is used without channel indexing.
        ecog_outputs = []
        for channel in range(self.ecog_num):
            taps = delayed_outputs(ECoG_filtered_prev[channel])
            stacked = F.concat((ECoG_t,
                                taps[0], taps[1], taps[2], taps[3], taps[4]))
            out = self.IIR(stacked)
            ecog_outputs.append(out)
            ECoG_filtered_prev[channel] = out
        ECoG_filtered = F.concat(tuple(ecog_outputs))

        # Spatial mapping of the filtered EEG into ECoG space, with dropout.
        x1 = self.transformECoG(EEG_filtered)
        x1_d = F.dropout(x1, train=train, ratio=0.5)
        x2 = ECoG_filtered
        return x1_d, x2, EEG_filtered_prev, ECoG_filtered_prev