
Commit a4d9389

Fixed SVM for non-positive-definite kernel matrices, updated .travis.yml with Python 3.8, and added SVR with R2 and accuracy metrics (#1185)
1 parent: ca301ea

16 files changed: +441 -379 lines

.travis.yml (+2 -16)
@@ -4,27 +4,13 @@ python:
   - 3.5
   - 3.6
   - 3.7
+  - 3.8
 
 before_install:
   - git submodule update --remote
 
 install:
-  - pip install flake8
-  - pip install ipython
-  - pip install ipythonblocks
-  - pip install ipywidgets
-  - pip install keras
-  - pip install matplotlib
-  - pip install networkx
-  - pip install numpy
-  - pip install opencv-python
-  - pip install Pillow
-  - pip install pytest-cov
-  - pip install qpsolvers
-  - pip install quadprog
-  - pip install six
-  - pip install sortedcontainers
-  - pip install tensorflow
+  - pip install --upgrade -r requirements.txt
 
 script:
   - py.test --cov=./
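
The consolidated requirements.txt is not shown in this diff, but it presumably carries the same packages the inline pip install lines used to name; a plausible reconstruction, for reference only:

    flake8
    ipython
    ipythonblocks
    ipywidgets
    keras
    matplotlib
    networkx
    numpy
    opencv-python
    Pillow
    pytest-cov
    qpsolvers
    quadprog
    six
    sortedcontainers
    tensorflow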

csp.py (+5 -4)
@@ -758,8 +758,9 @@ class Sudoku(CSP):
     . . 2 | 6 . 9 | 5 . .
     8 . . | 2 . 3 | . . 9
     . . 5 | . 1 . | 3 . .
-    >>> AC3(e); e.display(e.infer_assignment())
-    (True, 6925)
+    >>> AC3(e)  # doctest: +ELLIPSIS
+    (True, ...)
+    >>> e.display(e.infer_assignment())
     4 8 3 | 9 2 1 | 6 5 7
     9 6 7 | 3 4 5 | 8 2 1
     2 5 1 | 8 7 6 | 4 9 3
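
The +ELLIPSIS directive makes this doctest robust: AC3 returns a (consistent, count) pair, and pinning the exact check count (6925 above) made the test brittle. A minimal, self-contained illustration of the directive, using a hypothetical function rather than anything from csp.py:

    def checks():
        """
        >>> checks()  # doctest: +ELLIPSIS
        (True, ...)
        """
        # the "..." in the expected output matches any second element
        return True, 6925

    if __name__ == "__main__":
        import doctest
        doctest.testmod()
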
@@ -1265,7 +1266,7 @@ def display(self, assignment=None):
         else:
             var = "p" + str(j) + str(i)
         if assignment is not None:
-            if isinstance(assignment[var], set) and len(assignment[var]) is 1:
+            if isinstance(assignment[var], set) and len(assignment[var]) == 1:
                 puzzle += "[" + str(first(assignment[var])).upper() + "] "
             elif isinstance(assignment[var], str):
                 puzzle += "[" + str(assignment[var]).upper() + "] "
@@ -1393,7 +1394,7 @@ def display(self, assignment=None):
             var2 = "0" + var2
         var = "X" + var1 + var2
         if assignment is not None:
-            if isinstance(assignment[var], set) and len(assignment[var]) is 1:
+            if isinstance(assignment[var], set) and len(assignment[var]) == 1:
                 puzzle += "[" + str(first(assignment[var])) + "]\t"
             elif isinstance(assignment[var], int):
                 puzzle += "[" + str(assignment[var]) + "]\t"
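
The other two hunks replace len(...) is 1 with len(...) == 1. The original comparison only worked because CPython caches small integers; identity tests against a literal are an implementation detail, and Python 3.8 (newly added to CI above) emits a SyntaxWarning for them. A quick sketch of the distinction:

    x = {"a"}
    print(len(x) == 1)  # True: value equality, the correct test
    # len(x) is 1 also happens to print True on CPython, but only because
    # small ints are interned; Python 3.8+ flags "is" with a literal
    # as a SyntaxWarning.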

deep_learning4e.py (+50 -14)
@@ -8,7 +8,7 @@
 from keras.layers import Embedding, SimpleRNN, Dense
 from keras.preprocessing import sequence
 
-from utils4e import (softmax1D, conv1D, gaussian_kernel, element_wise_product, vector_add, random_weights,
+from utils4e import (conv1D, gaussian_kernel, element_wise_product, vector_add, random_weights,
                      scalar_vector_product, map_vector, mean_squared_error_loss)
 
 
@@ -46,6 +46,9 @@ def function(self, x):
     def derivative(self, x):
         return NotImplementedError
 
+    def __call__(self, x):
+        return self.function(x)
+
 
 class Sigmoid(Activation):
 
@@ -56,7 +59,7 @@ def derivative(self, value):
         return value * (1 - value)
 
 
-class Relu(Activation):
+class ReLU(Activation):
 
     def function(self, x):
         return max(0, x)
@@ -65,13 +68,28 @@ def derivative(self, value):
         return 1 if value > 0 else 0
 
 
-class Elu(Activation):
+class ELU(Activation):
+
+    def __init__(self, alpha=0.01):
+        self.alpha = alpha
 
-    def function(self, x, alpha=0.01):
-        return x if x > 0 else alpha * (np.exp(x) - 1)
+    def function(self, x):
+        return x if x > 0 else self.alpha * (np.exp(x) - 1)
 
-    def derivative(self, value, alpha=0.01):
-        return 1 if value > 0 else alpha * np.exp(value)
+    def derivative(self, value):
+        return 1 if value > 0 else self.alpha * np.exp(value)
+
+
+class LeakyReLU(Activation):
+
+    def __init__(self, alpha=0.01):
+        self.alpha = alpha
+
+    def function(self, x):
+        return max(x, self.alpha * x)
+
+    def derivative(self, value):
+        return 1 if value > 0 else self.alpha
 
 
 class Tanh(Activation):
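
With alpha moved into __init__ and __call__ added on the Activation base class (first hunk of this file), activations are configured per instance and invoked like plain functions. (The base class still returns NotImplementedError rather than raising it; this commit leaves that as-is.) A small usage sketch, assuming this file's definitions:

    relu = ReLU()
    elu = ELU(alpha=0.1)   # alpha is now per-instance, not passed on every call
    leaky = LeakyReLU()    # keeps the default alpha=0.01
    print(relu(-2.0))      # 0
    print(elu(-2.0))       # 0.1 * (exp(-2) - 1), about -0.0865
    print(leaky(-2.0))     # max(-2.0, -0.02) = -0.02
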
@@ -83,13 +101,31 @@ def derivative(self, value):
         return 1 - (value ** 2)
 
 
-class LeakyRelu(Activation):
+class SoftMax(Activation):
+
+    def function(self, x):
+        return np.exp(x) / np.sum(np.exp(x))
+
+    def derivative(self, x):
+        return np.ones_like(x)
+
+
+class SoftPlus(Activation):
 
-    def function(self, x, alpha=0.01):
-        return x if x > 0 else alpha * x
+    def function(self, x):
+        return np.log(1. + np.exp(x))
+
+    def derivative(self, x):
+        return 1. / (1. + np.exp(-x))
 
-    def derivative(self, value, alpha=0.01):
-        return 1 if value > 0 else alpha
+
+class Linear(Activation):
+
+    def function(self, x):
+        return x
+
+    def derivative(self, x):
+        return np.ones_like(x)
 
 
 class InputLayer(Layer):
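
One caveat with the new SoftMax: np.exp(x) overflows for large inputs, and the derivative of ones presumably relies on softmax being paired with a cross-entropy loss, where the combined gradient simplifies. A common stabilization, not part of this commit, shifts by the maximum first, which leaves the result mathematically unchanged:

    import numpy as np

    def stable_softmax(x):
        # subtracting max(x) keeps exp() bounded without changing the ratios
        z = np.exp(x - np.max(x))
        return z / np.sum(z)

    print(stable_softmax(np.array([1000.0, 1001.0])))  # ~[0.2689, 0.7311], no overflow
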
@@ -112,9 +148,9 @@ class OutputLayer(Layer):
     def __init__(self, size=3):
         super().__init__(size)
 
-    def forward(self, inputs):
+    def forward(self, inputs, activation=SoftMax):
         assert len(self.nodes) == len(inputs)
-        res = softmax1D(inputs)
+        res = activation().function(inputs)
         for node, val in zip(self.nodes, res):
             node.value = val
         return res
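
OutputLayer.forward now takes the activation as a parameter instead of hard-wiring softmax1D, so the same layer can serve as a classification head (SoftMax) or a regression head (Linear). A hypothetical call under this diff's definitions, assuming a 3-node layer:

    layer = OutputLayer(size=3)
    probs = layer.forward([1.0, 2.0, 3.0])                   # SoftMax by default
    raw = layer.forward([1.0, 2.0, 3.0], activation=Linear)  # identity, for regression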
