-
Notifications
You must be signed in to change notification settings - Fork 2
/
elm.py
53 lines (41 loc) · 1.78 KB
/
elm.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
import numpy as np
from sklearn.preprocessing import OneHotEncoder
from scipy.special import softmax
from utils import relu, sigmoid, linear
class ELMBase:
    """Shared configuration for extreme-learning-machine models.

    Stores the hidden-layer width, a per-instance seeded RNG used to draw
    the random input weights, and the hidden-layer activation function.
    """

    def __init__(self, n_hiddens=128, random_state=12, activation=linear):
        # Dedicated RandomState so weight draws are reproducible per model.
        self.rs = np.random.RandomState(random_state)
        # Width of the random hidden layer.
        self.n_hiddens = n_hiddens
        # Nonlinearity applied to the hidden-layer pre-activations.
        self.activation = activation
class ELMRegressor(ELMBase):
    """Extreme learning machine for regression (identity output layer)."""

    def __init__(self, n_hiddens=128, random_state=12):
        ELMBase.__init__(self, n_hiddens, random_state, linear)

    def fit(self, X, y):
        """Draw random hidden weights, then solve the output layer.

        The hidden layer is never trained: W and b are sampled once from
        a standard normal, and only the output weights Beta are fit, by
        least squares via the Moore-Penrose pseudo-inverse.
        """
        n_features = X.shape[1]
        self.W = self.rs.normal(size=(n_features, self.n_hiddens))
        self.b = self.rs.normal(size=(self.n_hiddens))
        hidden = self.activation(X.dot(self.W) + self.b)
        self.Beta = np.linalg.pinv(hidden).dot(y.reshape(-1, 1))
        return self

    def predict(self, X):
        """Return predictions as a column vector of shape (n_samples, 1)."""
        hidden = self.activation(X.dot(self.W) + self.b)
        return hidden.dot(self.Beta)
class ELMClassifier(ELMBase):
    """Extreme learning machine for classification.

    A random ReLU hidden layer feeds a linear output layer fit by least
    squares against one-hot targets; softmax turns scores into
    per-class probabilities.
    """

    def __init__(self, n_hiddens=128, random_state=12):
        ELMBase.__init__(self, n_hiddens, random_state, relu)
        self.output_activation = softmax
        self.encoder = OneHotEncoder()

    def fit(self, X, y):
        """Fit random hidden weights and least-squares output weights.

        Parameters
        ----------
        X : array of shape (n_samples, n_features)
        y : array of shape (n_samples,) — class labels (need not be 0..k-1).
        """
        self.W = self.rs.normal(size=(X.shape[1], self.n_hiddens))
        self.b = self.rs.normal(size=(self.n_hiddens))
        # One-hot targets; the fitted encoder also records the sorted label
        # categories so predict() can map column indices back to labels.
        y = self.encoder.fit_transform(y.reshape(-1, 1)).toarray()
        H = self.activation(X.dot(self.W) + self.b)
        self.Beta = np.linalg.pinv(H).dot(y)
        return self

    def predict(self, X):
        """Return the predicted class LABELS, not raw column indices.

        BUG FIX: argmax yields the index of the winning one-hot column;
        previously that bare index was returned, which is wrong whenever
        the training labels are not exactly 0..n_classes-1 (e.g. strings
        or a sparse label set). OneHotEncoder orders its columns by
        `categories_[0]`, so indexing into it recovers the label.
        """
        idx = np.argmax(self.predict_proba(X), axis=1)
        return self.encoder.categories_[0][idx]

    def predict_proba(self, X):
        """Return class probabilities of shape (n_samples, n_classes).

        BUG FIX: scipy's softmax defaults to axis=None, which normalizes
        over the ENTIRE score matrix so rows did not sum to 1 (the argmax
        class was unaffected, hiding the bug). axis=1 normalizes each
        sample's row independently.
        """
        H = self.activation(X.dot(self.W) + self.b)
        return self.output_activation(H.dot(self.Beta), axis=1)