# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/037_models.MLP.ipynb.

# %% auto 0
__all__ = ['MLP']

# %% ../../nbs/037_models.MLP.ipynb 3
from ..imports import *
from fastai.layers import *
from .layers import *

# %% ../../nbs/037_models.MLP.ipynb 4
class MLP(Module):
    """Multilayer perceptron for time series: flattens each (c_in, seq_len) sample and passes it
    through a stack of fully connected LinBnDrop blocks followed by a linear head."""
    def __init__(self, c_in, c_out, seq_len, layers=[500, 500, 500], ps=[0.1, 0.2, 0.2], act=nn.ReLU(inplace=True),
                 use_bn=False, bn_final=False, lin_first=False, fc_dropout=0., y_range=None):
        layers, ps = L(layers), L(ps)
        if len(ps) <= 1: ps = ps * len(layers)  # broadcast a single dropout value to every hidden layer
        assert len(layers) == len(ps), '#layers and #ps must match'
        self.flatten = Reshape()  # flattens each sample to a vector of length c_in * seq_len
        nf = [c_in * seq_len] + layers  # layer widths: flattened input size followed by the hidden sizes
        self.mlp = nn.ModuleList()
        for i in range(len(layers)): self.mlp.append(LinBnDrop(nf[i], nf[i+1], bn=use_bn, p=ps[i], act=get_act_fn(act), lin_first=lin_first))
        # head: final linear block (optional batchnorm / dropout), optionally squashed into y_range
        _head = [LinBnDrop(nf[-1], c_out, bn=bn_final, p=fc_dropout)]
        if y_range is not None: _head.append(SigmoidRange(*y_range))
        self.head = nn.Sequential(*_head)

    def forward(self, x):
        x = self.flatten(x)
        for mlp in self.mlp: x = mlp(x)
        return self.head(x)
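
# %% Usage sketch (not part of the original file): a minimal, hedged example of how this model
# could be exercised. The shapes and names below (bs, xb) are illustrative assumptions; only
# MLP's signature comes from the code above.
if __name__ == "__main__":
    import torch
    bs, c_in, seq_len, c_out = 16, 3, 128, 2   # assumed batch of 16 series with 3 variables and 128 steps
    xb = torch.randn(bs, c_in, seq_len)        # input shape expected by forward: (batch, c_in, seq_len)
    model = MLP(c_in, c_out, seq_len)
    out = model(xb)
    assert out.shape == (bs, c_out)            # each sample is mapped to c_out outputs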