-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: classes.py
More file actions
58 lines (46 loc) · 1.29 KB
/
classes.py
File metadata and controls
58 lines (46 loc) · 1.29 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
import numpy as np
import torch
import torch.nn as nn
from torch.utils.data import Dataset
############################################
# Dataset
############################################
# Dataset object
class RegularDataset(Dataset):
    """Minimal map-style dataset wrapping feature/target arrays as float tensors."""

    def __init__(self, X, y):
        # Convert once at construction so indexing is cheap tensor slicing.
        self.X = torch.Tensor(np.array(X))
        self.y = torch.Tensor(np.array(y))
        # Cache the sample count (first dimension of X).
        self.len = self.X.shape[0]

    def __getitem__(self, index):
        # One sample: (features, target) pair at the given position.
        return self.X[index], self.y[index]

    def __len__(self):
        return self.len
############################################
# Model Classes
############################################
# simple MLP
class Linear(nn.Module):
    """Linear regression model: one affine layer mapping `size` features to 1 output.

    Args:
        size (int): number of input features.
    """

    def __init__(self, size):
        # Call nn.Module.__init__ FIRST: attribute assignment on a Module is
        # only safe after the base class has set up its registries.
        super(Linear, self).__init__()
        self.size = size
        self.model = nn.Sequential(
            nn.Linear(size, 1),
        )

    def forward(self, x):
        # Input: (..., size) -> output: (..., 1).
        x = self.model(x)
        return x
class OneHidden(nn.Module):
    """MLP with one hidden layer: Linear -> ReLU -> Dropout -> Linear(1).

    Args:
        size (int): number of input features.
        hidden_dim (int): width of the hidden layer.
        dropout (float): dropout probability applied after the ReLU.
    """

    def __init__(self, size, hidden_dim, dropout):
        # Call nn.Module.__init__ FIRST: attribute assignment on a Module is
        # only safe after the base class has set up its registries.
        super(OneHidden, self).__init__()
        self.size = size
        self.hidden_dim = hidden_dim
        self.dropout = dropout
        self.model = nn.Sequential(
            nn.Linear(size, hidden_dim),
            nn.ReLU(),
            nn.Dropout(dropout),
            nn.Linear(hidden_dim, 1),
        )

    def forward(self, x):
        # Input: (..., size) -> output: (..., 1).
        x = self.model(x)
        return x