X-Git-Url: https://www.fleuret.org/cgi-bin/gitweb/gitweb.cgi?p=agtree2dot.git;a=blobdiff_plain;f=mlp.py;h=21d166bba9447a87315776cd741ad6d6f591e841;hp=7ddb894dd7248a54861dad3e9ef9af585a2c6372;hb=HEAD;hpb=546094cd63776f6d2a5d6ce1a0fa935a7b9dc956

diff --git a/mlp.py b/mlp.py
index 7ddb894..4bf5841 100755
--- a/mlp.py
+++ b/mlp.py
@@ -19,31 +19,32 @@
 import subprocess
 
+import torch
 from torch import nn
-from torch.nn import functional as fn
-from torch import Tensor
-from torch.autograd import Variable
 from torch.nn import Module
 
 import agtree2dot
 
 class MLP(Module):
     def __init__(self, input_dim, hidden_dim, output_dim):
-        super(MLP, self).__init__()
+        super().__init__()
         self.fc1 = nn.Linear(input_dim, hidden_dim)
         self.fc2 = nn.Linear(hidden_dim, output_dim)
 
     def forward(self, x):
         x = self.fc1(x)
-        x = fn.tanh(x)
+        x = torch.tanh(x)
         x = self.fc2(x)
         return x
 
 mlp = MLP(10, 20, 1)
 
-input = Variable(Tensor(100, 10).normal_())
-target = Variable(Tensor(100).normal_())
-output = mlp(input)
 criterion = nn.MSELoss()
+
+input = torch.randn(100, 10)
+target = torch.randn(100, 1)
+
+output = mlp(input)
+
 loss = criterion(output, target)
 
 agtree2dot.save_dot(loss,
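
For context, the hunk is cut off after the opening of the agtree2dot.save_dot(...) call, so the remaining arguments are not shown here. Below is a minimal sketch of how the tail of the script might look, assuming save_dot takes the graph root, an optional dict mapping tensors to labels, and an output stream; those arguments and the mlp.dot / mlp.pdf filenames are assumptions for illustration, not taken from this diff.

    # Hypothetical completion -- the save_dot arguments below are assumed,
    # not shown in the truncated hunk above.
    agtree2dot.save_dot(loss,
                        {input: 'input', target: 'target', loss: 'loss'},
                        open('mlp.dot', 'w'))

    # Render the generated graph with Graphviz (requires the dot binary);
    # subprocess is already imported at the top of mlp.py.
    subprocess.run(['dot', '-Tpdf', 'mlp.dot', '-o', 'mlp.pdf'])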