        self.fc1 = nn.Linear(input_size, hidden_size)
        self.relu = nn.ReLU()
        self.fc2 = nn.Linear(hidden_size, num_classes)

    def forward(self, x):
        out = self.fc1(x)
        out = out.detach().numpy()     # detach() leaves the autograd graph; .numpy() hands the data to NumPy
        out = rand_func(out)           # custom function that operates on NumPy arrays
        out = torch.from_numpy(out)    # fresh leaf tensor: requires_grad=False, no grad_fn
        out = self.relu(out)
        out = self.fc2(out)
        return out


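The forward pass above hands the activation off to NumPy and back: .detach() cuts the tensor out of the autograd graph, and torch.from_numpy() returns a brand-new leaf tensor with no grad_fn and requires_grad=False, so nothing computed afterwards carries gradient history. A minimal check (plain PyTorch, no extra assumptions) shows the effect:

import torch

x = torch.randn(3, requires_grad=True)
y = (x * 2).detach().numpy()   # graph is cut here
z = torch.from_numpy(y)        # new leaf tensor, no history
print(z.requires_grad)         # False

Calling loss.backward() on a loss built from such a tensor raises exactly the reported error: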
RuntimeError: element 0 of tensors does not require grad and does not have a grad_fn
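If rand_func genuinely has to run in NumPy, one possible workaround (a sketch only; the real rand_func is not shown, so the stand-in below and the straight-through backward pass are assumptions) is to wrap the NumPy round trip in a custom torch.autograd.Function so the surrounding graph stays connected:

import numpy as np
import torch


def rand_func(arr):
    # Stand-in for the question's rand_func; replace with the real implementation.
    return arr + np.random.rand(*arr.shape).astype(arr.dtype)


class RandFuncOp(torch.autograd.Function):
    """Runs the NumPy-only rand_func inside the autograd graph."""

    @staticmethod
    def forward(ctx, inp):
        out = rand_func(inp.detach().cpu().numpy())
        return torch.from_numpy(out).to(inp.device, inp.dtype)

    @staticmethod
    def backward(ctx, grad_output):
        # Straight-through gradient: treats rand_func as the identity for backprop.
        # Substitute the true derivative of rand_func if one is available.
        return grad_output

In forward() the NumPy round trip then becomes out = RandFuncOp.apply(out). Alternatively, if rand_func can be expressed with torch operations (for example torch.rand_like), staying in torch avoids the problem entirely.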
