-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathinference.py
More file actions
32 lines (26 loc) · 870 Bytes
/
inference.py
File metadata and controls
32 lines (26 loc) · 870 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
# -*- coding: utf-8 -*-
"""inference.ipynb
Automatically generated by Colab.
Original file is located at
https://colab.research.google.com/drive/1ee8oay9IiyIVqCwVVmMQ6NQyiPMWLCBV
"""
import torch
import torchvision
import matplotlib.pyplot as plt
from model import CNN
def inference(index=21, checkpoint_path=None):
    """Classify one MNIST test image, print and display the result.

    Args:
        index: Index of the test-set image to classify. Defaults to 21,
            the value hard-coded in the original script.
        checkpoint_path: Optional path to a saved ``state_dict``.
            NOTE(review): the original never loaded trained weights, so
            predictions came from a randomly initialized CNN; pass a
            checkpoint here to get meaningful output. Default ``None``
            preserves the original (untrained) behavior.

    Returns:
        The predicted digit as an ``int``.
    """
    # Load model
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model = CNN().to(device)
    if checkpoint_path is not None:
        # map_location so CPU-only machines can load GPU-saved checkpoints.
        model.load_state_dict(torch.load(checkpoint_path, map_location=device))
    model.eval()

    # Load test data and make the prediction
    test_data = torchvision.datasets.MNIST(
        'mnist_data',
        train=False,
        download=True,
        transform=torchvision.transforms.ToTensor(),
    )
    data, target = test_data[index]  # target (true label) kept for reference
    data = data.unsqueeze(0).to(device)  # add batch dimension: (1, 1, 28, 28)

    # Inference only — disable autograd bookkeeping.
    with torch.no_grad():
        output = model(data)
    prediction = output.argmax(dim=1, keepdim=True).item()
    print(f'Prediction: {prediction}')

    # image display: drop batch and channel dims so imshow gets a 2-D array
    image = data.squeeze(0).squeeze(0).cpu().numpy()
    plt.imshow(image, cmap='gray')
    plt.show()
    return prediction