-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathaffine_transformer_test.py
More file actions
38 lines (28 loc) · 945 Bytes
/
affine_transformer_test.py
File metadata and controls
38 lines (28 loc) · 945 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
import torch
import torch.nn.functional as F
from PIL import Image
import numpy as np
import torchvision
class SpatialTransformerModule(torch.nn.Module):
    """Warp a batch of images with per-sample 2D affine transforms.

    This is the sampling half of a Spatial Transformer Network: build a
    sampling grid from the affine matrices, then bilinearly sample the
    input at those grid locations.
    """

    def __init__(self):
        super().__init__()

    def forward(self, img, theta):
        """Apply the affine transforms ``theta`` to ``img``.

        Args:
            img: input batch of shape (N, C, H, W).
            theta: affine matrices of shape (N, 2, 3), in the normalized
                [-1, 1] coordinate convention used by ``affine_grid``.

        Returns:
            Warped batch with the same shape as ``img``.
        """
        # align_corners=False is the current PyTorch default; passing it
        # explicitly keeps behavior identical while silencing the
        # UserWarning emitted when the argument is left unspecified.
        grid = F.affine_grid(theta, img.size(), align_corners=False)
        return F.grid_sample(img, grid, align_corners=False)
stm = SpatialTransformerModule()

# Affine transformation matrix (layout expected by F.affine_grid):
#     [  cos(t)  sin(t)  tx ]
#     [ -sin(t)  cos(t)  ty ]
# where t is the clockwise rotation angle and (tx, ty) are translations
# in normalized [-1, 1] coordinates.
#
# BUG FIX: the original put -1 in the -sin(t) slot while the other entries
# encode t = 30 deg (cos = 0.866, sin = 0.5); with -1 the matrix is not a
# rotation and the output is sheared. -0.5 restores a pure 30-deg rotation.
aff = np.array([[[0.866, 0.5, 0], [-0.5, 0.866, 0]]], np.float32)
theta = torch.from_numpy(aff)

# NOTE(review): hard-coded absolute path — only works on the author's
# machine; parameterize (argparse / sys.argv) before wider use.
img = Image.open("/home/lastjedi/Pictures/view.png")

# HWC uint8 -> CHW float in [0, 1]. permute(2, 0, 1) is equivalent to the
# original transpose(0, 2).transpose(1, 2) chain.
# BUG FIX: to_pil_image treats float tensors as range [0, 1]; the original
# fed it raw 0-255 floats, corrupting the rendered output. Divide by 255.
img = torch.from_numpy(np.array(img, np.float32) / 255.0).permute(2, 0, 1)

output = stm(torch.unsqueeze(img, 0), theta)
img = torchvision.transforms.functional.to_pil_image(output[0])
img.show()