import torch
import torch.nn as nn
import torch.nn.init as init
import matplotlib.pyplot as plt
# Print the installed PyTorch version (notebook was run with 1.1.0).
print(torch.__version__)
# Output: 1.1.0
# Create a 1x1x3x3 input tensor filled with ones (batch=1, channel=1, H=W=3).
img = torch.ones(1,1,3,3)
print(img)
plt.imshow(img.numpy()[0,0,...],vmin=0)
# Output:
# tensor([[[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.]]]])
# <matplotlib.image.AxesImage at 0x7f44a786ada0>
# Transposed convolution, stride=1, padding=0: output size is
# (in - 1)*stride - 2*padding + kernel = (3-1)*1 - 0 + 3 = 5, so 3x3 -> 5x5.
transpose = nn.ConvTranspose2d(in_channels=1, out_channels=1, kernel_size=3, stride=1, padding=0, output_padding=0, bias=False)
#print(transpose.weight.data)
# Initialize the transposed-convolution weights to 1 so the result is easy to verify.
init.constant_(transpose.weight.data,1)
# Output:
# tensor([[[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.]]]])
out = transpose(img)
print(out,out.size())
plt.imshow(out.detach().numpy()[0,0,...],vmin=0)
plt.show()
# Output:
# tensor([[[[1., 2., 3., 2., 1.], [2., 4., 6., 4., 2.], [3., 6., 9., 6., 3.], [2., 4., 6., 4., 2.], [1., 2., 3., 2., 1.]]]], grad_fn=<ThnnConvTranspose2DBackward>) torch.Size([1, 1, 5, 5])
# Transposed convolution, stride=2, padding=0: output size is
# (3-1)*2 - 0 + 3 = 7, so 3x3 -> 7x7; overlapping kernels sum where they meet.
transpose = nn.ConvTranspose2d(in_channels=1, out_channels=1, kernel_size=3, stride=2, padding=0, output_padding=0, bias=False)
init.constant_(transpose.weight.data,1)
out = transpose(img)
print(out,out.size())
plt.imshow(out.detach().numpy()[0,0,...],vmin=0)
# Output:
# tensor([[[[1., 1., 2., 1., 2., 1., 1.], [1., 1., 2., 1., 2., 1., 1.], [2., 2., 4., 2., 4., 2., 2.], [1., 1., 2., 1., 2., 1., 1.], [2., 2., 4., 2., 4., 2., 2.], [1., 1., 2., 1., 2., 1., 1.], [1., 1., 2., 1., 2., 1., 1.]]]], grad_fn=<ThnnConvTranspose2DBackward>) torch.Size([1, 1, 7, 7])
# <matplotlib.image.AxesImage at 0x7f44a4f5e860>
# Transposed convolution, stride=2, padding=1: output size is
# (3-1)*2 - 2*1 + 3 = 5, so 3x3 -> 5x5 (padding trims one row/col from each edge).
transpose = nn.ConvTranspose2d(in_channels=1, out_channels=1, kernel_size=3, stride=2, padding=1, output_padding=0, bias=False)
init.constant_(transpose.weight.data,1)
out = transpose(img)
print(out,out.size())
plt.imshow(out.detach().numpy()[0,0,...],vmin=0)
# Output:
# tensor([[[[1., 2., 1., 2., 1.], [2., 4., 2., 4., 2.], [1., 2., 1., 2., 1.], [2., 4., 2., 4., 2.], [1., 2., 1., 2., 1.]]]], grad_fn=<ThnnConvTranspose2DBackward>) torch.Size([1, 1, 5, 5])
# <matplotlib.image.AxesImage at 0x7f44a4ebfc18>
# Transposed convolution, stride=2, output_padding=1: output size is
# (3-1)*2 - 0 + 3 + 1 = 8; output_padding appends an extra (zero-filled)
# row/column on the bottom-right edge.
transpose = nn.ConvTranspose2d(in_channels=1, out_channels=1, kernel_size=3, stride=2, padding=0, output_padding=1, bias=False)
init.constant_(transpose.weight.data,1)
out=transpose(img)
print(out,out.size())
plt.imshow(out.detach().numpy()[0,0,...],vmin=0)
# Output:
# tensor([[[[1., 1., 2., 1., 2., 1., 1., 0.], [1., 1., 2., 1., 2., 1., 1., 0.], [2., 2., 4., 2., 4., 2., 2., 0.], [1., 1., 2., 1., 2., 1., 1., 0.], [2., 2., 4., 2., 4., 2., 2., 0.], [1., 1., 2., 1., 2., 1., 1., 0.], [1., 1., 2., 1., 2., 1., 1., 0.], [0., 0., 0., 0., 0., 0., 0., 0.]]]], grad_fn=<ThnnConvTranspose2DBackward>) torch.Size([1, 1, 8, 8])
# <matplotlib.image.AxesImage at 0x7f44a4e93cc0>
# Transposed convolution, stride=2, padding=1, output_padding=1: output size is
# (3-1)*2 - 2*1 + 3 + 1 = 6, i.e. exactly 2x upsampling of the 3x3 input —
# the common configuration for doubling spatial resolution.
transpose = nn.ConvTranspose2d(in_channels=1, out_channels=1, kernel_size=3, stride=2, padding=1, output_padding=1, bias=False)
init.constant_(transpose.weight.data,1)
out = transpose(img)
print(out,out.size())
plt.imshow(out.detach().numpy()[0,0,...],vmin=0)
# Output:
# tensor([[[[1., 2., 1., 2., 1., 1.], [2., 4., 2., 4., 2., 2.], [1., 2., 1., 2., 1., 1.], [2., 4., 2., 4., 2., 2.], [1., 2., 1., 2., 1., 1.], [1., 2., 1., 2., 1., 1.]]]], grad_fn=<ThnnConvTranspose2DBackward>) torch.Size([1, 1, 6, 6])
# <matplotlib.image.AxesImage at 0x7f44a4e6c6d8>
# Transposed convolution, kernel=4, stride=2, padding=1: output size is
# (3-1)*2 - 2*1 + 4 = 6 — also 2x upsampling, but with an even kernel and no
# output_padding needed.
transpose = nn.ConvTranspose2d(in_channels=1, out_channels=1, kernel_size=4, stride=2, padding=1, output_padding=0, bias=False)
init.constant_(transpose.weight.data,1)
out = transpose(img)
print(out,out.size())
plt.imshow(out.detach().numpy()[0,0,...],vmin=0)
# Output:
# tensor([[[[1., 2., 2., 2., 2., 1.], [2., 4., 4., 4., 4., 2.], [2., 4., 4., 4., 4., 2.], [2., 4., 4., 4., 4., 2.], [2., 4., 4., 4., 4., 2.], [1., 2., 2., 2., 2., 1.]]]], grad_fn=<ThnnConvTranspose2DBackward>) torch.Size([1, 1, 6, 6])
# <matplotlib.image.AxesImage at 0x7f44a4f994e0>