# Check that PyTorch is installed.
!pip3 install torch torchvision
Requirement already satisfied: torch in /usr/local/lib/python3.6/dist-packages (1.1.0) Requirement already satisfied: torchvision in /usr/local/lib/python3.6/dist-packages (0.3.0) Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from torch) (1.16.4) Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from torchvision) (1.12.0) Requirement already satisfied: pillow>=4.1.1 in /usr/local/lib/python3.6/dist-packages (from torchvision) (4.3.0) Requirement already satisfied: olefile in /usr/local/lib/python3.6/dist-packages (from pillow>=4.1.1->torchvision) (0.46)
# Import NumPy and PyTorch.
import numpy as np
import torch
# torch.nn holds neural-network building blocks (e.g. Linear, Convolution, RNN, ...).
import torch.nn as nn
# torch.optim holds the optimizers used to train a model (e.g. SGD, Adam, LBFGS, ...).
import torch.optim as optim
# torch.nn.init holds tensor-initialization functions (e.g. uniform, normal, xavier, ...).
import torch.nn.init as init
# matplotlib is used to visualize the data and the training results.
import matplotlib.pyplot as plt
# Number of data points (1000) and training iterations (500).
# Both values are chosen arbitrarily for this example.
num_data = 1000
num_epoch = 500

# Gaussian noise to add to the targets: mean 0 (the default), std 0.2
# (also arbitrary choices).
noise = init.normal_(torch.FloatTensor(num_data, 1), std=0.2)

# Inputs: a 1000x1 tensor with values drawn uniformly from [-10, 10].
x = init.uniform_(torch.Tensor(num_data, 1), -10, 10)

# Ground-truth linear relation defining the computation graph.
y = 2 * x + 3

# Noisy targets used for training: real-world measurements usually pick up
# noise, so we fit against y + noise rather than the clean y.
y_noise = y + noise

# https://matplotlib.org/3.1.0/api/_as_gen/matplotlib.pyplot.scatter.html
# Visualize the training data as a scatter plot: x on the horizontal axis,
# y_noise on the vertical axis; point size 7, gray points (arbitrary styling).
plt.figure(figsize=(10, 10))
plt.scatter(x.numpy(), y_noise.numpy(), s=7, c="gray")
# Fix the axis ranges, then render the figure.
plt.axis([-12, 12, -25, 25])
plt.show()
# Linear model with one input feature and one output feature:
# x is 1000x1 (one feature per sample) -> Linear(1, ?), and the expected
# output is also 1000x1 (one value per sample) -> Linear(?, 1).
model = nn.Linear(1, 1)

# L1 (mean absolute error) loss between prediction and target
# (an arbitrary choice for this example).
loss_func = nn.L1Loss()

# Plain SGD over the model's trainable parameters (weight and bias),
# with a learning rate of 0.01 (also arbitrary).
optimizer = optim.SGD(model.parameters(), lr=0.01)

# Record the loss at every iteration so its progression can be plotted later.
loss_arr = []

# The noisy observations serve as the regression targets.
label = y_noise
# Run the training loop for the configured number of iterations (500).
for i in range(num_epoch):
    # Clear the gradients left over from the previous step; PyTorch
    # accumulates gradients, so without this the new gradients would be
    # added on top of the old ones and training would go wrong.
    optimizer.zero_grad()
    # Forward pass: feed the inputs through the model.
    output = model(x)
    # L1 loss between the prediction and the noisy targets.
    loss = loss_func(output, label)
    # Backward pass: compute gradients of the loss w.r.t. the parameters.
    loss.backward()
    # Update the model's parameters using the computed gradients.
    optimizer.step()

    # Every 10 iterations, visualize the current fit and print the loss.
    if i % 10 == 0:
        # https://pytorch.org/docs/stable/tensors.html?highlight=detach#torch.Tensor.detach
        # detach() separates x and output from the autograd graph so they
        # can be converted to NumPy arrays for plotting.
        plt.scatter(x.detach().numpy(), output.detach().numpy())
        plt.axis([-10, 10, -30, 30])
        plt.show()
        # loss.item() extracts the Python float from the 0-dim loss tensor;
        # this is the documented way to read a scalar, unlike the
        # deprecated `loss.data` the original used.
        print(loss.item())

    # Record the scalar loss (a plain float rather than a 0-dim NumPy
    # array) for the loss curve plotted after training.
    loss_arr.append(loss.item())
tensor(5.5297)
tensor(3.6593)
tensor(2.6410)
tensor(2.5033)
tensor(2.4026)
tensor(2.3027)
tensor(2.2028)
tensor(2.1030)
tensor(2.0031)
tensor(1.9033)
tensor(1.8035)
tensor(1.7036)
tensor(1.6038)
tensor(1.5040)
tensor(1.4042)
tensor(1.3043)
tensor(1.2045)
tensor(1.1047)
tensor(1.0049)
tensor(0.9053)
tensor(0.8056)
tensor(0.7061)
tensor(0.6066)
tensor(0.5074)
tensor(0.4103)
tensor(0.3236)
tensor(0.2546)
tensor(0.2080)
tensor(0.1821)
tensor(0.1688)
tensor(0.1627)
tensor(0.1603)
tensor(0.1593)
tensor(0.1589)
tensor(0.1587)
tensor(0.1587)
tensor(0.1587)
tensor(0.1586)
tensor(0.1586)
tensor(0.1586)
tensor(0.1586)
tensor(0.1586)
tensor(0.1586)
tensor(0.1586)
tensor(0.1586)
tensor(0.1586)
tensor(0.1586)
tensor(0.1586)
tensor(0.1586)
tensor(0.1586)
# Final visualization: the noisy training data in gray overlaid with the
# model's predictions in red.
plt.figure(figsize=(15, 15))
plt.scatter(x.numpy(), y_noise.numpy(), s=5, c="gray")
plt.scatter(x.detach().numpy(), output.detach().numpy(), s=5, c="red")
plt.axis([-10, 10, -30, 30])
plt.show()

# Plot the recorded losses to confirm they decreased over training.
plt.plot(loss_arr)
plt.show()

# The model holds a weight and a bias; print their learned values, which
# should be close to the true weight (2) and bias (3) used to generate
# the data.
param_list = list(model.parameters())
print("Weight:",param_list[0].item(),"\nBias: ",param_list[1].item())
Weight: 2.001312017440796 Bias: 2.98982834815979