15.2 The Most Popular Machine Learning and Deep Learning Platform: TensorFlow

In [1]:
!pip install tensorflow
Requirement already satisfied: tensorflow in /usr/local/lib/python3.7/dist-packages (2.8.0)
In [2]:
import tensorflow as tf

print(tf.__version__)
2.8.0
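
In Colab it is also worth checking whether a GPU runtime is attached before training larger models; a minimal sketch using TensorFlow's device-listing API (this chapter runs fine on a CPU runtime as well):

# List the GPUs visible to TensorFlow; an empty list means the runtime is CPU-only.
gpus = tf.config.list_physical_devices('GPU')
print('GPUs available:', gpus)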

15.3 Using TensorFlow with Google Colaboratory

In [3]:
# Import tensorflow and tf.keras
import tensorflow as tf
from tensorflow import keras 
import numpy as np
import matplotlib.pyplot as plt
 
# The Fashion MNIST data ships with keras' datasets; load it and split it into training and test data
fashion_mnist = keras.datasets.fashion_mnist 
(train_images, train_labels), (test_images, test_labels) = fashion_mnist.load_data()
In [4]:
print(train_images.shape)  # Print the shape of the training images and the labels
print(train_labels)
print(test_images.shape)
(60000, 28, 28)
[9 0 0 ... 3 0 5]
(10000, 28, 28)
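
Before building a model it helps to confirm what these arrays actually hold; a small sketch that checks the pixel type and range and the set of label values (the images are 8-bit integers in 0-255 and the labels are the digits 0-9):

# Pixels are stored as uint8 values in the range 0-255 (not yet scaled to 0-1).
print(train_images.dtype, train_images.min(), train_images.max())
# The ten class labels are the integers 0 through 9.
print(np.unique(train_labels))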

15.5 Fashion MNIST Is Labeled Image Data

In [5]:
fig = plt.figure()
ax1 = fig.add_subplot(1, 3, 1)
ax2 = fig.add_subplot(1, 3, 2)
ax3 = fig.add_subplot(1, 3, 3)

ax1.imshow(train_images[0])       # first training image
ax2.imshow(train_images[1])       # second training image
ax3.imshow(train_images[2])       # third training image
plt.show()
In [6]:
print(train_labels[:3])
[9 0 0]

15.6 Deep Learning? Let's Build an Artificial Neural Network

In [7]:
model = keras.Sequential([
    keras.layers.Flatten(input_shape=(28, 28)),
    keras.layers.Dense(128, activation='relu'),
    keras.layers.Dense(10, activation='softmax')
])
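
Calling summary() on the freshly built model prints the layer shapes and parameter counts; as a quick sanity check, the hidden Dense layer should report 784*128 + 128 = 100,480 parameters and the output layer 128*10 + 10 = 1,290:

# Print the layer-by-layer structure and parameter counts of the model.
model.summary()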

15.7 Let's Train the Neural Network: Optimization and Epochs

In [8]:
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
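
sparse_categorical_crossentropy is chosen here because the labels are plain integer class indices (0-9). If the labels were one-hot encoded, the non-sparse categorical_crossentropy loss would be used instead; a minimal sketch of that encoding (not needed in this chapter):

# One-hot encode the integer labels; each row then has a single 1 at the class index.
train_labels_onehot = keras.utils.to_categorical(train_labels, num_classes=10)
print(train_labels_onehot[:3])
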
In [9]:
model.fit(train_images, train_labels, epochs=5)
Epoch 1/5
1875/1875 [==============================] - 6s 3ms/step - loss: 3.9218 - accuracy: 0.6828
Epoch 2/5
1875/1875 [==============================] - 7s 4ms/step - loss: 0.7127 - accuracy: 0.7242
Epoch 3/5
1875/1875 [==============================] - 5s 3ms/step - loss: 0.6353 - accuracy: 0.7470
Epoch 4/5
1875/1875 [==============================] - 4s 2ms/step - loss: 0.5764 - accuracy: 0.7936
Epoch 5/5
1875/1875 [==============================] - 4s 2ms/step - loss: 0.5440 - accuracy: 0.8123
Out[9]:
<keras.callbacks.History at 0x7f2af70ad250>
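
The History object returned by fit() (Out[9] above) keeps the per-epoch loss and accuracy in its history dictionary; a minimal sketch of plotting the training curves, assuming the return value is stored in a variable (note that running this trains the model for another five epochs):

# Keep the return value of fit() to access the per-epoch metrics.
history = model.fit(train_images, train_labels, epochs=5)

plt.plot(history.history['loss'], label='loss')
plt.plot(history.history['accuracy'], label='accuracy')
plt.xlabel('epoch')
plt.legend()
plt.show()
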
In [10]:
test_loss, test_acc = model.evaluate(test_images, test_labels, verbose=2)
print('\nTest accuracy:', test_acc)
313/313 - 1s - loss: 0.5907 - accuracy: 0.7885 - 536ms/epoch - 2ms/step

Test accuracy: 0.7885000109672546

15.8 Let's Apply the Trained Network to a New Image

In [11]:
test_images.shape
Out[11]:
(10000, 28, 28)
In [12]:
import numpy as np
randIdx = np.random.randint(0, 1000)
plt.imshow(test_images[randIdx])
Out[12]:
<matplotlib.image.AxesImage at 0x7f2af3914290>
In [13]:
yhat = model.predict( test_images[randIdx][np.newaxis, :, :])
yhat
Out[13]:
array([[6.3061146e-03, 4.0521673e-03, 2.5764462e-05, 9.8613852e-01,
        1.8511799e-03, 1.2283935e-11, 1.3911854e-03, 1.7016291e-21,
        2.3499684e-04, 3.3500117e-17]], dtype=float32)
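
The ten numbers are softmax outputs, i.e. one probability per class for the single input image; a quick check that they sum to roughly 1 and that the largest value sits at index 3:

print(yhat.shape)        # (1, 10): one row of ten class probabilities
print(yhat.sum())        # the softmax outputs sum to approximately 1.0
print(np.argmax(yhat))   # index of the most probable class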

15.9 Finding and Printing the Predicted Class

In [14]:
yhat = np.argmax( model.predict( test_images[randIdx][np.newaxis, :, :]) )
yhat
Out[14]:
3
In [15]:
class_names = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
               'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']
In [16]:
yhat = np.argmax( model.predict( test_images[randIdx][np.newaxis, :, :]) )
print(class_names[yhat])
Dress
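
The predict, argmax, and class_names lookup steps can be wrapped in one small helper; a sketch using a hypothetical function name predict_class:

def predict_class(image):
    # Return the predicted class name for a single 28x28 image.
    probs = model.predict(image[np.newaxis, :, :])   # add the batch dimension
    return class_names[np.argmax(probs)]

print(predict_class(test_images[randIdx]))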

15.10 Using Your Own Data: Accessing Files Uploaded to Drive

In [17]:
from google.colab import drive
drive.mount('/content/drive')
Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount("/content/drive", force_remount=True).
In [18]:
!pwd
/content
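
Once the drive is mounted it behaves like an ordinary directory, so the same listing can also be done from Python; a minimal sketch using os.listdir (printing only the first few entries):

import os

# The mounted drive appears under /content/drive/My Drive.
print(os.listdir('/content/drive/My Drive')[:5])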

15.11 Using Data Stored in the Cloud from Colab

In [19]:
!ls ./drive/'My Drive' -la
total 12068
drwx------ 2 root root    4096 Feb  5  2020  ????
drwx------ 2 root root    4096 Sep 21  2020  ?????
drwx------ 2 root root    4096 Nov 19  2020  ?????
drwx------ 2 root root    4096 Nov  9 08:39 '????? ?????'
-rw------- 1 root root   73930 Sep 10  2020  bag_cartoon.png
-rw------- 1 root root  309419 Oct 26 09:24 'chap6 (1).pdf'
-rw------- 1 root root  309419 Oct 26 09:25  chap6.pdf
drwx------ 2 root root    4096 Aug  9  2021 'Colab my test'
drwx------ 2 root root    4096 Jun  4  2021 'Colab Notebooks'
-rw------- 1 root root 9665466 Oct 26 09:25  jeff.pdf
-rw------- 1 root root  598385 Sep  9  2020  myData.png
-rw------- 1 root root 1374504 Mar 24 10:09  myFirstModel.h5

15.12 Reading an Image File from Drive and Displaying It

In [20]:
import matplotlib.image as mpimg
import matplotlib.pyplot as plt

img = mpimg.imread('./drive/My Drive/myData.png')
plt.imshow(img)
Out[20]:
<matplotlib.image.AxesImage at 0x7f2af6eed890>
In [21]:
import cv2
img = cv2.imread('./drive/My Drive/myData.png', cv2.IMREAD_GRAYSCALE)
img = cv2.resize(img, (28, 28) )
plt.imshow(img)
Out[21]:
<matplotlib.image.AxesImage at 0x7f2af2971cd0>

15.13 Let's Feed the Image to the Model and Try to Recognize It

In [22]:
input_data = img[np.newaxis, :, :]
input_data.shape
Out[22]:
(1, 28, 28)
In [23]:
class_names = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
              'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']
yhat = np.argmax( model.predict( input_data ) )

print(class_names[yhat])
Bag
In [24]:
input_mirror = input_data[:, :, ::-1]
plt.imshow(input_mirror[0])
Out[24]:
<matplotlib.image.AxesImage at 0x7f2af4e64110>
In [25]:
class_names = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
              'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']
yhat = np.argmax( model.predict( input_mirror ) )

print(class_names[yhat])
Ankle boot
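
Printing the full probability rows for the original and the mirrored image side by side makes the shift in the prediction visible; a small sketch reusing input_data and input_mirror from above:

# Class probabilities for the original versus the horizontally mirrored image.
print(np.round(model.predict(input_data), 3))
print(np.round(model.predict(input_mirror), 3))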

15.14 Neural Network Training and Bias: Recognizing the Limits of Learning

In [26]:
model2 = keras.Sequential([
    keras.layers.Flatten(input_shape=(28, 28)),
    keras.layers.Dense(128, activation='relu'),
    keras.layers.Dense(64, activation='relu'),
    keras.layers.Dense(32, activation='relu'),
    keras.layers.Dense(10, activation='softmax')
])
model2.compile(optimizer='adam',
             loss='sparse_categorical_crossentropy',
             metrics=['accuracy'])

model2.fit(train_images, train_labels, epochs=5)
Epoch 1/5
1875/1875 [==============================] - 5s 3ms/step - loss: 1.6357 - accuracy: 0.5627
Epoch 2/5
1875/1875 [==============================] - 5s 3ms/step - loss: 0.8956 - accuracy: 0.6446
Epoch 3/5
1875/1875 [==============================] - 5s 3ms/step - loss: 0.6894 - accuracy: 0.7281
Epoch 4/5
1875/1875 [==============================] - 5s 3ms/step - loss: 0.5930 - accuracy: 0.7697
Epoch 5/5
1875/1875 [==============================] - 5s 3ms/step - loss: 0.5280 - accuracy: 0.8038
Out[26]:
<keras.callbacks.History at 0x7f2af4dbe6d0>

15.16 Saving, Loading, and Using the Trained Model

In [27]:
model2.save('./drive/My Drive/myFirstModel.h5')
!ls ./drive/'My Drive' -la
total 12068
drwx------ 2 root root    4096 Feb  5  2020  ????
drwx------ 2 root root    4096 Sep 21  2020  ?????
drwx------ 2 root root    4096 Nov 19  2020  ?????
drwx------ 2 root root    4096 Nov  9 08:39 '????? ?????'
-rw------- 1 root root   73930 Sep 10  2020  bag_cartoon.png
-rw------- 1 root root  309419 Oct 26 09:24 'chap6 (1).pdf'
-rw------- 1 root root  309419 Oct 26 09:25  chap6.pdf
drwx------ 2 root root    4096 Aug  9  2021 'Colab my test'
drwx------ 2 root root    4096 Jun  4  2021 'Colab Notebooks'
-rw------- 1 root root 9665466 Oct 26 09:25  jeff.pdf
-rw------- 1 root root  598385 Sep  9  2020  myData.png
-rw------- 1 root root 1374504 Mar 24 10:16  myFirstModel.h5
In [28]:
model_imported = keras.models.load_model('./drive/My Drive/myFirstModel.h5')
model_imported.summary()
Model: "sequential_1"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 flatten_1 (Flatten)         (None, 784)               0         
                                                                 
 dense_2 (Dense)             (None, 128)               100480    
                                                                 
 dense_3 (Dense)             (None, 64)                8256      
                                                                 
 dense_4 (Dense)             (None, 32)                2080      
                                                                 
 dense_5 (Dense)             (None, 10)                330       
                                                                 
=================================================================
Total params: 111,146
Trainable params: 111,146
Non-trainable params: 0
_________________________________________________________________
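
Because the HDF5 file stores the weights together with the compile settings, the reloaded model can be evaluated or used for prediction just like the original; a minimal sketch, assuming the Fashion MNIST test set from earlier is still in memory:

# The reloaded model should reproduce model2's performance on the test set.
loss, acc = model_imported.evaluate(test_images, test_labels, verbose=2)
print('Reloaded model test accuracy:', acc)
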
In [29]:
!ls -al ./drive/'My Drive'
total 12068
drwx------ 2 root root    4096 Feb  5  2020  ????
drwx------ 2 root root    4096 Sep 21  2020  ?????
drwx------ 2 root root    4096 Nov 19  2020  ?????
drwx------ 2 root root    4096 Nov  9 08:39 '????? ?????'
-rw------- 1 root root   73930 Sep 10  2020  bag_cartoon.png
-rw------- 1 root root  309419 Oct 26 09:24 'chap6 (1).pdf'
-rw------- 1 root root  309419 Oct 26 09:25  chap6.pdf
drwx------ 2 root root    4096 Aug  9  2021 'Colab my test'
drwx------ 2 root root    4096 Jun  4  2021 'Colab Notebooks'
-rw------- 1 root root 9665466 Oct 26 09:25  jeff.pdf
-rw------- 1 root root  598385 Sep  9  2020  myData.png
-rw------- 1 root root 1374504 Mar 24 10:16  myFirstModel.h5
In [30]:
import cv2
img = cv2.imread('./drive/My Drive/bag_cartoon.png', cv2.IMREAD_GRAYSCALE)
plt.imshow(img)
Out[30]:
<matplotlib.image.AxesImage at 0x7f2af4cac7d0>
In [31]:
img = cv2.resize(img, (28, 28) )
plt.imshow(img)
Out[31]:
<matplotlib.image.AxesImage at 0x7f2af57ae410>
In [32]:
input_data = img[np.newaxis, :, :]
yhat = np.argmax( model_imported.predict( input_data ) )
print(class_names[yhat])
Bag