๋ฐ์ํ
1. 몇 개의 단어를 통해서 RNN 구조를 익혀보자¶
In [1]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
데이터셋 구축하기¶
- ๊ธ์ ํ๋ํ๋ ๋จ์๋ก RNN ์ฌ์ฉํด๋ณด๊ธฐ
- hello, apple, hobby, daddy, bobby
In [2]:
# ๋ฌธ์ ๋ฐ์ดํฐ : hell , appl, hobb, dadd, bobb
# ์ด 4๋ฒ ์ํํ๋ ๋จ๊ณ๋ก ์งํ๋จ
# ์ ๋ต ๋ฐ์ดํฐ : o, e, y, y
# timestamps = 4
원 핫 인코딩으로 문자를 숫자로 변경¶
- ๋ฌธ์ + ์ ๋ต ์ ์ฒด ๋ฐ์ดํฐ์์ ๋ฑ์ฅํ๋ ๋ฌธ์ ๋ h,e,l,o,a,p,b,y,d ์ด 9๊ฐ
In [26]:
a = ['hello', 'apple', 'hobby', 'daddy', 'bobby']
# RNN ๋ฐ์ดํฐ ๊ตฌ์กฐ๋ฅผ ํ์
ํ๊ธฐ ์ํด ์ํซ์ธ์ฝ๋ฉ์ ์งํํด๋ณด์
# ๋ฌธ์ ๋ฐ์ดํฐ,3์ฐจ์
X_train = np.array([
# ๊ฐ๊ฐ์ ๋จ์ด๋ค
[[1,0,0,0,0,0,0,0,0],[0,1,0,0,0,0,0,0,0],[0,0,1,0,0,0,0,0,0],[0,0,1,0,0,0,0,0,0]],
[[0,0,0,0,1,0,0,0,0],[0,0,0,0,0,1,0,0,0],[0,0,0,0,0,1,0,0,0],[0,0,1,0,0,0,0,0,0]],
[[1,0,0,0,0,0,0,0,0],[0,0,0,1,0,0,0,0,0],[0,0,0,0,0,0,1,0,0],[0,0,0,0,0,0,1,0,0]],
[[0,0,0,0,0,0,0,0,1],[0,0,0,0,1,0,0,0,0],[0,0,0,0,0,0,0,0,1],[0,0,0,0,0,0,0,0,1]],
[[0,0,0,0,0,0,1,0,0],[0,0,0,1,0,0,0,0,0],[0,0,0,0,0,0,1,0,0],[0,0,0,0,0,0,1,0,0]],
])
# ์ ๋ต ๋ฐ์ดํฐ,2์ฐจ์
y_train = np.array([
[0,0,0,1,0,0,0,0,0], # o
[0,1,0,0,0,0,0,0,0], # e
[0,0,0,0,0,0,0,1,0], # y
[0,0,0,0,0,0,0,1,0]
]) # y
In [27]:
X_train.shape, y_train.shape
# sample ์ํ์
# time steps ์ํ ํ์
# feature ๋ฐ์ดํฐ์ ํน์ฑ ์ =์ํซ์ธ์ฝ๋ฉ๋ ๋ ์ด๋ธ ์
Out[27]:
((5, 4, 9), (4, 9))
Rnn 신경망 모델링¶
In [28]:
from tensorflow.keras import Sequential
from tensorflow.keras.layers import SimpleRNN, Dense
In [29]:
#์ ๊ฒฝ๋ง ์ค๊ณ
model = Sequential()
#์
๋ ฅ์ธต + ์ค๊ฐ์ธต
# 8 ๊ฐ์ ๋ด๋ฐ์ ๊ฐ์ง๊ณ , ๊ฐ ๋ด๋ฐ์ด 4๋ฒ์ฉ ์ํํ๋ฉฐ ๊ฐ ์ํ๋ง๋ค 9๊ฐ์ ์ซ์๊ฐ ๋ค์ด๊ฐ
model.add(SimpleRNN(8, input_shape=(4, 9)))
# ์ถ๋ ฅ์ธต
model.add(Dense(9, activation = 'softmax')) # SOFTMAX ๋ค์ค ๋ถ๋ฅ
model.summary()
Model: "sequential_3" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= simple_rnn_3 (SimpleRNN) (None, 8) 144 dense_3 (Dense) (None, 9) 81 ================================================================= Total params: 225 Trainable params: 225 Non-trainable params: 0 _________________________________________________________________
In [30]:
# RNN์ ๊ฐ์ค์น๊ฐ ๋ ์ข
๋ฅ๊ฐ ์์ต๋๋ค.
# ๊ณผ๊ฑฐ ๋ฐ์ดํฐ์ ๊ฐ์ค์น์ ํ์ฌ ๋ฐ์ดํฐ์ ๊ฐ์ค์น
# ํ์ฌ ๋ฐ์ดํฐ์ ๊ฐ์ค์น(MLP์ ๊ฐ์) => 9(์
๋ ฅ ํน์ฑ์ ๊ฐ์) * 8(RNN์ธต์ ๋ด๋ฐ์) + 8(RNN ์ธต์ ๋ด๋ฐ์ = b๊ฐ ๊ณ์)
# ๊ณผ๊ฑฐ ๋ฐ์ดํฐ์ ๊ฐ์ค์น -> 8 (RNN ์ธต์ ๋ด๋ฐ์) * 8 (RNN์ธต์ ๋ด๋ฐ์)
# RNN ์ธต์ ๊ฐ ๋ด๋ฐ๋ค์ด ๋ฝ์๋ธ ๊ฒฐ๊ณผ๊ฐ ๋ค์ ๋ชจ๋ ๋ด๋ฐ๋ค์๊ฒ ๊ณผ๊ฑฐ ๋ฐ์ดํฐ๋ก ๋์๊ฐ๊ฒ๋จ
# ์ํํ์๋์ ์๊ด์์ด ์ต์ข
๊ฒฐ๊ณผ๊ฐ์ ๋ํ ๊ฐ์ค์น๋ฅผ ์นด์ดํธ
model.compile(loss = 'categorical_crossentropy', # ๋ค์ค ๋ถ๋ฅ์ด๋ฏ๋ก
optimizer='Adam',
metrics =['acc']) # ์ ํ๋
In [ ]:
h = model.fit(X_train, y_train, epochs = 100)
In [ ]:
plt.figure(figsize=(15,5))
plt.plot(h.history['acc'],
label = 'acc')
plt.legend()
plt.show()
๋ฐ์ํ
'๐๏ธ์ํํธ์จ์ด > ๐ปpython' ์นดํ ๊ณ ๋ฆฌ์ ๋ค๋ฅธ ๊ธ
nltk pos_tag ์ข ๋ฅ (0) | 2021.12.04 |
---|---|
python ๊ฐ์ํ๊ฒฝ ์ค์น์ค์ vscode ์๋ฌ : Kernel process Exited (0) | 2021.12.04 |
vscode์์ rtx 2060super ์จ๋จน์ด๋ณด๊ธฐ (feat.gpu cuda๋ฅผ ์ข์จ๋ณด์) (0) | 2021.12.03 |
crawling parrel processing (0) | 2021.11.30 |
[๋ฐฑ์ค]10798. ์ธ๋ก์ฐ๊ธฐ (0) | 2021.11.29 |