Chapter 9. Text Classification – Recurrent Neural Networks (3)

– Implementing the Recurrent Neural Network Class

1. Modifying the __init__() method

import numpy as np
import tensorflow as tf

def __init__(self, n_cells=10, batch_size=32, learning_rate=0.1):
  self.n_cells = n_cells           # number of cells
  self.batch_size = batch_size     # batch size
  self.w1h = None            # weights for the hidden state
  self.w1x = None            # weights for the input
  self.b1 = None             # bias of the recurrent layer
  self.w2 = None             # weights of the output layer
  self.b2 = None             # bias of the output layer
  self.h = None              # activation outputs of the recurrent layer
  self.losses = []           # training losses
  self.val_losses = []       # validation losses
  self.lr = learning_rate    # learning rate

2. Initializing the weights with an orthogonal matrix

def init_weights(self, n_features, n_classes):
  orth_init = tf.initializers.Orthogonal()
  glorot_init = tf.initializers.GlorotUniform()

  self.w1h = orth_init((self.n_cells, self.n_cells)).numpy() # (n_cells, n_cells)
  self.w1x = glorot_init((n_features, self.n_cells)).numpy() # (n_features, n_cells)
  self.b1 = np.zeros(self.n_cells)                           # (n_cells,)
  self.w2 = glorot_init((self.n_cells, n_classes)).numpy()   # (n_cells, n_classes)
  self.b2 = np.zeros(n_classes)                              # (n_classes,)
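As a quick sanity check (illustrative only, not part of the original code), an orthogonal matrix W satisfies W·Wᵀ = I, which is what keeps repeated multiplication by w1h from shrinking or blowing up the hidden state:

import numpy as np
import tensorflow as tf

w = tf.initializers.Orthogonal()((4, 4)).numpy()            # small example matrix
print(np.allclose(np.dot(w, w.T), np.eye(4), atol=1e-6))    # True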

3. Implementing the forward pass

def forpass(self, x):
  self.h = [np.zeros((x.shape[0], self.n_cells))]  # initialize the hidden state

  # Swap the batch and timestep dimensions.
  seq = np.swapaxes(x, 0, 1)

  # Compute the linear output of the recurrent layer at every timestep.
  for x in seq:
      z1 = np.dot(x, self.w1x) + np.dot(self.h[-1], self.w1h) + self.b1
      h = np.tanh(z1)                    # apply the activation function
      self.h.append(h)                   # store the hidden state for backpropagation
      z2 = np.dot(h, self.w2) + self.b2  # linear output of the output layer

  return z2

4. NumPy's swapaxes() function swaps the input's first (batch) dimension with its second (timestep) dimension, as sketched below.
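For example (illustrative sizes only), swapping the axes turns an input of shape (batch, timesteps, features) into (timesteps, batch, features), so iterating over the first axis walks through the timesteps:

import numpy as np

x = np.zeros((32, 100, 100))     # (batch, timesteps, features) – example sizes
seq = np.swapaxes(x, 0, 1)
print(seq.shape)                 # (100, 32, 100)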


5. The forward pass is then carried out over every timestep of each sample; the shapes involved are traced below.
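As a rough shape trace (assuming, for illustration, 100 features, 32 cells, and a batch of 32), each term of the recurrence at one timestep has the following shape:

import numpy as np

batch, n_features, n_cells = 32, 100, 32          # illustrative sizes
x_t    = np.zeros((batch, n_features))            # input at one timestep
h_prev = np.zeros((batch, n_cells))               # previous hidden state
w1x    = np.zeros((n_features, n_cells))
w1h    = np.zeros((n_cells, n_cells))
b1     = np.zeros(n_cells)

z1 = np.dot(x_t, w1x) + np.dot(h_prev, w1h) + b1  # (32, 32)
h  = np.tanh(z1)                                  # (32, 32)
print(z1.shape, h.shape)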

6. Implementing backpropagation

def backprop(self, x, err):
  m = len(x)       # number of samples

  # Gradients of the output layer weights and bias.
  w2_grad = np.dot(self.h[-1].T, err) / m
  b2_grad = np.sum(err) / m

  # Swap the batch and timestep dimensions.
  seq = np.swapaxes(x, 0, 1)

  w1h_grad = w1x_grad = b1_grad = 0
  # Gradient up to just before the cell.
  err_to_cell = np.dot(err, self.w2.T) * (1 - self.h[-1] ** 2)

  # Propagate the gradient backwards through the timesteps (truncated to the last 10).
  for x, h in zip(seq[::-1][:10], self.h[:-1][::-1][:10]):
      w1h_grad += np.dot(h.T, err_to_cell)
      w1x_grad += np.dot(x.T, err_to_cell)
      b1_grad += np.sum(err_to_cell, axis=0)
      # Gradient up to just before the cell of the previous timestep.
      err_to_cell = np.dot(err_to_cell, self.w1h) * (1 - h ** 2)

  w1h_grad /= m
  w1x_grad /= m
  b1_grad /= m

  return w1h_grad, w1x_grad, b1_grad, w2_grad, b2_grad
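The factor (1 - h ** 2) is the derivative of tanh, since d tanh(z)/dz = 1 - tanh(z)², and the [:10] slices truncate backpropagation through time to the last 10 timesteps. A tiny numerical check of the derivative (illustrative only):

import numpy as np

z = 0.7
h = np.tanh(z)
analytic = 1 - h ** 2                                     # the factor used in backprop
numeric  = (np.tanh(z + 1e-6) - np.tanh(z - 1e-6)) / 2e-6
print(np.isclose(analytic, numeric))                      # True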

7. Updating the remaining methods

※ Complete code

class RecurrentNetwork:

    def __init__(self, n_cells=10, batch_size=32, learning_rate=0.1):
        self.n_cells = n_cells           # number of cells
        self.batch_size = batch_size     # batch size
        self.w1h = None            # weights for the hidden state
        self.w1x = None            # weights for the input
        self.b1 = None             # bias of the recurrent layer
        self.w2 = None             # weights of the output layer
        self.b2 = None             # bias of the output layer
        self.h = None              # activation outputs of the recurrent layer
        self.losses = []           # training losses
        self.val_losses = []       # validation losses
        self.lr = learning_rate    # learning rate

    def forpass(self, x):
        self.h = [np.zeros((x.shape[0], self.n_cells))]  # initialize the hidden state
        # Swap the batch and timestep dimensions.
        seq = np.swapaxes(x, 0, 1)
        # Compute the linear output of the recurrent layer at every timestep.
        for x in seq:
            z1 = np.dot(x, self.w1x) + np.dot(self.h[-1], self.w1h) + self.b1
            h = np.tanh(z1)                    # apply the activation function
            self.h.append(h)                   # store the hidden state for backpropagation
            z2 = np.dot(h, self.w2) + self.b2  # linear output of the output layer
        return z2

    def backprop(self, x, err):
        m = len(x)       # number of samples

        # Gradients of the output layer weights and bias.
        w2_grad = np.dot(self.h[-1].T, err) / m
        b2_grad = np.sum(err) / m
        # Swap the batch and timestep dimensions.
        seq = np.swapaxes(x, 0, 1)

        w1h_grad = w1x_grad = b1_grad = 0
        # Gradient up to just before the cell.
        err_to_cell = np.dot(err, self.w2.T) * (1 - self.h[-1] ** 2)
        # Propagate the gradient backwards through the timesteps (truncated to the last 10).
        for x, h in zip(seq[::-1][:10], self.h[:-1][::-1][:10]):
            w1h_grad += np.dot(h.T, err_to_cell)
            w1x_grad += np.dot(x.T, err_to_cell)
            b1_grad += np.sum(err_to_cell, axis=0)
            # Gradient up to just before the cell of the previous timestep.
            err_to_cell = np.dot(err_to_cell, self.w1h) * (1 - h ** 2)

        w1h_grad /= m
        w1x_grad /= m
        b1_grad /= m

        return w1h_grad, w1x_grad, b1_grad, w2_grad, b2_grad

    def sigmoid(self, z):
        z = np.clip(z, -100, None)   # clip for a safe np.exp() computation
        a = 1 / (1 + np.exp(-z))     # sigmoid
        return a

    def init_weights(self, n_features, n_classes):
        orth_init = tf.initializers.Orthogonal()
        glorot_init = tf.initializers.GlorotUniform()

        self.w1h = orth_init((self.n_cells, self.n_cells)).numpy() # (n_cells, n_cells)
        self.w1x = glorot_init((n_features, self.n_cells)).numpy() # (n_features, n_cells)
        self.b1 = np.zeros(self.n_cells)                           # (n_cells,)
        self.w2 = glorot_init((self.n_cells, n_classes)).numpy()   # (n_cells, n_classes)
        self.b2 = np.zeros(n_classes)                              # (n_classes,)

    def fit(self, x, y, epochs=100, x_val=None, y_val=None):
        y = y.reshape(-1, 1)
        y_val = y_val.reshape(-1, 1)
        np.random.seed(42)
        self.init_weights(x.shape[2], y.shape[1])  # initialize the hidden and output layer weights
        # Repeat for the given number of epochs.
        for i in range(epochs):
            print('에포크', i, end=' ')
            # Iterate over the mini-batches returned by the generator function.
            batch_losses = []
            for x_batch, y_batch in self.gen_batch(x, y):
                print('.', end='')
                a = self.training(x_batch, y_batch)
                # Clip for a safe log computation.
                a = np.clip(a, 1e-10, 1-1e-10)
                # Compute the log loss and add it to the list.
                loss = np.mean(-(y_batch*np.log(a) + (1-y_batch)*np.log(1-a)))
                batch_losses.append(loss)
            print()
            self.losses.append(np.mean(batch_losses))
            # Compute the loss on the validation set.
            self.update_val_loss(x_val, y_val)

    # Mini-batch generator function.
    def gen_batch(self, x, y):
        length = len(x)
        bins = length // self.batch_size  # number of mini-batches
        if length % self.batch_size:
            bins += 1                     # when the length does not divide evenly
        indexes = np.random.permutation(np.arange(len(x)))  # shuffle the indexes
        x = x[indexes]
        y = y[indexes]
        for i in range(bins):
            start = self.batch_size * i
            end = self.batch_size * (i + 1)
            yield x[start:end], y[start:end]  # slice off batch_size samples at a time

    def training(self, x, y):
        m = len(x)           # number of samples
        z = self.forpass(x)  # forward pass
        a = self.sigmoid(z)  # apply the activation function
        err = -(y - a)       # compute the error
        # Backpropagate the error to compute the gradients.
        w1h_grad, w1x_grad, b1_grad, w2_grad, b2_grad = self.backprop(x, err)
        # Update the cell weights and bias.
        self.w1h -= self.lr * w1h_grad
        self.w1x -= self.lr * w1x_grad
        self.b1 -= self.lr * b1_grad
        # Update the output layer weights and bias.
        self.w2 -= self.lr * w2_grad
        self.b2 -= self.lr * b2_grad
        return a

    def predict(self, x):
        z = self.forpass(x)  # forward pass
        return z > 0         # apply the step function

    def score(self, x, y):
        # Compare the predictions with the target column vector and return the fraction of True values.
        return np.mean(self.predict(x) == y.reshape(-1, 1))

    def update_val_loss(self, x_val, y_val):
        z = self.forpass(x_val)          # forward pass
        a = self.sigmoid(z)              # apply the activation function
        a = np.clip(a, 1e-10, 1-1e-10)   # clip the output values
        val_loss = np.mean(-(y_val*np.log(a) + (1-y_val)*np.log(1-a)))
        self.val_losses.append(val_loss)


– Training the Recurrent Neural Network Model
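The training below assumes that x_train_onehot, y_train, x_val_onehot, and y_val are the one-hot encoded IMDB sequences prepared earlier in the chapter. A minimal sketch of that preparation (the vocabulary size, sequence length, and split used here are assumptions, not taken from this post):

import numpy as np
from tensorflow.keras.datasets import imdb
from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow.keras.utils import to_categorical

# Assumed preprocessing: a small vocabulary and a fixed sequence length
# keep the one-hot tensors small enough to fit in memory.
(x_train_all, y_train_all), _ = imdb.load_data(num_words=100)
x_train_all = pad_sequences(x_train_all, maxlen=100)

x_train, y_train = x_train_all[:20000], y_train_all[:20000]
x_val, y_val     = x_train_all[20000:], y_train_all[20000:]

x_train_onehot = to_categorical(x_train)   # (samples, 100, 100)
x_val_onehot   = to_categorical(x_val)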

1. Train the recurrent neural network model (32 cells, batch size 32, learning rate 0.01, 20 epochs)

rn = RecurrentNetwork(n_cells=32, batch_size=32, learning_rate=0.01)

rn.fit(x_train_onehot, y_train, epochs=20, x_val=x_val_onehot, y_val=y_val)


##
/usr/local/lib/python3.8/dist-packages/keras/initializers/initializers_v2.py:120: UserWarning: The initializer GlorotUniform is unseeded and being called multiple times, which will return identical values  each time (even if the initializer is unseeded). Please update your code to provide a seed to the initializer, or avoid using the same initalizer instance more than once.
  warnings.warn(
에포크 0 .........................................................................
에포크 1 .........................................................................
...
에포크 19 ........................................................................

2. Plot the loss curves for the training and validation sets

import matplotlib.pyplot as plt

plt.plot(rn.losses)
plt.plot(rn.val_losses)
plt.show()
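Optionally (a small addition, not part of the original code), axis labels and a legend make the two curves easier to tell apart:

plt.plot(rn.losses)
plt.plot(rn.val_losses)
plt.xlabel('epoch')
plt.ylabel('loss')
plt.legend(['train_loss', 'val_loss'])
plt.show()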


3. Evaluate the accuracy on the validation set

rn.score(x_val_onehot, y_val)


## Output: 0.6948
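Note that predict() thresholds the linear output z at 0, which is equivalent to thresholding the sigmoid output at 0.5; a quick illustrative check:

import numpy as np

z = np.array([-2.0, -0.1, 0.0, 0.3, 5.0])
a = 1 / (1 + np.exp(-z))
print((z > 0) == (a > 0.5))   # [ True  True  True  True  True]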

※ These notes were studied and organized based on the contents of <Introduction to Deep Learning (딥러닝 입문)>.