[DL] Coding DL from Scratch
Hand-coding deep learning from the ground up, one stitch at a time
1. The case of a single data point
Predicting weight from height
# 0. import
import tensorflow as tf
# 1. data
height = 170
weight = 70
# 2. equation
# weight = height * a + b
# 3. initial parameters
a = tf.Variable(0.1)
b = tf.Variable(0.2)
# 4. loss function (squared error for a single sample)
def loss_func():
    y_pred = height * a + b
    return tf.square(weight - y_pred)
# 5. update a, b by gradient descent (here via the Adam optimizer)
opt = tf.keras.optimizers.Adam(learning_rate=0.1)
for i in range(100):
    opt.minimize(loss_func, var_list=[a, b])
    print(a.numpy(), b.numpy())
--------------------
0.20000286 0.30000287
...
0.40781355 0.50781375
0.40751693 0.5075171
# 6. predict
height = 160
weight = 0.40751693 * height + 0.5075171
print(weight)
--------------------
65.7102259
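Note that opt.minimize hides the actual update step. As a minimal sketch of what happens under the hood (not part of the original code), the same training loop can be written by hand with tf.GradientTape plus opt.apply_gradients:

import tensorflow as tf

height, weight = 170, 70
a = tf.Variable(0.1)
b = tf.Variable(0.2)
opt = tf.keras.optimizers.Adam(learning_rate=0.1)
for i in range(100):
    with tf.GradientTape() as tape:
        y_pred = height * a + b              # forward pass
        loss = tf.square(weight - y_pred)    # squared-error loss
    grads = tape.gradient(loss, [a, b])      # dLoss/da, dLoss/db
    opt.apply_gradients(zip(grads, [a, b]))  # one descent step on a and b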
2. The case of multiple data points
train_x = [1,2,3,4,5,6,7]
train_y = [7,12,17,22,27,32,37]
a = tf.Variable(0.1)
b = tf.Variable(0.2)
def loss_func(a, b):
    pred_y = train_x * a + b
    return tf.keras.losses.mse(train_y, pred_y)  # (actual values, predicted values)
    # MSE = 1/n * ((pred1 - act1)^2 + (pred2 - act2)^2 + ...)
opt = tf.keras.optimizers.Adam(learning_rate=0.01)
for i in range(7000):
    opt.minimize(lambda: loss_func(a, b), var_list=[a, b])
    print(a.numpy(), b.numpy())
--------------------
0.11000029 0.21000029
...
4.999987 2.0000572
4.999987 2.000057
new_x = 10  # a fresh name, so the training list train_x is not shadowed
pred_y = new_x * 4.999987 + 2.000057
print(pred_y)
--------------------
51.999927
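For reference, tf.keras.losses.mse computes exactly the averaged squared error written in the comment above. A hand-rolled equivalent, plus a prediction that reads the learned parameters straight from the variables instead of copy-pasting the printed constants, could look like the sketch below (mse_manual and new_x are illustrative names; a and b are the variables trained above):

# hand-written MSE, equivalent to tf.keras.losses.mse (illustrative sketch)
def mse_manual(y_true, y_pred):
    return tf.reduce_mean(tf.square(tf.cast(y_true, tf.float32) - y_pred))

# predict with the learned variables directly
new_x = 10
pred_y = a.numpy() * new_x + b.numpy()
print(pred_y)  # roughly 52.0, since the training data follows y = 5x + 2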