Keras 实现非线性回归(多层 tanh 网络拟合 y = x²)

参考代码:

import keras
import matplotlib.pyplot as plt
import numpy as np
from keras.layers import Dense, Activation
from keras.models import Sequential
from keras.optimizers import SGD

# Record the Keras version this script was written against.
print(f"keras version=> {keras.__version__}")
# keras version=> 2.3.1

# ---------- Generate data ----------
np.random.seed(0)  # fixed seed so the noise (and the logged run) is reproducible

# Training set: 1000 evenly spaced inputs in [-0.6, 0.6],
# targets y = x^2 plus small Gaussian noise (sigma = 0.002).
trainx = np.linspace(-0.6, 0.6, 1000)
noise = np.random.normal(0, 0.002, trainx.shape)
trainy = np.square(trainx) + noise

# Validation set: 200 inputs drawn from the SAME domain as training.
# BUGFIX: the original used np.random.random((200,)), i.e. samples from
# [0, 1), much of which lies OUTSIDE the training range [-0.6, 0.6] — the
# model was being validated on extrapolation, which is why the logged
# val_loss (~0.1) was an order of magnitude above the training loss.
validx = np.random.uniform(-0.6, 0.6, 200)
validy = np.square(validx)  # clean (noise-free) targets for validation

plt.scatter(trainx, trainy)
plt.show()

print("input data shape: trainx=>{},trainy=>{},validx=>{},validy=>{}".
      format(trainx.shape, trainy.shape, validx.shape, validy.shape))
# input data shape: trainx=>(1000,),trainy=>(1000,),validx=>(200,),validy=>(200,)

# ---------- Define the model ----------
# A small MLP: 1 input -> Dense(10) -> tanh -> Dense(1) -> tanh.
# Activations are kept as standalone Activation layers (rather than the
# `activation=` kwarg) so that model.layers[0] and model.layers[2] below
# are exactly the two Dense layers.
model = Sequential([
    Dense(10, input_dim=1),
    Activation('tanh'),
    Dense(1),
    Activation('tanh'),
])

print(model.summary())
# Model: "sequential_1"
# _________________________________________________________________
# Layer (type)                 Output Shape              Param #
# =================================================================
# dense_1 (Dense)              (None, 10)                20
# _________________________________________________________________
# activation_1 (Activation)    (None, 10)                0
# _________________________________________________________________
# dense_2 (Dense)              (None, 1)                 11
# _________________________________________________________________
# activation_2 (Activation)    (None, 1)                 0
# =================================================================
# Total params: 31 (trainable: 31, non-trainable: 0)

# ---------- Train the model ----------
# MSE loss with plain SGD; the `learning_rate` argument name (instead of
# the older `lr`) requires Keras >= 2.3.
optimizer = SGD(learning_rate=0.3)
model.compile(loss='mse', optimizer=optimizer)
model.fit(trainx, trainy,
          epochs=10,
          batch_size=20,
          validation_data=(validx, validy))

# Training log from the original run (abridged). Note val_loss is ~10x
# the training loss because the original validation inputs were drawn
# from [0, 1), partly outside the training range [-0.6, 0.6]:
# Train on 1000 samples, validate on 200 samples
# Epoch  1/10 - loss: 0.0136 - val_loss: 0.1582
# Epoch  2/10 - loss: 0.0124 - val_loss: 0.1704
# ...
# Epoch  9/10 - loss: 0.0088 - val_loss: 0.1171
# Epoch 10/10 - loss: 0.0081 - val_loss: 0.0981

# ---------- Inspect the learned parameters ----------
# layers[0] is the first Dense (kernel (1, 10), bias (10,));
# layers[1] is its tanh Activation, so the second Dense is layers[2].
first_dense = model.layers[0]
W1, b1 = first_dense.get_weights()
print('W1=', W1, 'b1=', b1)
# W1= [[ 0.28135994 -0.47979814  0.14364576 ...  0.90930325  0.4009881 ]]
# b1= [ 1.1032269e-04  3.2156784e-02 ... 3.8890505e-01 -1.6171424e-02]

second_dense = model.layers[2]  # kernel (10, 1), bias (1,)
W3, b3 = second_dense.get_weights()
print('W3=', W3, 'b3=', b3)
# W3= [[ 0.03213232] [-0.08738267] ... [-0.5664696 ] [ 0.08584545]]
# b3= [0.33810842]



个人资料
感情洁癖
等级:5
文章:4篇
访问:1.9k
排名: 30
上一篇: Keras 实现一元线性回归
下一篇:Keras实现非线性回归
猜你感兴趣的圈子:
深度学习交流圈
标签: Keras、非线性回归、神经网络、面试题
隐藏