
Exercise: Learning the Stochastic Gradient Descent Algorithm

  • Compare the update formulas and the implementations of stochastic gradient descent (SGD) and batch gradient descent; the two update rules are given right after this list
  • Complete the code below (the blank SGD and batchSGD function bodies)
  • Full code: sgd_whyx.txt (change the extension to .py)
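
For reference, here are the two update rules being compared, in the standard least-squares form (hypothesis h(x) = theta^T x, learning rate alpha, m training samples; the 1/m averaging factor is sometimes folded into alpha):

Batch gradient descent (one update uses all m samples):
$$\theta \leftarrow \theta - \alpha \, \frac{1}{m} \sum_{i=1}^{m} \bigl(\theta^\top x^{(i)} - y^{(i)}\bigr)\, x^{(i)}$$

Stochastic gradient descent (one update uses a single, randomly chosen sample i):
$$\theta \leftarrow \theta - \alpha \, \bigl(\theta^\top x^{(i)} - y^{(i)}\bigr)\, x^{(i)}$$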
# -*- coding: utf-8 -*-
'''
@date: 2017-11-25
@author: whyx
@title: demo of SGD
'''
import numpy as np
import random
import matplotlib.pyplot as plt
 
 
trainData = np.array(
    [[1.1, 1.5, 1], [1.3, 1.9, 1], [1.5, 2.3, 1], [1.7, 2.7, 1], [1.9, 3.1, 1], [2.1, 3.5, 1], [2.3, 3.9, 1],
     [2.5, 4.3, 1], [2.7, 4.7, 1], [2.9, 5.1, 1]])  # training set
 
trainLabel = np.array([2.5, 3.2, 3.9, 4.6, 5.3, 6, 6.7, 7.4, 8.1, 8.8])  # training labels

testData = np.array([[3.1, 5.5], [3.3, 5.9], [3.5, 6.3], [3.7, 6.7], [3.9, 7.1]])  # test set

testRes = np.array([9.5, 10.2, 10.9, 11.6, 12.3])  # ground-truth values for the test set
 
m, n = np.shape(trainData)  # m samples, n features (including the bias column)
theta = np.ones(n)          # initial parameters
alpha = 0.01                # learning rate
maxIteration = 5000         # number of update steps
 
def batchSGD(x, y, theta, alpha, m, maxIteration):
    # your code here (batch gradient descent)

    return theta
 
 
def SGD(x, y, theta, alpha, m, maxIteration):
    # your code here (stochastic gradient descent)

    return theta
 
 
def predict(x, theta, testRes):
    # print(np.shape(x))
    m, n = np.shape(x)
    xTest = np.ones([m, n + 1])  # note: testData lacks the trailing bias column of 1s
    xTest[:, :-1] = x  # copy the first two columns; the last column stays 1
    res = np.dot(xTest, theta)  # predictions
    loss = res - testRes  # residuals against the true values
    loss_sqr = np.dot(loss, loss.T)  # total loss: sum of squared residuals
    print('loss total:', loss_sqr)
    return res
 
# plt.plot(trainData)
# plt.show()
 
theta = np.ones(n)  # reset theta
theta = batchSGD(trainData, trainLabel, theta, alpha, m, maxIteration)
print('BSGD theta = ', theta)  # batch gradient: averages over the whole training set
print(predict(testData, theta, testRes))

theta = np.ones(n)  # reset theta
theta = SGD(trainData, trainLabel, theta, alpha, m, maxIteration)
print('SGD theta = ', theta)  # stochastic descent: one random sample per update
print(predict(testData, theta, testRes))
print('Real Values', testRes)
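
After you have attempted the exercise yourself, you can compare against the completion below. This is one possible sketch, not an official solution: it assumes the plain least-squares gradient (matching the formulas above) and reuses the random module and the globals already defined in the file.

def batchSGD(x, y, theta, alpha, m, maxIteration):
    # batch gradient descent: every update uses all m training samples
    for _ in range(maxIteration):
        loss = np.dot(x, theta) - y        # residuals for the whole set, shape (m,)
        gradient = np.dot(x.T, loss) / m   # gradient averaged over all samples
        theta = theta - alpha * gradient
    return theta

def SGD(x, y, theta, alpha, m, maxIteration):
    # stochastic gradient descent: every update uses one random sample
    for _ in range(maxIteration):
        i = random.randint(0, m - 1)       # pick one training example at random
        loss = np.dot(x[i], theta) - y[i]  # scalar residual for that example
        theta = theta - alpha * loss * x[i]
    return theta

With this data (which lies exactly on a line), both versions should reduce the printed loss total to a small value; batchSGD is deterministic, while SGD's theta varies from run to run because of the random sampling.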