Python Implements a Simple Linear Regression Case

  • 2021-08-28 20:40:21
  • OfStack


#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @File :  Self-implemented linear regression.py
# @Author:  Zhao Lucang 
# @Date : 2020/4/12
# @Desc :
# @Contact : 398333404@qq.com
import os

import tensorflow as tf


def linear_regression():
  """
   Self-implemented linear regression
  :return:
  """
  #  Namespace 
  with tf.variable_scope("prepared_data"):
    #  Prepare data 
    x = tf.random_normal(shape=[100, 1], name="Feature")
    y_true = tf.matmul(x, [[0.08]]) + 0.7
    # x = tf.constant([[1.0], [2.0], [3.0]])
    # y_true = tf.constant([[0.78], [0.86], [0.94]])

  with tf.variable_scope("create_model"):
    # 2. Build the model
    #  Define model variable parameters 
    weights = tf.Variable(initial_value=tf.random_normal(shape=[1, 1]), name="Weights")
    bias = tf.Variable(initial_value=tf.random_normal(shape=[1, 1]), name="Bias")
    y_predict = tf.matmul(x, weights) + bias

  with tf.variable_scope("loss_function"):
    # 3. Construct the loss function (mean squared error)
    error = tf.reduce_mean(tf.square(y_predict - y_true))

  with tf.variable_scope("optimizer"):
    # 4. Minimize the loss with gradient descent
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(error)

  #  Collect variables 
  tf.summary.scalar("error", error)
  tf.summary.histogram("weights", weights)
  tf.summary.histogram("bias", bias)

  #  Merge all summaries
  merged = tf.summary.merge_all()

  #  Create a Saver object
  saver = tf.train.Saver()

  #  Op that explicitly initializes the variables
  init = tf.global_variables_initializer()

  #  Open a session 
  with tf.Session() as sess:
    #  Initialize the variables
    sess.run(init)

    #  Create an event file 
    file_writer = tf.summary.FileWriter("E:/tmp/linear", graph=sess.graph)

    # print(x.eval())
    # print(y_true.eval())
    #  View the model parameters after variable initialization
    print("Model parameters before training: weight %f, bias %f" % (weights.eval(), bias.eval()))

    #  Start training 
    for i in range(1000):
      sess.run(optimizer)
      print(" No. 1 %d Secondary parameter is: Weight %f, Offset %f, Loss %f" % (i + 1, weights.eval(), bias.eval(), error.eval()))

      #  Run the merged summary op
      summary = sess.run(merged)
      #  Write the summaries for this iteration to the event file
      file_writer.add_summary(summary, i)

      #  Save model 
      if i == 999:
        saver.save(sess, "./tmp/model/my_linear.ckpt")

    # #  Loading model 
    # if os.path.exists("./tmp/model/checkpoint"):
    #   saver.restore(sess, "./tmp/model/my_linear.ckpt")
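    # #  A hedged note: if these two lines are uncommented, saver.restore() reloads the
    # #  weights and bias previously written by saver.save(), so a later run could start
    # #  from the trained values instead of the random initialization (assuming the
    # #  checkpoint files under ./tmp/model/ exist).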

    print(" Parameter is: Weight %f, Offset %f, Loss %f" % (weights.eval(), bias.eval(), error.eval()))
    pre = [[0.5]]
    prediction = tf.matmul(pre, weights) + bias
    sess.run(prediction)
    print(prediction.eval())

  return None


if __name__ == "__main__":
  linear_regression()
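
The listing above uses the TensorFlow 1.x graph API (tf.variable_scope, tf.Session, tf.train.GradientDescentOptimizer, tf.summary.FileWriter). If only TensorFlow 2.x is installed, the script can usually still be run through the v1 compatibility layer; a minimal sketch, assuming a standard TensorFlow 2.x installation, is to replace the TensorFlow import at the top with:

import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()  # restore graph mode so Session, variable_scope and the 1.x summary/Saver APIs behave as above

The event file written to E:/tmp/linear can then be inspected with TensorBoard (for example, tensorboard --logdir E:/tmp/linear) to view the error scalar and the weight/bias histograms collected during training.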

The above covers the details of implementing a simple linear regression case in Python. For more on implementing linear regression in Python, please see the other related articles on this site!

