Artificial Intelligence Chapter 01: Linear Regression Equation

Step 1: task description

Task: write a small program that loads the linear regression data.
Programming requirements:
The data for this exercise is a univariate dataset. Read the data file with pandas and give each column a name tag, namely Population and Profit.
data = pd.read_csv(path, header=..., names=[...])  # hint code
1. Import os and pandas.
2. Use os.getcwd() to get the current path and join it with the file name.
3. Use pandas to read in the data (n rows, two columns) and name the columns Population and Profit.

#encoding=utf8
import os
import pandas as pd

if __name__ == "__main__":
    path = os.getcwd() + '/ex1data1.txt'  # os.getcwd() returns the current working directory
    # Use pandas to read in the data and name the columns 'Population' and 'Profit'
    #********* begin *********#
    data = pd.read_csv(path, header=None, names=['Population', 'Profit'])
    #********* end *********#
    print(data.shape)
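A quick optional sanity check (not part of the exercise): data.head() prints the first rows of the DataFrame, so you can confirm the two columns were named correctly.

print(data.head())   # first five rows with the Population and Profit columns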

Step 2: loss function
Programming requirements:
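The loss function referred to below is the mean squared error cost, which is exactly what the computeCost code in this step computes:

J(θ) = (1 / (2m)) · Σ_{i=1}^{m} ( h_θ(x^{(i)}) − y^{(i)} )², where h_θ(x^{(i)}) is the model prediction for sample i (computed as X·θᵀ in the code below) and m is the number of samples.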


According to the formula above, write the loss function computeCost(X, y, theta) and return cost at the end.
X: feature matrix, i.e. the Population data (with a column of ones prepended for the bias term);
y: target data, i.e. the Profit data;
theta: model parameters;
cost: value of the loss function.

#encoding=utf8
import numpy as np

def computeCost(X, y, theta):
    #Compute the loss according to the formula above
    #********* begin *********#
    inner = np.power(((X * theta.T) - y), 2)   # squared errors: (prediction - target)^2
    cost = np.sum(inner) / (2 * len(X))        # sum them and divide by 2m

    #********* end *********#
    return round(cost, 10)
    

## Test file: check whether the loss function above is correct
import os
import numpy as np
import pandas as pd
import student

path = os.getcwd() + '/ex1data1.txt'
data = pd.read_csv(path, header=None, names=['Population', 'Profit'])
data.insert(0, 'Ones', 1)           # prepend a bias column of ones
cols = data.shape[1]
X = data.iloc[:,0:cols-1]           # all columns except the last (features)
y = data.iloc[:,cols-1:cols]        # the last column (target)

X = np.matrix(X.values)
y = np.matrix(y.values)
theta = np.matrix(np.array([0,0]))  # initialize both parameters to zero

cost = student.computeCost(X, y, theta)
print("the cost is:", cost)
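With theta initialized to zeros, this prints a single number. For the classic ex1data1.txt dataset (97 samples) that value is roughly 32.07, but treat that expected figure as an assumption about the dataset rather than something guaranteed by the code above.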

Step 3: gradient function
Programming requirements
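The formula referred to below is the batch gradient descent update rule, which matches the code in this step:

θ_j := θ_j − (α / m) · Σ_{i=1}^{m} ( h_θ(x^{(i)}) − y^{(i)} ) · x_j^{(i)}, with every parameter j updated simultaneously on each of the iters iterations.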

According to the formula above, write the gradient descent function gradientDescent(X, y, theta, alpha, iters), and finally return theta and cost.
X: feature matrix, i.e. the Population data (with the bias column of ones);
y: target data, i.e. the Profit data;
theta: model parameters;
m: number of samples;
alpha (α): learning rate;
iters: number of gradient descent iterations.

#encoding=utf8
import numpy as np

def computeCost(X, y, theta):
    inner = np.power(((X * theta.T) - y), 2)
    return np.sum(inner) / (2 * len(X))

def gradientDescent(X, y, theta, alpha, iters):
    temp = np.matrix(np.zeros(theta.shape))     # holds the simultaneously updated parameters
    parameters = int(theta.ravel().shape[1])    # number of parameters in theta
    cost = np.zeros(iters)                      # records the cost after each iteration
    
    for i in range(iters):
        error = (X * theta.T) - y               # prediction error h_theta(x) - y
        
        for j in range(parameters):
            #********* begin *********#
            term = np.multiply(error, X[:,j])                           # error weighted by feature j
            temp[0,j] = theta[0,j] - ((alpha / len(X)) * np.sum(term))  # gradient step for theta_j
            
            #********* end *********#
        theta = temp
        cost[i] = computeCost(X, y, theta)
        
    return theta, cost
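For reference, the same update can be written without the inner loop over the parameters. This is only a minimal vectorized sketch using the same np.matrix inputs as above, not the version required by the exercise:

def gradientDescentVectorized(X, y, theta, alpha, iters):
    cost = np.zeros(iters)
    for i in range(iters):
        error = (X * theta.T) - y                          # m x 1 column of prediction errors
        theta = theta - (alpha / len(X)) * (error.T * X)   # update all parameters at once (1 x 2)
        cost[i] = computeCost(X, y, theta)
    return theta, cost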
    
## Test file: check whether the gradient descent function above is correct
import os
import numpy as np
import pandas as pd
import student

path = os.getcwd() + '/ex1data1.txt'
data = pd.read_csv(path, header=None, names=['Population', 'Profit'])
data.insert(0, 'Ones', 1)
cols = data.shape[1]
X = data.iloc[:,0:cols-1]
y = data.iloc[:,cols-1:cols]

X = np.matrix(X.values)
y = np.matrix(y.values)
theta = np.matrix(np.array([0,0]))

alpha = 0.01
iters = 1000

cost = student.computeCost(X, y, theta)
print("the cost is:", cost)

theta, cost = student.gradientDescent(X, y, theta, alpha, iters)
print("the fitted parameters are:", theta)

**Complete linear regression program**

#encoding=utf8

import os
import numpy as np
import pandas as pd

#Load and process data
path = os.getcwd() + '/ex1data1.txt'
#********* begin *********#
data = pd.read_csv(path, header=None, names=['Population', 'Profit'])

#********* end *********#
data.insert(0, 'Ones', 1)
cols = data.shape[1]
X = data.iloc[:,0:cols-1]
y = data.iloc[:,cols-1:cols]

#Initialize related parameters
X = np.matrix(X.values)
y = np.matrix(y.values)
theta = np.matrix(np.array([0,0]))
alpha = 0.01
iters = 1000

#Define loss function
def computeCost(X, y, theta):
    #********* begin *********#
    inner = np.power(((X * theta.T) - y), 2)
    cost=np.sum(inner) / (2 * len(X))

    #********* end *********#
    return cost

#Define gradient descent function
def gradientDescent(X, y, theta, alpha, iters):
    temp = np.matrix(np.zeros(theta.shape))
    parameters = int(theta.ravel().shape[1])
    cost = np.zeros(iters)
    
    for i in range(iters):
        error = (X * theta.T) - y
        
        for j in range(parameters):
            #********* begin *********#
            term = np.multiply(error, X[:,j])
            temp[0,j] = theta[0,j] - ((alpha / len(X)) * np.sum(term))    

            #********* end *********#            
        theta = temp
        cost[i] = computeCost(X, y, theta)        
    return theta, cost

#Obtain the final linear model parameters with the gradient descent algorithm
g, cost = gradientDescent(X, y, theta, alpha, iters)

print("The model parameters are:", g)
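Once g is obtained, the fitted line can be used to make predictions. A minimal sketch; the population value 7.0 is just an illustrative input, not part of the exercise:

population = 7.0                                   # hypothetical input value
predicted_profit = g[0, 0] + g[0, 1] * population  # intercept + slope * population
print("predicted profit:", predicted_profit)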

import os

1. Read file path
path = os.getcwd() + '/ex1data1.txt'

import pandas as pd

1. data.head()  # view the first rows of the file
2. data['Population']  # view the column named 'Population'
3. data.iloc[0,1]  # view the value in the first row, second column
   data.iloc[:,0]  # view the first column of all rows
   data.iloc[3:10,0:2]  # view rows 3 through 9 (10 excluded) and columns 0 and 1 (2 excluded)
4. data.loc[0, 'Population']  # view the 'Population' value in the first row
5. data.insert(0, 'Ones', 1)  # insert a column named 'Ones' with value 1 as the first column
6. cols = data.shape[1]  # get the number of columns
7. X = data.iloc[:,0:cols-1]  # all rows, every column except the last
8. y = data.iloc[:,cols-1:cols]  # all rows, only the last column
A short runnable demo of these operations is given right after this list.
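A minimal runnable demo of the pandas operations above; the small DataFrame and its values are made up purely for illustration:

import pandas as pd

data = pd.DataFrame({'Population': [6.1, 5.5, 8.5], 'Profit': [17.5, 9.1, 13.6]})  # made-up sample data
print(data.head())             # first rows
print(data['Population'])      # select a column by name
print(data.iloc[0, 1])         # first row, second column
data.insert(0, 'Ones', 1)      # add a column of ones in front
cols = data.shape[1]           # number of columns (now 3)
X = data.iloc[:, 0:cols-1]     # every column except the last
y = data.iloc[:, cols-1:cols]  # only the last column
print(X.shape, y.shape)        # (3, 2) (3, 1)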

import numpy as np

1. X = np.matrix(X.values)  # convert the DataFrame values to a numpy matrix
2. y = np.matrix(y.values)
3. theta = np.matrix(np.array([0,0]))  # creates a matrix with 1 row and 2 columns (a quick shape check is shown below)
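A quick shape check, assuming X has m rows and two columns (Ones and Population), showing why X * theta.T works in computeCost; the numeric values are made up:

import numpy as np

X = np.matrix([[1, 6.1], [1, 5.5], [1, 8.5]])   # m x 2: bias column plus Population
y = np.matrix([[17.5], [9.1], [13.6]])          # m x 1 target
theta = np.matrix(np.array([0, 0]))             # 1 x 2 parameter row

print(theta.shape)           # (1, 2)
print((X * theta.T).shape)   # (3, 1): one prediction per sample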
