日韩性视频-久久久蜜桃-www中文字幕-在线中文字幕av-亚洲欧美一区二区三区四区-撸久久-香蕉视频一区-久久无码精品丰满人妻-国产高潮av-激情福利社-日韩av网址大全-国产精品久久999-日本五十路在线-性欧美在线-久久99精品波多结衣一区-男女午夜免费视频-黑人极品ⅴideos精品欧美棵-人人妻人人澡人人爽精品欧美一区-日韩一区在线看-欧美a级在线免费观看

歡迎訪問 生活随笔!

生活随笔

當前位置: 首頁 > 编程语言 > python > 内容正文

python

【Python-ML】自适应线性神经网络(Adaline)

發布時間:2025/4/16 python 38 豆豆
生活随笔 收集整理的這篇文章主要介紹了 【Python-ML】自适应线性神经网络(Adaline) 小編覺得挺不錯的,現在分享給大家,幫大家做個參考.

# -*- coding: utf-8 -*-
"""
Created on 2017-12-21
@author: Jason.F
@summary: Adaptive linear neuron (Adaline) learning algorithm, trained with
batch gradient descent and demonstrated on the Iris dataset.
"""
import numpy as np


class AdalineGD(object):
    """Adaptive Linear Neuron classifier (batch gradient descent).

    Hyper-parameters
    ----------------
    eta : float
        Learning rate (between 0.0 and 1.0).
    n_iter : int
        Passes (epochs) over the training dataset.

    Attributes
    ----------
    w_ : 1d-array
        Weights after fitting; w_[0] is the bias unit.
    costs_ : list
        Sum-of-squared-errors cost recorded in every epoch.
    """

    def __init__(self, eta=0.01, n_iter=50):
        self.eta = eta
        self.n_iter = n_iter

    def fit(self, X, y):
        """Fit training data.

        Parameters
        ----------
        X : {array-like}, shape = [n_samples, n_features]
            Training vectors, where n_samples is the number of samples
            and n_features is the number of features.
        y : array-like, shape = [n_samples]
            Target values (+1 / -1).

        Returns
        -------
        self : object
        """
        self.w_ = np.zeros(1 + X.shape[1])
        self.costs_ = []
        for _ in range(self.n_iter):
            output = self.net_input(X)
            errors = y - output
            # Batch update: one gradient step of the SSE cost over the
            # whole training set per epoch.
            self.w_[1:] += self.eta * X.T.dot(errors)
            self.w_[0] += self.eta * errors.sum()
            cost = (errors ** 2).sum() / 2.0
            self.costs_.append(cost)
        return self

    def net_input(self, X):
        # Net input z = w^T x + bias.
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def activation(self, X):
        # Identity (linear) activation — Adaline learns on the raw net input.
        return self.net_input(X)

    def predict(self, X):
        # Class label after the unit-step threshold.
        return np.where(self.activation(X) >= 0.0, 1, -1)


if __name__ == "__main__":
    # Script-only dependencies are imported here so the class above can be
    # imported without matplotlib/pandas installed.
    import time
    import matplotlib.pyplot as plt
    import pandas as pd

    # time.clock() was removed in Python 3.8; perf_counter() replaces it.
    start = time.perf_counter()
    # Training data: Iris (4 features + class label in column 4).
    train = pd.read_csv(
        'https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data',
        header=None)
    X_train = train.drop([4], axis=1).values  # DataFrame -> ndarray
    y_train = train[4].values
    # Feature standardization: zero mean, unit standard deviation per column.
    X_std = np.copy(X_train)
    X_std[:, 0] = (X_train[:, 0] - X_train[:, 0].mean()) / X_train[:, 0].std()
    X_std[:, 1] = (X_train[:, 1] - X_train[:, 1].mean()) / X_train[:, 1].std()
    #X_std[:, 2] = (X_train[:, 2] - X_train[:, 2].mean()) / X_train[:, 2].std()
    #X_std[:, 3] = (X_train[:, 3] - X_train[:, 3].mean()) / X_train[:, 3].std()
    y = np.where(y_train == 'Iris-setosa', -1, 1)  # one vs rest: OvR
    # Compare two learning rates at a fixed epoch count.
    fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(8, 4))
    # eta=0.01, n_iter=20
    agd1 = AdalineGD(eta=0.01, n_iter=20).fit(X_std, y)
    # NOTE(review): this sample is in raw (unstandardized) units while the
    # model was trained on partially standardized features — confirm intent.
    print(agd1.predict([6.9, 3.0, 5.1, 1.8]))
    ax[0].plot(range(1, len(agd1.costs_) + 1), np.log10(agd1.costs_), marker='o')
    ax[0].set_xlabel('Epochs')
    ax[0].set_ylabel('log(Sum-Squared-error)')
    ax[0].set_title('Adaline-learning rate 0.01')
    # eta=0.0001, n_iter=20
    agd2 = AdalineGD(eta=0.0001, n_iter=20).fit(X_std, y)
    print(agd2.predict([6.9, 3.0, 5.1, 1.8]))
    ax[1].plot(range(1, len(agd2.costs_) + 1), np.log10(agd2.costs_), marker='x')
    ax[1].set_xlabel('Epochs')
    ax[1].set_ylabel('log(Sum-Squared-error)')
    ax[1].set_title('Adaline-learning rate 0.0001')
    plt.show()
    end = time.perf_counter()
    print('finish all in %s' % str(end - start))

# -*- coding: utf-8 -*-
"""
Created on 2017-12-21
@author: Jason.F
@summary: Adaptive linear neuron (Adaline) learning algorithm, trained with
stochastic gradient descent (one weight update per sample) on Iris.
"""
import numpy as np
from numpy.random import seed


class AdalineSGD(object):
    """Adaptive Linear Neuron classifier (stochastic gradient descent).

    Hyper-parameters
    ----------------
    eta : float
        Learning rate (between 0.0 and 1.0).
    n_iter : int
        Passes (epochs) over the training dataset.
    shuffle : bool (default: True)
        Shuffle training data every epoch to prevent cycles.
    random_state : int (default: None)
        Seed for shuffling and weight initialization.

    Attributes
    ----------
    w_ : 1d-array
        Weights after fitting; w_[0] is the bias unit.
    cost_ : list
        Average per-sample cost in every epoch.  (Docstring corrected:
        the attribute has always been ``cost_``, not ``costs_``.)
    """

    def __init__(self, eta=0.01, n_iter=20, shuffle=True, random_state=None):
        self.eta = eta
        self.n_iter = n_iter
        self.w_initialized = False
        self.shuffle = shuffle
        if random_state:
            seed(random_state)

    def fit(self, X, y):
        """Fit training data.

        Parameters
        ----------
        X : {array-like}, shape = [n_samples, n_features]
            Training vectors, where n_samples is the number of samples
            and n_features is the number of features.
        y : array-like, shape = [n_samples]
            Target values (+1 / -1).

        Returns
        -------
        self : object
        """
        self._initialize_weights(X.shape[1])
        self.cost_ = []
        for _ in range(self.n_iter):
            if self.shuffle:
                X, y = self._shuffle(X, y)
            cost = []
            # One weight update per training sample (stochastic GD).
            for xi, target in zip(X, y):
                cost.append(self._update_weights(xi, target))
            avg_cost = sum(cost) / len(y)
            self.cost_.append(avg_cost)
        return self

    def partial_fit(self, X, y):
        """Fit training data without reinitializing the weights (online learning)."""
        if not self.w_initialized:
            self._initialize_weights(X.shape[1])
        # A single (xi, target) pair is also accepted; detect it by size.
        if y.ravel().shape[0] > 1:
            for xi, target in zip(X, y):
                self._update_weights(xi, target)
        else:
            self._update_weights(X, y)
        return self

    def _shuffle(self, X, y):
        # Shuffle X and y with one shared permutation so pairs stay aligned.
        r = np.random.permutation(len(y))
        return X[r], y[r]

    def _initialize_weights(self, m):
        # Initialize weights (and bias) to zeros.
        self.w_ = np.zeros(1 + m)
        self.w_initialized = True

    def _update_weights(self, xi, target):
        # Apply the Adaline learning rule for one sample; return its cost.
        output = self.net_input(xi)
        error = target - output
        self.w_[1:] += self.eta * xi.dot(error)
        self.w_[0] += self.eta * error
        cost = 0.5 * error ** 2
        return cost

    def net_input(self, X):
        # Net input z = w^T x + bias.
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def activation(self, X):
        # Identity (linear) activation.
        return self.net_input(X)

    def predict(self, X):
        # Class label after the unit-step threshold.
        return np.where(self.activation(X) >= 0.0, 1, -1)


if __name__ == "__main__":
    # Script-only dependencies are imported here so the class above can be
    # imported without matplotlib/pandas installed.
    import time
    import matplotlib.pyplot as plt
    import pandas as pd

    # time.clock() was removed in Python 3.8; perf_counter() replaces it.
    start = time.perf_counter()
    # Training data: Iris (4 features + class label in column 4).
    train = pd.read_csv(
        'https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data',
        header=None)
    X_train = train.drop([4], axis=1).values  # DataFrame -> ndarray
    y_train = train[4].values
    # Feature standardization: zero mean, unit standard deviation per column.
    X_std = np.copy(X_train)
    X_std[:, 0] = (X_train[:, 0] - X_train[:, 0].mean()) / X_train[:, 0].std()
    X_std[:, 1] = (X_train[:, 1] - X_train[:, 1].mean()) / X_train[:, 1].std()
    #X_std[:, 2] = (X_train[:, 2] - X_train[:, 2].mean()) / X_train[:, 2].std()
    #X_std[:, 3] = (X_train[:, 3] - X_train[:, 3].mean()) / X_train[:, 3].std()
    y = np.where(y_train == 'Iris-setosa', -1, 1)  # one vs rest: OvR
    # Compare two learning rates at a fixed epoch count.
    fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(8, 4))
    # eta=0.01, n_iter=20
    agd1 = AdalineSGD(eta=0.01, n_iter=20, random_state=1).fit(X_std, y)
    # NOTE(review): this sample is in raw (unstandardized) units while the
    # model was trained on partially standardized features — confirm intent.
    print(agd1.predict([6.9, 3.0, 5.1, 1.8]))
    ax[0].plot(range(1, len(agd1.cost_) + 1), agd1.cost_, marker='o')
    ax[0].set_xlabel('Epochs')
    ax[0].set_ylabel('Average Cost')
    ax[0].set_title('Adaline-learning rate 0.01')
    # eta=0.0001, n_iter=20
    agd2 = AdalineSGD(eta=0.0001, n_iter=20, random_state=1).fit(X_std, y)
    print(agd2.predict([6.9, 3.0, 5.1, 1.8]))
    ax[1].plot(range(1, len(agd2.cost_) + 1), agd2.cost_, marker='x')
    ax[1].set_xlabel('Epochs')
    ax[1].set_ylabel('Average Cost')
    ax[1].set_title('Adaline-learning rate 0.0001')
    plt.show()
    # Demonstrate online updating with a single extra sample.
    print(agd2.w_)  # before the online update
    agd2.partial_fit(X_std[0, :], y[0])
    print(agd2.w_)  # after the online update
    end = time.perf_counter()
    print('finish all in %s' % str(end - start))

下圖是對特征值不做標準化時的訓練結果,可以與上面標準化後的效果做比對:


《新程序員》:云原生和全面數字化實踐50位技術專家共同創作,文字、視頻、音頻交互閱讀

總結

以上是生活随笔為你收集整理的【Python-ML】自适应线性神经网络(Adaline)的全部內容,希望文章能夠幫你解決所遇到的問題。

如果覺得生活随笔網站內容還不錯,歡迎將生活随笔推薦給好友。