
How To Modify Adaline Stochastic Gradient Descent

Dear all,

How can I modify my Python program so that it produces the same plot as the one shown in the attached file, "Adaline Stochastic gradient descent"?

(I am using Anaconda Python 3.7.)

Prayerfully

Tron Orino Yeong
tcynotebook@yahoo.com
0916643858

from matplotlib.colors import ListedColormap
import matplotlib.pyplot as plt
import numpy as np
from numpy.random import seed
import pandas as pd

# Adaline trained with stochastic gradient descent
class SGD(object):
   def __init__(self, rate=0.01, niter=10,
                shuffle=True, random_state=None):
      self.rate = rate
      self.niter = niter
      self.weight_initialized = False

      # If True, shuffle training data every epoch
      self.shuffle = shuffle

      # Set random state for shuffling and initializing the weights
      if random_state:
         seed(random_state)

   def fit(self, X, y):
      """Fit training data
      X : Training vectors, X.shape : [#samples, #features]
      y : Target values, y.shape : [#samples]
      """

      # weights
      self.initialize_weights(X.shape[1])

      # Cost function
      self.cost = []

      for i in range(self.niter):
         if self.shuffle:
            X, y = self.shuffle_set(X, y)
         cost = []
         for xi, target in zip(X, y):
            cost.append(self.update_weights(xi, target))
         avg_cost = sum(cost) / len(y)
         self.cost.append(avg_cost)
      return self

   def partial_fit(self, X, y):
      """Fit training data without reinitializing the weights"""
      if not self.weight_initialized:
         self.initialize_weights(X.shape[1])
      if y.ravel().shape[0] > 1:
         for xi, target in zip(X, y):
            self.update_weights(xi, target)
      else:
         self.update_weights(X, y)
      return self

   def shuffle_set(self, X, y):
      """Shuffle training data"""
      r = np.random.permutation(len(y))
      return X[r], y[r]

   def initialize_weights(self, m):
      """Initialize weights to zeros"""
      self.weight = np.zeros(1 + m)
      self.weight_initialized = True

   def update_weights(self, xi, target):
      """Apply the SGD learning rule to update the weights"""
      output = self.net_input(xi)
      error = target - output
      self.weight[1:] += self.rate * xi.dot(error)
      self.weight[0] += self.rate * error
      cost = 0.5 * error**2
      return cost

   def net_input(self, X):
      """Calculate net input"""
      return np.dot(X, self.weight[1:]) + self.weight[0]

   def activation(self, X):
      """Compute linear activation"""
      return self.net_input(X)

   def predict(self, X):
      """Return class label after unit step"""
      return np.where(self.activation(X) >= 0.0, 1, -1)

def plot_decision_regions(X, y, classifier, resolution=0.02):
   # set up marker generator and color map
   markers = ('s', 'x', 'o', '^', 'v')
   colors = ('red', 'blue', 'lightgreen', 'gray', 'cyan')
   cmap = ListedColormap(colors[:len(np.unique(y))])

   # plot the decision surface
   x1_min, x1_max = X[:, 0].min() - 1, X[:, 0].max() + 1
   x2_min, x2_max = X[:, 1].min() - 1, X[:, 1].max() + 1
   xx1, xx2 = np.meshgrid(np.arange(x1_min, x1_max, resolution),
                          np.arange(x2_min, x2_max, resolution))
   Z = classifier.predict(np.array([xx1.ravel(), xx2.ravel()]).T)
   Z = Z.reshape(xx1.shape)
   plt.contourf(xx1, xx2, Z, alpha=0.4, cmap=cmap)
   plt.xlim(xx1.min(), xx1.max())
   plt.ylim(xx2.min(), xx2.max())

   # plot class samples
   for idx, cl in enumerate(np.unique(y)):
      plt.scatter(x=X[y == cl, 0], y=X[y == cl, 1],
                  alpha=0.8, c=colors[idx],
                  marker=markers[idx], label=cl)

# load the first 100 Iris samples (setosa and versicolor)
df = pd.read_csv('https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data', header=None)

y = df.iloc[0:100, 4].values
y = np.where(y == 'Iris-setosa', -1, 1)
X = df.iloc[0:100, [0, 2]].values

# standardize the two features
X_std = np.copy(X)
X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

sgd1 = SGD(niter=100, rate=0.01, random_state=1)
sgd2 = SGD(niter=50, rate=0.01, random_state=1)
sgd3 = SGD(niter=10, rate=0.01, random_state=1)

sgd1.fit(X_std, y)
sgd2.fit(X_std, y)
sgd3.fit(X_std, y)

# fit() already stores the average cost per epoch in self.cost, so it is
# plotted directly (the attribute is .cost, not .cost_, and y_train is
# undefined here); 'oo' and 'xx' are not valid matplotlib line styles.
# Note: the three models differ in the number of epochs (niter), not in
# batch size.
plt.plot(range(1, len(sgd1.cost) + 1), sgd1.cost,
         marker='o', linestyle='-', label='niter=100')
plt.plot(range(1, len(sgd2.cost) + 1), sgd2.cost,
         marker='o', linestyle='--', label='niter=50')
plt.plot(range(1, len(sgd3.cost) + 1), sgd3.cost,
         marker='o', linestyle=':', label='niter=10')

plt.xlabel('Epochs')
plt.ylabel('Average Cost')
plt.legend()
plt.show()
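A note on the legend: the original plot labels read batch=1, batch=2, batch=3, but the three classifiers above differ only in the number of epochs; every weight update uses a single sample. If the figure in the attached PDF compares mini-batch sizes, the update loop has to step once per batch instead. Below is a minimal sketch of such a variant, assuming the SGD class above; MiniBatchSGD and batch_size are illustrative names of mine, not taken from the attachment.

# A hypothetical mini-batch variant of the SGD class above (sketch only)
class MiniBatchSGD(SGD):
   def __init__(self, batch_size=1, **kwargs):
      super().__init__(**kwargs)
      self.batch_size = batch_size  # samples per weight update

   def fit(self, X, y):
      self.initialize_weights(X.shape[1])
      self.cost = []
      for i in range(self.niter):
         if self.shuffle:
            X, y = self.shuffle_set(X, y)
         batch_costs = []
         # one weight update per mini-batch instead of per sample
         for start in range(0, len(y), self.batch_size):
            xb = X[start:start + self.batch_size]
            yb = y[start:start + self.batch_size]
            errors = yb - self.net_input(xb)
            self.weight[1:] += self.rate * xb.T.dot(errors) / len(yb)
            self.weight[0] += self.rate * errors.mean()
            batch_costs.append(0.5 * (errors**2).mean())
         self.cost.append(sum(batch_costs) / len(batch_costs))
      return self

It can be fitted and plotted exactly like sgd1/sgd2/sgd3, e.g. MiniBatchSGD(batch_size=2, niter=15, rate=0.01, random_state=1).fit(X_std, y).

The plot_decision_regions helper above is defined but never called; assuming the goal is also to visualize the decision boundary of a fitted model, it can be used like this (column 0 of the Iris data is sepal length, column 2 petal length):

plot_decision_regions(X_std, y, classifier=sgd1)
plt.xlabel('sepal length [standardized]')
plt.ylabel('petal length [standardized]')
plt.legend(loc='upper left')
plt.show()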


Please also refer to this link:

https://www.scienceforums.net/topic/...omment-1097272



Attached Images:
- 00001.jpg (159.9 KB)
- 00002.jpg (68.4 KB)
Attached Files:
- Adaline Stochastic gradient descent.pdf (171.1 KB)
- Python Stochastic gradient descent.txt (4.4 KB)
Mar 15 '19 #1