
How to modify Adaline Stochastic gradient descent


 
Dear all,

May I know how to modify my Python program so that it produces the same
picture as the one in the attached file - Adaline Stochastic gradient descent?

(I am using Anaconda Python 3.7.)

Prayerfully 
 
Tron Orino Yeong  
tcynotebook@yahoo.com 
0916643858
 
 
 

from matplotlib.colors import ListedColormap
import matplotlib.pyplot as plt
import numpy as np
from numpy.random import seed
import pandas as pd

# Adaline (adaptive linear neuron) trained with stochastic gradient descent
class SGD(object):
   def __init__(self, rate=0.01, niter=10,
                shuffle=True, random_state=None):
      self.rate = rate
      self.niter = niter
      self.weight_initialized = False

      # If True, Shuffles training data every epoch
      self.shuffle = shuffle

      # Set random state for shuffling and initializing the weights.
      if random_state:
         seed(random_state)

   def fit(self, X, y):
      """Fit training data
      X : Training vectors, X.shape : [#samples, #features]
      y : Target values, y.shape : [#samples]
      """

      # weights
      self.initialize_weights(X.shape[1])

      # Cost function
      self.cost = []

      for i in range(self.niter):
         if self.shuffle:
            X, y = self.shuffle_set(X, y)
         cost = []
         for xi, target in zip(X, y):
            cost.append(self.update_weights(xi, target))
         avg_cost = sum(cost)/len(y)
         self.cost.append(avg_cost)
      return self

   def partial_fit(self, X, y):
      """Fit training data without reinitializing the weights"""
      if not self.weight_initialized:
         self.initialize_weights(X.shape[1])
      if y.ravel().shape[0] > 1:
         for xi, target in zip(X, y):
            self.update_weights(xi, target)
      else:
         self.update_weights(X, y)
      return self

   def shuffle_set(self, X, y):
      """Shuffle training data"""
      r = np.random.permutation(len(y))
      return X[r], y[r]

   def initialize_weights(self, m):
      """Initialize weights to zeros"""
      self.weight = np.zeros(1 + m)
      self.weight_initialized = True

   def update_weights(self, xi, target):
      """Apply SGD learning rule to update the weights"""
      output = self.net_input(xi)
      error = (target - output)
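      # Adaline SGD rule: each weight moves down the gradient of the
      # squared error, w_j += rate * (target - output) * x_j, and the
      # bias unit gets w_0 += rate * (target - output)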
      self.weight[1:] += self.rate * xi.dot(error)
      self.weight[0] += self.rate * error
      cost = 0.5 * error**2
      return cost

   def net_input(self, X):
      """Calculate net input"""
      return np.dot(X, self.weight[1:]) + self.weight[0]

   def activation(self, X):
      """Compute linear activation"""
      return self.net_input(X)

   def predict(self, X):
      """Return class label after unit step"""
      return np.where(self.activation(X) >= 0.0, 1, -1)
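
# Minimal online-learning sketch (not part of the original post; the
# name demo is illustrative). partial_fit updates the weights from new
# samples without reinitializing them, which is the point of keeping
# weight_initialized around.
demo = SGD(rate=0.01, random_state=1)
demo.partial_fit(np.array([[1.0, 2.0], [2.0, 1.0]]), np.array([1, -1]))
demo.partial_fit(np.array([0.5, 1.5]), np.array(1))  # one more sample, weights kept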

def plot_decision_regions(X, y, classifier, resolution=0.02):
   # setup marker generator and color map
   markers = ('s', 'x', 'o', '^', 'v')
   colors = ('red', 'blue', 'lightgreen', 'gray', 'cyan')
   cmap = ListedColormap(colors[:len(np.unique(y))])

   # plot the decision surface
   x1_min, x1_max = X[:,  0].min() - 1, X[:, 0].max() + 1
   x2_min, x2_max = X[:, 1].min() - 1, X[:, 1].max() + 1
   xx1, xx2 = np.meshgrid(np.arange(x1_min, x1_max, resolution),
                          np.arange(x2_min, x2_max, resolution))
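   # classify every grid point, then reshape the predictions back to the
   # mesh shape so contourf can shade the two decision regions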
   Z = classifier.predict(np.array([xx1.ravel(), xx2.ravel()]).T)
   Z = Z.reshape(xx1.shape)
   plt.contourf(xx1, xx2, Z, alpha=0.4, cmap=cmap)
   plt.xlim(xx1.min(), xx1.max())
   plt.ylim(xx2.min(), xx2.max())

   # plot class samples
   for idx, cl in enumerate(np.unique(y)):
      plt.scatter(x=X[y == cl, 0], y=X[y == cl, 1],
                  alpha=0.8, c=cmap(idx),
                  marker=markers[idx], label=cl)

df = pd.read_csv('https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data', header=None)

y = df.iloc[0:100, 4].values
y = np.where(y == 'Iris-setosa', -1, 1)
X = df.iloc[0:100, [0, 2]].values

# standardize
X_std = np.copy(X)
X_std[:,0] = (X[:,0] - X[:,0].mean()) / X[:,0].std()
X_std[:,1] = (X[:,1] - X[:,1].mean()) / X[:,1].std()
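
# Sanity check (not in the original script): after standardization each
# column should have mean ~0 and standard deviation ~1.
print('means:', X_std.mean(axis=0), 'stds:', X_std.std(axis=0))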

sgd1 = SGD(niter=100, rate=0.01, random_state=1)
sgd2 = SGD(niter=50, rate=0.01, random_state=1)
sgd3 = SGD(niter=10, rate=0.01, random_state=1)

sgd1.fit(X_std, y)
sgd2.fit(X_std, y)
sgd3.fit(X_std, y)
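
# Quick diagnostic (not part of the original script): the two iris
# classes are linearly separable, so training accuracy should reach 1.0.
print('sgd1 accuracy:', (sgd1.predict(X_std) == y).mean())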

plt.plot(range(1, len(sgd1.cost) + 1), sgd1.cost,
         marker='o', linestyle='-', label='niter=100')
plt.plot(range(1, len(sgd2.cost) + 1), sgd2.cost,
         marker='o', linestyle='--', label='niter=50')
plt.plot(range(1, len(sgd3.cost) + 1), sgd3.cost,
         marker='o', linestyle=':', label='niter=10')

plt.xlabel('Epochs')
plt.ylabel('Average Cost')
plt.legend(loc='upper right')
plt.show()
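
# The plot_decision_regions helper above is defined but never called.
# A sketch of how it might be used (assumption: the attached picture also
# shows the decision regions; column 0 is sepal length, column 2 is petal length):
plot_decision_regions(X_std, y, classifier=sgd1)
plt.title('Adaline - Stochastic Gradient Descent')
plt.xlabel('sepal length [standardized]')
plt.ylabel('petal length [standardized]')
plt.legend(loc='upper left')
plt.show()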

Attachments:
Adaline Stochastic gradient descent.pdf
Python Stochastic gradient descent.py