Single Layer Perceptron Implementation for the AND Gate
06 May, 2023 Bedram Tamang

I implemented a single layer perceptron (SLP): a feed-forward network that uses a threshold transfer function. The SLP is the simplest form of artificial neural network, and it can only classify cases that are linearly separable with a binary target of 1 or 0. Specifically, I trained the SLP to learn the AND gate.
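
To make the learning rule concrete: the perceptron computes a weighted sum of its inputs plus a bias and outputs 1 only when that sum is positive; whenever a prediction is wrong, each weight is nudged by learning_rate * (label - prediction) * input, and the bias by learning_rate * (label - prediction). The snippet below is just an illustrative sketch of the very first update on the AND example [1, 1], using the same zero-initialized weights and 0.01 learning rate as the class that follows.

import numpy as np

weights = np.zeros(3)                  # [bias, w1, w2], all start at zero
x, label = np.array([1, 1]), 1         # first AND training example
prediction = 1 if np.dot(x, weights[1:]) + weights[0] > 0 else 0  # 0, so this example is misclassified
weights[1:] += 0.01 * (label - prediction) * x   # input weights become [0.01, 0.01]
weights[0] += 0.01 * (label - prediction)        # bias becomes 0.01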

# Perceptron.py
import numpy as np

class Perceptron:

    def __init__(self, no_of_inputs, threshold=100, learning_rate=0.01):
        # "threshold" here is the number of training passes (epochs),
        # not the activation threshold, which is fixed at zero in predict()
        self.threshold = threshold
        self.learning_rate = learning_rate
        # one weight per input, plus a bias term stored at index 0
        self.weights = np.zeros(no_of_inputs + 1)

    def predict(self, inputs):
        # weighted sum of the inputs plus the bias
        summation = np.dot(inputs, self.weights[1:]) + self.weights[0]
        # step (threshold) activation: output 1 if the sum is positive, else 0
        if summation > 0:
            activation = 1
        else:
            activation = 0
        return activation

    def train(self, training_inputs, labels):
        for _ in range(self.threshold):
            for inputs, label in zip(training_inputs, labels):
                prediction = self.predict(inputs)
                # perceptron learning rule: shift each weight by
                # learning_rate * (label - prediction) * input, and the bias likewise
                self.weights[1:] += self.learning_rate * (label - prediction) * inputs
                self.weights[0] += self.learning_rate * (label - prediction)

# app.py
import numpy as np
from Perceptron import Perceptron

# Training data: the four rows of the AND truth table
training_inputs = []
training_inputs.append(np.array([1, 1]))
training_inputs.append(np.array([1, 0]))
training_inputs.append(np.array([0, 1]))
training_inputs.append(np.array([0, 0]))

# Expected AND output for each row above
labels = np.array([1, 0, 0, 0])

# Two inputs; training uses the defaults of 100 passes and a 0.01 learning rate
perceptron = Perceptron(2)
perceptron.train(training_inputs, labels)

inputs = np.array([1, 1])
perceptron.predict(inputs)
#=> 1

inputs = np.array([0, 0])
print("inputs " + str(inputs) + " Output " + str(perceptron.predict(inputs)))
#=> inputs [0 0] Output 0

inputs = np.array([0, 1])
print("inputs " + str(inputs) + " Output " + str(perceptron.predict(inputs)))
#=> inputs [0 1] Output 0

inputs = np.array([1, 1])
print("inputs " + str(inputs) + " Output " + str(perceptron.predict(inputs)))
#=> inputs [1 1] Output 1

