Skip to content
Snippets Groups Projects
Commit 9fd8fe0a authored by s183897's avatar s183897 :ice_skate:
Browse files

Added files for model loss plot, model accuracy plot and hyperparameter tuning.

Added files for model loss plot, model accuracy plot and hyperparameter tuning. Added the tuned neural network to "main (neural network - prediction only).py".
parent e0700a24
Branches
No related tags found
No related merge requests found
import matplotlib.pyplot as plt
from sklearn.datasets import *
from sklearn.neural_network import MLPClassifier
#X = [[0.729411764705882, 0.647058823529412, 0.945098039215686], [0.482352941176471, 0.443137254901961, 0.0862745098039216], [0.643137254901961, 0.525490196078431, 0.313725490196078], [0.462745098039216, 0.933333333333333, 0.501960784313725], [0.588235294117647, 0.168627450980392, 0.768627450980392], [0.854901960784314, 0.635294117647059, 0.462745098039216], [0.580392156862745, 0.588235294117647, 0.305882352941176], [0.0666666666666667, 0.717647058823529, 0.0901960784313725], [0.133333333333333, 0.627450980392157, 0.0862745098039216], [0.16078431372549, 0.415686274509804, 0.125490196078431], [0.219607843137255, 0.52156862745098, 0.431372549019608], [0.556862745098039, 0.525490196078431, 0.611764705882353], [0.227450980392157, 0.741176470588235, 0.286274509803922], [0.176470588235294, 0.376470588235294, 0.466666666666667], [0.752941176470588, 0.0705882352941176, 0.541176470588235], [0.572549019607843, 0.227450980392157, 0.588235294117647], [0.862745098039216, 0.549019607843137, 0.654901960784314], [0.945098039215686, 0.490196078431373, 0.274509803921569], [0.749019607843137, 0.556862745098039, 0.780392156862745], [0.654901960784314, 0.937254901960784, 0.796078431372549], [0.890196078431372, 0.525490196078431, 0.101960784313725], [0.768627450980392, 0.670588235294118, 0.2], [0.905882352941176, 0.443137254901961, 0.654901960784314], [0.43921568627451, 0.56078431372549, 0.0509803921568627], [0.235294117647059, 0.552941176470588, 0.717647058823529], [0.105882352941176, 0.858823529411765, 0.788235294117647], [0.341176470588235, 0.101960784313725, 0.0823529411764706], [0.780392156862745, 0.745098039215686, 0.180392156862745], [0.741176470588235, 0.392156862745098, 0.976470588235294], [0.517647058823529, 0.258823529411765, 1], [0.854901960784314, 0.580392156862745, 0.101960784313725], [0.858823529411765, 0.211764705882353, 0.737254901960784], [0.968627450980392, 0.2, 0.419607843137255], [0.698039215686274, 0.380392156862745, 0.517647058823529], [0.905882352941176, 
0.0196078431372549, 0.988235294117647], [0.372549019607843, 0.419607843137255, 0.0470588235294118], [0.945098039215686, 0.635294117647059, 0.27843137254902], [0.47843137254902, 0.427450980392157, 0.729411764705882], [0.188235294117647, 0.976470588235294, 0.172549019607843], [0.309803921568627, 0.662745098039216, 0.56078431372549], [0.745098039215686, 0.0196078431372549, 0.215686274509804], [0.00784313725490196, 0.0313725490196078, 0.345098039215686], [0.341176470588235, 0.411764705882353, 0.850980392156863], [0.611764705882353, 0.254901960784314, 0.52156862745098], [0.266666666666667, 0.858823529411765, 0.925490196078431], [0.980392156862745, 0.647058823529412, 0.933333333333333], [0.447058823529412, 0.137254901960784, 0.635294117647059], [0.333333333333333, 0.603921568627451, 0.890196078431372], [0.670588235294118, 0.909803921568627, 0.462745098039216], [0.831372549019608, 0.16078431372549, 0.619607843137255], [0.192156862745098, 0.0784313725490196, 0.231372549019608], [0.713725490196078, 0.529411764705882, 0.976470588235294], [0.203921568627451, 0.749019607843137, 0.980392156862745], [0.87843137254902, 0.368627450980392, 0.356862745098039], [0.913725490196078, 0.952941176470588, 0.498039215686275], [0.0666666666666667, 0.929411764705882, 0.266666666666667], [0.784313725490196, 0.486274509803922, 0.152941176470588], [0.117647058823529, 0.741176470588235, 0.894117647058824], [0.776470588235294, 0.466666666666667, 0.533333333333333], [0.133333333333333, 0.254901960784314, 0.494117647058824], [0.635294117647059, 0.294117647058824, 0.882352941176471], [0.627450980392157, 0.470588235294118, 0.47843137254902], [0.592156862745098, 0.815686274509804, 0.145098039215686], [0.290196078431373, 0.333333333333333, 0.262745098039216], [0.0392156862745098, 0.552941176470588, 0.847058823529412], [0.725490196078431, 0.835294117647059, 0.164705882352941], [0.619607843137255, 0.882352941176471, 0.945098039215686], [0.36078431372549, 0.807843137254902, 0.0823529411764706], 
[0.498039215686275, 0.941176470588235, 0.509803921568627], [0.572549019607843, 0.733333333333333, 0.72156862745098], [0.0745098039215686, 0.113725490196078, 0.623529411764706], [0.533333333333333, 0.325490196078431, 0.00784313725490196], [0.752941176470588, 0.811764705882353, 0.917647058823529], [0.180392156862745, 0.2, 0.941176470588235], [0.27843137254902, 0.909803921568627, 0.156862745098039], [0.133333333333333, 0.650980392156863, 0.258823529411765], [0.250980392156863, 0.588235294117647, 0.435294117647059], [0.898039215686275, 0.729411764705882, 0.294117647058824], [0.631372549019608, 0.0392156862745098, 0.717647058823529], [0.564705882352941, 0.392156862745098, 0.541176470588235], [0.317647058823529, 0.596078431372549, 0.4], [0.133333333333333, 0.274509803921569, 0.474509803921569], [0.749019607843137, 0.164705882352941, 0.713725490196078], [0.305882352941176, 0.749019607843137, 0.533333333333333], [0.145098039215686, 0.647058823529412, 0.356862745098039], [0.925490196078431, 0.243137254901961, 0.972549019607843], [0.890196078431372, 0.286274509803922, 0.749019607843137], [0.227450980392157, 0.509803921568627, 0.929411764705882], [0.823529411764706, 0.686274509803922, 0.631372549019608], [0.0901960784313725, 0.125490196078431, 0.113725490196078], [0.423529411764706, 0.498039215686275, 0.266666666666667], [0.780392156862745, 0.682352941176471, 0.176470588235294], [0.00392156862745098, 0.682352941176471, 0.607843137254902], [0.725490196078431, 0.556862745098039, 0.215686274509804], [0.443137254901961, 0.0666666666666667, 0.823529411764706], [0.713725490196078, 0.980392156862745, 0.882352941176471], [0.988235294117647, 0.36078431372549, 0.83921568627451], [0.486274509803922, 0.792156862745098, 0.235294117647059], [0.482352941176471, 0.603921568627451, 0.447058823529412], [0.592156862745098, 0.152941176470588, 0.733333333333333]]
......@@ -17,3 +15,4 @@ def pred(color):
print("black")
else:
print("white")
This diff is collapsed.
......@@ -93,16 +93,19 @@ while True:
# convert RGB values to between 0 and 1
# NOTE(review): assumes `color` is a NumPy array of 0-255 channel values —
# `z` is later reshaped with `.reshape(1,-1)` for clf.predict, which requires
# an ndarray; confirm against the (unseen) part of the file that sets `color`.
z = color/255
# training data (sample size: 384)
# Each row is one RGB color, channels already normalised to [0, 1].
X = [[0.729411764705882, 0.647058823529412, 0.945098039215686], [0.482352941176471, 0.443137254901961, 0.0862745098039216], [0.643137254901961, 0.525490196078431, 0.313725490196078], [0.462745098039216, 0.933333333333333, 0.501960784313725], [0.588235294117647, 0.168627450980392, 0.768627450980392], [0.854901960784314, 0.635294117647059, 0.462745098039216], [0.580392156862745, 0.588235294117647, 0.305882352941176], [0.0666666666666667, 0.717647058823529, 0.0901960784313725], [0.133333333333333, 0.627450980392157, 0.0862745098039216], [0.16078431372549, 0.415686274509804, 0.125490196078431], [0.219607843137255, 0.52156862745098, 0.431372549019608], [0.556862745098039, 0.525490196078431, 0.611764705882353], [0.227450980392157, 0.741176470588235, 0.286274509803922], [0.176470588235294, 0.376470588235294, 0.466666666666667], [0.752941176470588, 0.0705882352941176, 0.541176470588235], [0.572549019607843, 0.227450980392157, 0.588235294117647], [0.862745098039216, 0.549019607843137, 0.654901960784314], [0.945098039215686, 0.490196078431373, 0.274509803921569], [0.749019607843137, 0.556862745098039, 0.780392156862745], [0.654901960784314, 0.937254901960784, 0.796078431372549], [0.890196078431372, 0.525490196078431, 0.101960784313725], [0.768627450980392, 0.670588235294118, 0.2], [0.905882352941176, 0.443137254901961, 0.654901960784314], [0.43921568627451, 0.56078431372549, 0.0509803921568627], [0.235294117647059, 0.552941176470588, 0.717647058823529], [0.105882352941176, 0.858823529411765, 0.788235294117647], [0.341176470588235, 0.101960784313725, 0.0823529411764706], [0.780392156862745, 0.745098039215686, 0.180392156862745], [0.741176470588235, 0.392156862745098, 0.976470588235294], [0.517647058823529, 0.258823529411765, 1], [0.854901960784314, 0.580392156862745, 0.101960784313725], [0.858823529411765, 0.211764705882353, 0.737254901960784], [0.968627450980392, 0.2, 0.419607843137255], [0.698039215686274, 0.380392156862745, 0.517647058823529], [0.905882352941176, 
0.0196078431372549, 0.988235294117647], [0.372549019607843, 0.419607843137255, 0.0470588235294118], [0.945098039215686, 0.635294117647059, 0.27843137254902], [0.47843137254902, 0.427450980392157, 0.729411764705882], [0.188235294117647, 0.976470588235294, 0.172549019607843], [0.309803921568627, 0.662745098039216, 0.56078431372549], [0.745098039215686, 0.0196078431372549, 0.215686274509804], [0.00784313725490196, 0.0313725490196078, 0.345098039215686], [0.341176470588235, 0.411764705882353, 0.850980392156863], [0.611764705882353, 0.254901960784314, 0.52156862745098], [0.266666666666667, 0.858823529411765, 0.925490196078431], [0.980392156862745, 0.647058823529412, 0.933333333333333], [0.447058823529412, 0.137254901960784, 0.635294117647059], [0.333333333333333, 0.603921568627451, 0.890196078431372], [0.670588235294118, 0.909803921568627, 0.462745098039216], [0.831372549019608, 0.16078431372549, 0.619607843137255], [0.192156862745098, 0.0784313725490196, 0.231372549019608], [0.713725490196078, 0.529411764705882, 0.976470588235294], [0.203921568627451, 0.749019607843137, 0.980392156862745], [0.87843137254902, 0.368627450980392, 0.356862745098039], [0.913725490196078, 0.952941176470588, 0.498039215686275], [0.0666666666666667, 0.929411764705882, 0.266666666666667], [0.784313725490196, 0.486274509803922, 0.152941176470588], [0.117647058823529, 0.741176470588235, 0.894117647058824], [0.776470588235294, 0.466666666666667, 0.533333333333333], [0.133333333333333, 0.254901960784314, 0.494117647058824], [0.635294117647059, 0.294117647058824, 0.882352941176471], [0.627450980392157, 0.470588235294118, 0.47843137254902], [0.592156862745098, 0.815686274509804, 0.145098039215686], [0.290196078431373, 0.333333333333333, 0.262745098039216], [0.0392156862745098, 0.552941176470588, 0.847058823529412], [0.725490196078431, 0.835294117647059, 0.164705882352941], [0.619607843137255, 0.882352941176471, 0.945098039215686], [0.36078431372549, 0.807843137254902, 0.0823529411764706], 
[0.498039215686275, 0.941176470588235, 0.509803921568627], [0.572549019607843, 0.733333333333333, 0.72156862745098], [0.0745098039215686, 0.113725490196078, 0.623529411764706], [0.533333333333333, 0.325490196078431, 0.00784313725490196], [0.752941176470588, 0.811764705882353, 0.917647058823529], [0.180392156862745, 0.2, 0.941176470588235], [0.27843137254902, 0.909803921568627, 0.156862745098039], [0.133333333333333, 0.650980392156863, 0.258823529411765], [0.250980392156863, 0.588235294117647, 0.435294117647059], [0.898039215686275, 0.729411764705882, 0.294117647058824], [0.631372549019608, 0.0392156862745098, 0.717647058823529], [0.564705882352941, 0.392156862745098, 0.541176470588235], [0.317647058823529, 0.596078431372549, 0.4], [0.133333333333333, 0.274509803921569, 0.474509803921569], [0.749019607843137, 0.164705882352941, 0.713725490196078], [0.305882352941176, 0.749019607843137, 0.533333333333333], [0.145098039215686, 0.647058823529412, 0.356862745098039], [0.925490196078431, 0.243137254901961, 0.972549019607843], [0.890196078431372, 0.286274509803922, 0.749019607843137], [0.227450980392157, 0.509803921568627, 0.929411764705882], [0.823529411764706, 0.686274509803922, 0.631372549019608], [0.0901960784313725, 0.125490196078431, 0.113725490196078], [0.423529411764706, 0.498039215686275, 0.266666666666667], [0.780392156862745, 0.682352941176471, 0.176470588235294], [0.00392156862745098, 0.682352941176471, 0.607843137254902], [0.725490196078431, 0.556862745098039, 0.215686274509804], [0.443137254901961, 0.0666666666666667, 0.823529411764706], [0.713725490196078, 0.980392156862745, 0.882352941176471], [0.988235294117647, 0.36078431372549, 0.83921568627451], [0.486274509803922, 0.792156862745098, 0.235294117647059], [0.482352941176471, 0.603921568627451, 0.447058823529412], [0.592156862745098, 0.152941176470588, 0.733333333333333]]
# NOTE(review): this second assignment immediately overwrites the full X above
# with a 10-sample subset.  This page is a scraped diff, so both the removed
# and the added line appear to have been captured; only ONE of these
# assignments should exist in the real file, and len(X) must equal len(Y).
X = [[0.729411764705882, 0.647058823529412, 0.945098039215686], [0.482352941176471, 0.443137254901961, 0.0862745098039216], [0.643137254901961, 0.525490196078431, 0.313725490196078], [0.462745098039216, 0.933333333333333, 0.501960784313725], [0.588235294117647, 0.168627450980392, 0.768627450980392], [0.854901960784314, 0.635294117647059, 0.462745098039216], [0.580392156862745, 0.588235294117647, 0.305882352941176], [0.0666666666666667, 0.717647058823529, 0.0901960784313725], [0.133333333333333, 0.627450980392157, 0.0862745098039216], [0.16078431372549, 0.415686274509804, 0.125490196078431]]
# data classes (0 = black, 1 = white)
# One label per row of X: the text color (black/white) that best contrasts
# with the corresponding RGB sample.
Y = [0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1]
# NOTE(review): as with X, this second assignment overwrites the 100-label
# list above with a 10-label subset — both sides of a diff hunk captured by
# the scrape; keep exactly one assignment, with len(Y) == len(X).
Y = [0, 1, 0, 0, 1, 0, 0, 0, 0, 1]
# non-tuned MLPClassifier (kept for reference):
# clf = MLPClassifier(solver='lbfgs', alpha=1e-5, hidden_layer_sizes=(5, 2), random_state=1)
#
# Tuned network from the hyperparameter search: a single hidden layer of 7
# units with ReLU activation and a constant learning rate.  random_state pins
# the weight initialisation so the experiment is reproducible.
# (The original code first built the non-tuned classifier and immediately
# discarded it by rebinding `clf`; that dead construction is removed here.
# `(7,)` is written as an explicit tuple — `(7)` is just the int 7, which
# sklearn silently wraps; behavior is identical either way.)
clf = MLPClassifier(activation="relu", hidden_layer_sizes=(7,), alpha=1e-06,
                    learning_rate="constant", random_state=1, max_iter=1000)
# fit model to training data and classes
clf.fit(X, Y)
# please deactivate prediction display to prevent placebo effect during experiment
# NOTE(review): `points`, `display`, `predPic` and `width` are defined in a
# part of the file not visible here — presumably pygame surfaces and the list
# of samples collected so far; verify against the full script.
if len(points) > 1:
# NOTE(review): the leading indentation of the two lines below was flattened
# by the diff scrape — in the real file they must be nested under the `if`
# above (predict == 1 means "white text", and the blit draws the prediction
# image; the remaining branches of this block are cut off after this hunk).
if clf.predict(z.reshape(1,-1)) == 1:
display.blit(predPic, (width / 3 - 80 / 2, 200))
......
This diff is collapsed.
This diff is collapsed.
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment