From 7b43ffedfcdb7547e82b7d50ddfd22d6da7949ea Mon Sep 17 00:00:00 2001
From: "juliano.souzaluz" <juliano.souza-luz@etu.hesge.ch>
Date: Wed, 15 Nov 2023 01:17:15 +0100
Subject: [PATCH] update code (not finished)

---
 .idea/misc.xml          |  5 ++++-
 .idea/tp-clustering.iml |  2 +-
 decisiontree-iris.py    |  1 -
 decisiontree-student.py |  1 -
 perceptron-tp3.py       | 39 +++++++++++++++++++++++----------------
 perceptron.py           |  2 +-
 6 files changed, 29 insertions(+), 21 deletions(-)

diff --git a/.idea/misc.xml b/.idea/misc.xml
index 12ebf45..372062d 100644
--- a/.idea/misc.xml
+++ b/.idea/misc.xml
@@ -1,7 +1,10 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project version="4">
+  <component name="Black">
+    <option name="sdkName" value="Python 3.9" />
+  </component>
   <component name="MarkdownSettingsMigration">
     <option name="stateVersion" value="1" />
   </component>
-  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.10.12 WSL (Ubuntu-20.04): (/home/thib/.virtualenvs/tp-clustering/bin/python)" project-jdk-type="Python SDK" />
+  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9" project-jdk-type="Python SDK" />
 </project>
\ No newline at end of file
diff --git a/.idea/tp-clustering.iml b/.idea/tp-clustering.iml
index df7c332..74d515a 100644
--- a/.idea/tp-clustering.iml
+++ b/.idea/tp-clustering.iml
@@ -4,7 +4,7 @@
     <content url="file://$MODULE_DIR$">
       <excludeFolder url="file://$MODULE_DIR$/venv" />
     </content>
-    <orderEntry type="jdk" jdkName="Python 3.10.12 WSL (Ubuntu-20.04): (/home/thib/.virtualenvs/tp-clustering/bin/python)" jdkType="Python SDK" />
+    <orderEntry type="inheritedJdk" />
     <orderEntry type="sourceFolder" forTests="false" />
   </component>
 </module>
\ No newline at end of file
diff --git a/decisiontree-iris.py b/decisiontree-iris.py
index 4fe15e3..c69fd0a 100644
--- a/decisiontree-iris.py
+++ b/decisiontree-iris.py
@@ -18,7 +18,6 @@ y_iris = df_iris['class']
 X_train_iris, X_test_iris, y_train_iris, y_test_iris = train_test_split(X_iris, y_iris, test_size=0.2, random_state=42)
 
 # Build the decision tree with specific parameters
-# You can experiment with the min_samples_leaf and max_depth values
 clf_iris = DecisionTreeClassifier(min_samples_leaf=5, max_depth=3)
 clf_iris.fit(X_train_iris, y_train_iris)
 
diff --git a/decisiontree-student.py b/decisiontree-student.py
index c2eaa34..17afbba 100644
--- a/decisiontree-student.py
+++ b/decisiontree-student.py
@@ -18,7 +18,6 @@ y = df['success']
 X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)  # You can adjust the test size if needed
 
 # Build the decision tree with specific parameters
-# You can experiment with the min_samples_leaf and max_depth values
 clf = DecisionTreeClassifier(min_samples_leaf=5, max_depth=3)
 clf.fit(X_train, y_train)
 
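For reference, a minimal end-to-end version of the pipeline both decision-tree scripts follow. It uses scikit-learn's bundled iris data instead of the project's CSV files, so the loading step is illustrative rather than the repo's (the student script is analogous, with df['success'] as the target):

from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier

# Bundled iris data as a stand-in for the project's CSV files
iris = load_iris(as_frame=True)
X, y = iris.data, iris.target

# Same split as in the patch: 20% held out for testing
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Same hyperparameters as in the patch: at least 5 samples per leaf, depth capped at 3
clf = DecisionTreeClassifier(min_samples_leaf=5, max_depth=3)
clf.fit(X_train, y_train)
print("test accuracy:", clf.score(X_test, y_test))

Capping max_depth and requiring a minimum number of samples per leaf keeps the tree small and reduces overfitting, which is what these hyperparameters are for in both scripts.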
diff --git a/perceptron-tp3.py b/perceptron-tp3.py
index 63f1717..da63e76 100644
--- a/perceptron-tp3.py
+++ b/perceptron-tp3.py
@@ -1,26 +1,31 @@
-# Author : Capt Thibault , Souza Luz Juliano
+# Author : Capt Thibault, Souza Luz Juliano
 # Date : 31.10.2023
-# Project : Perceptron
-# Description : This file represents our work for the perceptron lab
+# Project : Multilayer perceptron
+# Description : This file represents our work for the multilayer perceptron lab
 import numpy as np
 import pandas as pd
 from matplotlib import pyplot as plt
 
-def upd_weights_hidden(wi_old, learning, t, y, xi):
-    return wi_old + learning * (t - y) * y * (1 - y) * xi
+
+def upd_weights_hidden(wi_old, learning, delta, xi):
+    return wi_old + learning * np.outer(xi, delta)
+
 
 def upd_weights_output(wi_old, learning, delta, y):
     return wi_old + learning * delta * y
 
+
 def sigmoide(x):
     return 1 / (1 + np.exp(-x))
 
+
 def predict(poids_input_hidden, poids_hidden_output, input_data):
-    hidden_input = np.dot(poids_input_hidden, input_data)
+    hidden_input = np.dot(input_data, poids_input_hidden)
     hidden_output = sigmoide(hidden_input)
-    output = sigmoide(np.dot(poids_hidden_output, np.insert(hidden_output, 0, 1)))
+    output = sigmoide(np.dot(np.insert(hidden_output, 0, 1), poids_hidden_output))
     return output, hidden_output
 
+
 if __name__ == '__main__':
     dataset = pd.read_csv("Data/student-data-train.csv", header=0)
     dataset['norm_grade_1'] = (dataset['grade_1'] - dataset['grade_1'].mean()) / dataset['grade_1'].std()
@@ -32,6 +37,7 @@ if __name__ == '__main__':
 
     learning_rate = 1e-2
     max_iterations = 2000
+    num_hidden = 10
 
     # Initialize weights for input to hidden layer and hidden to output layer
     weights_input_hidden = np.random.rand(num_features + 1, num_hidden) - 0.5
@@ -48,13 +54,14 @@ if __name__ == '__main__':
             target = y[i]
 
             # Forward pass
-            hidden_output, output = predict(weights_input_hidden, weights_hidden_output, input_data)
+            output, hidden_output = predict(weights_input_hidden, weights_hidden_output, input_data)
 
             # Backpropagation
-            delta_output = (target - output) * output * (1 - output)
-            weights_hidden_output = upd_weights_output(weights_hidden_output, learning_rate, delta_output, np.insert(hidden_output, 0, 1))
+            delta_output = output * (1 - output) * (target - output)
+            weights_hidden_output = upd_weights_output(weights_hidden_output, learning_rate, delta_output,
+                                                       np.insert(hidden_output, 0, 1))
 
-            delta_hidden = hidden_output * (1 - hidden_output) * delta_output * weights_hidden_output[1:]
+            delta_hidden = hidden_output * (1 - hidden_output) * np.dot(delta_output, weights_hidden_output[1:])
             weights_input_hidden = upd_weights_hidden(weights_input_hidden, learning_rate, delta_hidden, input_data)
 
             total_error += (target - output) ** 2 / 2
@@ -65,17 +72,17 @@ if __name__ == '__main__':
     correct_classifications = 0
     for i in range(len(X)):
         input_data = np.insert(X[i], 0, 1)
-        cible = y[i]
-        sortie = sigmoide(np.dot(poids, input_data))
-        pred = 1 if sortie >= 0.5 else 0
-        if pred == cible:
+        target = y[i]
+        output, _ = predict(weights_input_hidden, weights_hidden_output, input_data)
+        pred = 1 if output >= 0.5 else 0
+        if pred == target:
             correct_classifications += 1
 
     accuracy = correct_classifications / len(X)
     print(f"Taux de classifications correctes: {accuracy * 100}%")
 
     # Display the separation line between the classes
-    w1, w2, b = poids[1], poids[2], poids[0]
+    w1, w2, b = weights_hidden_output[1], weights_hidden_output[2], weights_hidden_output[0]
     pente = -w1 / w2
     intercept = -b / w2
     print(f"Droite de séparation: y = {pente}x + {intercept}")
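For context, a minimal, self-contained sketch of the two-layer training loop this patch moves toward: one hidden layer, a single sigmoid output, bias terms prepended to the inputs and hidden activations, and the same weight shapes as above. The synthetic blob data and the names sigmoid/forward are illustrative assumptions, not from the repo; here the hidden deltas are computed from the output weights before those weights are updated.

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def forward(w_ih, w_ho, x):
    # x already carries a leading bias of 1; w_ih is (n_features + 1, n_hidden), w_ho is (n_hidden + 1,)
    hidden = sigmoid(x @ w_ih)                         # hidden activations, shape (n_hidden,)
    output = sigmoid(np.insert(hidden, 0, 1) @ w_ho)   # scalar output in (0, 1)
    return output, hidden

rng = np.random.default_rng(42)

# Synthetic two-feature, two-class data standing in for the normalized grades
n = 200
X = np.vstack([rng.normal(-1.0, 0.5, (n // 2, 2)), rng.normal(1.0, 0.5, (n // 2, 2))])
y = np.array([0] * (n // 2) + [1] * (n // 2))

n_features, n_hidden = X.shape[1], 10
learning_rate, max_iterations = 1e-2, 2000

w_ih = rng.random((n_features + 1, n_hidden)) - 0.5   # input -> hidden weights (with bias row)
w_ho = rng.random(n_hidden + 1) - 0.5                 # hidden -> output weights (with bias)

for epoch in range(max_iterations):
    total_error = 0.0
    for i in range(len(X)):
        x = np.insert(X[i], 0, 1)        # prepend the bias input
        target = y[i]
        output, hidden = forward(w_ih, w_ho, x)

        # Delta of the sigmoid output unit under squared error
        delta_output = output * (1 - output) * (target - output)
        # Hidden deltas, taken from the output weights before they are updated
        delta_hidden = hidden * (1 - hidden) * (delta_output * w_ho[1:])

        w_ho += learning_rate * delta_output * np.insert(hidden, 0, 1)
        w_ih += learning_rate * np.outer(x, delta_hidden)

        total_error += (target - output) ** 2 / 2

predictions = [1 if forward(w_ih, w_ho, np.insert(xi, 0, 1))[0] >= 0.5 else 0 for xi in X]
print("training accuracy:", np.mean(np.array(predictions) == y))

With these shapes, x @ w_ih gives the hidden pre-activations and np.outer(x, delta_hidden) matches w_ih's (n_features + 1, n_hidden) shape, which is why the patch switches the np.dot argument order in predict and uses np.outer in upd_weights_hidden.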
diff --git a/perceptron.py b/perceptron.py
index 63f9d30..55c3b9f 100644
--- a/perceptron.py
+++ b/perceptron.py
@@ -1,4 +1,4 @@
-# Author : Capt Thibault , Souza Luz Juliano
+# Author : Capt Thibault, Souza Luz Juliano
 # Date : 31.10.2023
 # Project : Perceptron
 # Description : This file represents our work for the perceptron lab
-- 
GitLab