experimental

Lukas Karras
2024-11-26 12:38:12 +01:00
parent c724954f78
commit 681b99f0d5
2 changed files with 16 additions and 7 deletions


@@ -37,8 +37,8 @@ class NeuralNetwork:
         correct_amount = 0
         false_amount = 0
-        for i in range(len(training_set)):
-            fv_to_train = training_set # random.choice(training_set)
+        for i in range(len(training_set)*100):
+            fv_to_train = training_set[i%len(training_set)] # random.choice(training_set)
             classified_concept, correct = self.classify(fv_to_train)
             if not correct:
@@ -47,7 +47,7 @@ class NeuralNetwork:
             else:
                 correct_amount += 1
-            print(f"{i}: {(correct_amount/(correct_amount + false_amount)) * 100}%")
+            #print(f"{i}: {(correct_amount/(correct_amount + false_amount)) * 100}%")
         pass
         # for a sequence of 3 neurons, interpret their meaning
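
The change above turns the single pass over training_set into 100 passes by cycling the index with a modulo, and replaces the old line that apparently assigned the whole list to fv_to_train instead of a single vector. A minimal standalone sketch of that schedule, with a placeholder train_step standing in for the classify-and-adjust call (not the repository's actual NeuralNetwork code):

training_set = list(range(10))  # stand-in for the list of feature vectors

def train_step(fv):
    # placeholder for self.classify(fv_to_train) plus the weight update
    return fv

for i in range(len(training_set) * 100):                 # 100 passes over the data
    fv_to_train = training_set[i % len(training_set)]    # cycles through the set repeatedly
    train_step(fv_to_train)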


@@ -2,6 +2,7 @@ import os
 import ast
 import csv
 import numpy as np
+import multiprocessing
 from classes.concept import Concept
 from classes.feature_vector import FeatureVector
@@ -96,10 +97,18 @@ def neural_network(training = os.path.abspath(os.path.join(__file__, "..", "trai
         cnt_correct += 1 if fv.concept == classified_concept else 0
         # print(f"{fv.concept} was classified as {classified_concept}")
-    print(f"classified {cnt_correct}/{cnt_data} correctly ({round(cnt_correct/cnt_data*100, 3)}%)")
+    #print(f"classified {cnt_correct}/{cnt_data} correctly ({round(cnt_correct/cnt_data*100, 3)}%)")
     return [cnt_correct, cnt_data - cnt_correct, cnt_data] # correct, wrong, amount of testing data
 
+def repeat_n_times(n: int, num_training_vectors: int):
+    for _ in range(n):
+        cnt_correct = 0
+        cnt_data = 0
+        result = neural_network(num_training_vectors=num_training_vectors)
+        cnt_correct += result[0]
+        cnt_data += result[2]
+        print(f"{num_training_vectors}: classified {cnt_correct}/{cnt_data} correctly ({round(cnt_correct/cnt_data*100, 3)}%)")
 
 if __name__ == "__main__":
-    for i in range(100, 10000, 100):
-        print(f"{i} training vectors: ", end="")
-        neural_network()
+    for num_training_vectors in range(100, 1000, 100):
+        multiprocessing.Process(target=repeat_n_times, args=(100, num_training_vectors, )).start()
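
With the new repeat_n_times helper, the main block now starts one multiprocessing.Process per training-set size (100 to 900 in steps of 100), each repeating the experiment 100 times. A minimal sketch of that fan-out pattern, with a dummy worker in place of the real repeat_n_times; the commit starts the workers without joining them, so their print output can interleave, while the sketch below joins them for a clean exit:

import multiprocessing

def repeat_n_times(n: int, num_training_vectors: int):
    # dummy worker: the real one trains and evaluates the network n times
    for run in range(n):
        print(f"{num_training_vectors} training vectors, run {run}")

if __name__ == "__main__":
    workers = []
    for num_training_vectors in range(100, 1000, 100):   # 100, 200, ..., 900
        p = multiprocessing.Process(target=repeat_n_times, args=(2, num_training_vectors))
        p.start()
        workers.append(p)
    for p in workers:
        p.join()   # wait for every worker before exiting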