2 step = 0 # If 0.5, we need no bias for AND and OR; if 0, none for NOT.
3 # With learning, the bias will slowly balance any choice.
# NOTE(review): `step` is assigned 0 here but is called like a function further
# down (step(s + bias)); the callable step/threshold definition presumably
# lives on a line missing from this excerpt — confirm against the full file.
# Interior of the feed-forward function (its `def` line and the `s = 0`
# initialisation, orig. lines ~9-10, are missing from this excerpt).
11 perceptron['inputs'] = inputs[:]  # keep a copy: the training loop reads perceptron['inputs'] when adapting weights
12 for i in range(len(inputs)):
13 s += inputs[i] * perceptron['weights'][i]  # weighted sum of inputs
14 return step(s + perceptron['bias'])  # step activation of (weighted sum + bias)
17 #training_set = [((0,), 0),
21 #training_set = [((0,), 1),
25 #training_set = [((0,0), 0),
31 #training_set = [((0,0), 0),
36 # NOT (with one irrelevant column)
37 #training_set = [((0,0), 1),
42 # XOR (will fail, as Minsky/Papert say)
43 #training_set = [((0,0), 0),
48 # 1 if above f(x)=x line, else 0
# Active training set: points labelled 1 above the line y = x, 0 below it.
# NOTE(review): the remaining (input, target) tuples of this list literal
# (orig. lines 50-53) are missing from this excerpt.
49 training_set = [((0,1), 1),
54 # 1 if above f(x)=x**2, else 0 (will fail: no linear separability)
55 #training_set = [((2,4), 0),
# Perceptron state: one zero-initialised weight per input column.
# NOTE(review): the rest of this dict literal (presumably the 'bias' entry
# read elsewhere in the file) is missing from this excerpt.
60 perceptron = {'weights': [0 for i in range(len(training_set[0][0]))],
67 for element in training_set:
# NOTE(review): the lines unpacking `element` into `inputs`/`target`
# (orig. lines 68-69) are missing from this excerpt.
70 result_ = result(inputs)
71 print("inputs %s target %s result %s correctness %5s weights %s bias %s" % (inputs, target, result_, target==result_, perceptron['weights'], perceptron['bias']))
# NOTE(review): orig. lines 72-73 are missing — presumably the
# `if target != result_:` guard around the update below; confirm.
# Perceptron learning rule: nudge bias and weights by
# adaption_size * (target - result_) [* input_i for each weight].
74 perceptron['bias'] += adaption_size * (target - result_)
75 for i in range(len(perceptron['weights'])):
76 perceptron['weights'][i] += adaption_size * (target - result_) * perceptron['inputs'][i]
# NOTE(review): presumably printed when the loop above exhausted its epochs
# without fitting the training set; the enclosing condition (orig. lines
# 77-80) is missing from this excerpt — confirm.
81 print('COULD NOT SOLVE.')