diff --git a/main.py b/main.py
index 3abc6c7..852af6c 100644
--- a/main.py
+++ b/main.py
@@ -25,7 +25,7 @@
 saver = tf.train.Saver(variables)
 saver.restore(sess, "wine_quality/data/softmax_regression.ckpt")
 def simple(x1):
-    return sess.run(y1, feed_dict={x: x1})
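+    # flatten().tolist() converts the model's output array into a plain Python list so the template can index it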
+    return sess.run(y1, feed_dict={x: x1}).flatten().tolist()
 csrf = CsrfProtect()
@@ -48,11 +48,30 @@ def hello_world():
 def test_parameters():
     form = TestParameterForm(request.form)
     if request.method == 'POST' and form.validate():
+        volatile_acidity = float(form.volatile_acidity.data)
+        citric_acid = float(form.citric_acid.data)
+        residual_sugar = float(form.residual_sugar.data)
+        chlorides = float(form.chlorides.data)
+        free_sulfur_dioxide = float(form.free_sulfur_dioxide.data)
+        total_sulfur_dioxide = float(form.total_sulfur_dioxide.data)
+        density = float(form.density.data)
+        ph = float(form.ph.data)
+        sulphates = float(form.sulphates.data)
+        alcohol = float(form.alcohol.data)
+        input_list = [volatile_acidity, citric_acid, residual_sugar, chlorides, free_sulfur_dioxide,
+                      total_sulfur_dioxide, density, ph, sulphates, alcohol]
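+        # fixed_acidity is deliberately not collected: training.py drops fixed_acidity and quality,
+        # so the model expects exactly these 10 features in this order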
         print(form.__dict__)
+        # VALUES TO USE: [0.7, 0, 1.9, 0.076, 11, 34, 0.99780, 3.51, 0.56, 9.4]
         # simple([[0.7, 0, 1.9, 0.076, 11, 34, 0.99780, 3.51, 0.56, 9.4]])
-        results = simple([[0.7, 0, 1.9, 0.076, 11, 34, 0.99780, 3.51, 0.56, 9.4]])
-        return render_template('test_parameters.html', form=form, result=results[0])
-    return render_template('test_parameters.html', form=form)
+        # results = simple([[0.7, 0, 1.9, 0.076, 11, 34, 0.99780, 3.51, 0.56, 9.4]])  # Bad wine
+        # results = simple([[0.35, 0.46, 3.6, 0.078, 15, 37, 0.99730, 3.35, 0.86, 12.8]])  # Good wine
+        results = simple([input_list])
+        # return render_template('test_parameters.html', form=form, result=results)
+        print(results)
+    else:
+        results = None
+
+    return render_template('test_parameters.html', form=form, result=results)
 @app.route('/train/', methods=('GET', 'POST'))
@@ -66,8 +85,10 @@ def upload():
         print(form.__dict__)
         # Save to Redis here
         form.training_data.data.save('wine_quality/data/' + filename)
-        dataframe = pd.read_csv('wine_quality/data/' + filename, sep=';')
-        train_model(dataframe, learning_rate, batch_size)
+        dataframe = pd.read_csv('wine_quality/data/' + filename, sep=',')
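+        # the uploaded CSV is now expected to be comma-separated (the previous code read ';'-separated files)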
+        model_log = train_model(dataframe, learning_rate, batch_size)
+        return render_template('test_data_upload.html', form=form, filename=filename, model_log=model_log,
+                               learning_rate=learning_rate, batch_size=batch_size)
     else:
         filename = None
     return render_template('test_data_upload.html', form=form, filename=filename)
diff --git a/templates/test_data_upload.html b/templates/test_data_upload.html
index c183e62..81097fb 100644
--- a/templates/test_data_upload.html
+++ b/templates/test_data_upload.html
@@ -1,19 +1,65 @@
-<html>
-<body>
-<h1>Input</h1>
-{% from "_formhelpers.html" import render_field %}
-<form method="post" enctype="multipart/form-data">
-    {{ render_field(form.training_data) }}
-    {{ render_field(form.learning_rate) }}
-    {{ render_field(form.batch_size) }}
-    <input type="submit" value="Submit">
-</form>
-</body>
-</html>
+<html>
+<head>
+    <title>Model Training</title>
+</head>
+<body>
+<h1>Model Training</h1>
+<p>Load a csv with your wine data set. Then choose a learning rate, batch size and output model name. Press submit
+    to train the model.</p>
+
+<h2>Input</h2>
+{% from "_formhelpers.html" import render_field %}
+<form method="post" enctype="multipart/form-data">
+    {{ render_field(form.training_data) }}
+    {{ render_field(form.learning_rate) }}
+    {{ render_field(form.batch_size) }}
+    <input type="submit" value="Submit">
+</form>
+
+{% if model_log %}
+    <h2>Model</h2>
+    <h3>Training Parameters</h3>
+    <p>Learning rate = {{ learning_rate }}</p>
+    <p>Batch size = {{ batch_size }}</p>
+    <h3>Model Training Log</h3>
+    <ul>
+        {% for log_entry in model_log %}
+            <li>{{ log_entry }}</li>
+        {% endfor %}
+    </ul>
+{% endif %}
+</body>
+</html>
diff --git a/templates/test_parameters.html b/templates/test_parameters.html
index 448658d..ba983ae 100644
--- a/templates/test_parameters.html
+++ b/templates/test_parameters.html
@@ -1,29 +1,62 @@
-<html>
-<body>
-<h1>Input</h1>
-{% from "_formhelpers.html" import render_field %}
-<form method="post">
-    {{ render_field(form.volatile_acidity) }}
-    {{ render_field(form.citric_acid) }}
-    {{ render_field(form.residual_sugar) }}
-    {{ render_field(form.chlorides) }}
-    {{ render_field(form.free_sulfur_dioxide) }}
-    {{ render_field(form.total_sulfur_dioxide) }}
-    {{ render_field(form.density) }}
-    {{ render_field(form.ph) }}
-    {{ render_field(form.sulphates) }}
-    {{ render_field(form.alcohol) }}
-    <input type="submit" value="Submit">
-</form>
-<h1>Result</h1>
-<p>{{result}}</p>
-</body>
-</html>
+<html>
+<head>
+    <title>Fit Model</title>
+</head>
+<body>
+<h1>Wine Quality Tester</h1>
+<p>Enter the chemical properties for your wine below. Press submit to discover whether your wine is of good or
+    bad quality.</p>
+
+<h2>Input</h2>
+{% from "_formhelpers.html" import render_field %}
+<form method="post">
+    {{ render_field(form.volatile_acidity) }}
+    {{ render_field(form.citric_acid) }}
+    {{ render_field(form.residual_sugar) }}
+    {{ render_field(form.chlorides) }}
+    {{ render_field(form.free_sulfur_dioxide) }}
+    {{ render_field(form.total_sulfur_dioxide) }}
+    {{ render_field(form.density) }}
+    {{ render_field(form.ph) }}
+    {{ render_field(form.sulphates) }}
+    {{ render_field(form.alcohol) }}
+    <input type="submit" value="Submit">
+</form>
+
+{% if result %}
+    <h2>Result</h2>
+    <p>Prediction accuracy: {{ result[0] }}</p>
+{% endif %}
+</body>
+</html>
diff --git a/wine_quality/training.py b/wine_quality/training.py
index 3c46a49..77ac2fd 100644
--- a/wine_quality/training.py
+++ b/wine_quality/training.py
@@ -42,17 +42,15 @@ def train_model(training_df, learning_rate=0.001, batch_size=126):
     bins = [3, 5, 8]
     red_wine_newcats['category'] = pd.cut(red_wine_newcats.quality, bins, labels=['Bad', 'Good'], include_lowest=True)
-
     y_red_wine = red_wine_newcats[['category']].get_values()
     # Removing fixed_acidity and quality
     X_red_wine = red_wine_newcats.iloc[:,1:-2].get_values()
     y_red_wine_raveled = y_red_wine.ravel()
-    y_red_wine_integers = [y.replace('Bad', '1') for y in y_red_wine_raveled]
-    y_red_wine_integers = [y.replace('Good', '0') for y in y_red_wine_integers]
+    y_red_wine_integers = [y.replace('Bad', '0') for y in y_red_wine_raveled]
+    y_red_wine_integers = [y.replace('Good', '1') for y in y_red_wine_integers]
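+    # label encoding: 'Bad' -> class 0, 'Good' -> class 1 (the previous mapping was inverted)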
     y_red_wine_integers = [np.int(y) for y in y_red_wine_integers]
-
     y_one_hot = _dense_to_one_hot(y_red_wine_integers, num_classes=2)
     X_train, X_test, y_train, y_test = train_test_split(X_red_wine, y_one_hot, test_size=0.2, random_state=42)
@@ -70,18 +68,39 @@ def train_model(training_df, learning_rate=0.001, batch_size=126):
     accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
     saver = tf.train.Saver(variables)
-    sess = tf.Session()
     init = tf.initialize_all_variables()
-    sess.run(init)
-    for i in range(100):
-        average_cost = 0
-        number_of_batches = int(len(X_train) / batch_size)
-        for start, end in zip(range(0, len(X_train), batch_size), range(batch_size, len(X_train), batch_size)):
-            sess.run(optimizer, feed_dict={X: X_train[start:end], y_: y_train[start:end]})
-            # Compute average loss
-            average_cost += sess.run(cost, feed_dict={X: X_train[start:end], y_: y_train[start:end]}) / number_of_batches
-    print(sess.run(accuracy, feed_dict={X: X_test, y_: y_test}))
+    with tf.Session() as sess:
+        sess.run(init)
+        log_list = []  # List to store logging of model progress
+        for i in range(100):
+            average_cost = 0
+            number_of_batches = int(len(X_train) / batch_size)
+            for start, end in zip(range(0, len(X_train), batch_size), range(batch_size, len(X_train), batch_size)):
+                sess.run(optimizer, feed_dict={X: X_train[start:end], y_: y_train[start:end]})
+                # Compute average loss
+                average_cost += sess.run(cost,
+                                         feed_dict={X: X_train[start:end],
+                                                    y_: y_train[start:end]}) / number_of_batches
+            if i % 10 == 0:
+                print("Epoch:", '%04d' % (i + 1), "cost=", "{:.9f}".format(average_cost))
+                log_cost = "Epoch {:d}: cost = {:.9f}".format(i + 1, average_cost)
+                # print(log_cost)
+                log_list.append(log_cost)
+
+        print("Accuracy: {0}".format(sess.run(accuracy, feed_dict={X: X_test, y_: y_test})))
+        log_accuracy = "Accuracy: {0}".format(sess.run(accuracy, feed_dict={X: X_test, y_: y_test}))
+        # print(log_accuracy)
+        log_list.append(log_accuracy)
-    path = saver.save(sess, os.path.join(os.path.dirname(__file__), "data/softmax_regression.ckpt"))
-    print("Saved:", path)
+        # save the checkpoint inside the with-block, while the session is still open
+        path = saver.save(sess, os.path.join(os.path.dirname(__file__), "data/softmax_regression.ckpt"))
+        print("Saved:", path)
+        # log_list.append("Saved: " + path)
+
+    print("")
+    print(log_list)
+
+    return log_list
+# df = pd.read_csv('data/winequality-red.csv', sep=',')
+# train_model(df)
\ No newline at end of file