From 2a404a36218a61c33974be6cd9a8e4ac62feec03 Mon Sep 17 00:00:00 2001 From: sulisindriyani Date: Sat, 9 Nov 2024 15:24:38 +0700 Subject: [PATCH 1/8] Update C1_W2_Assignment.js --- .../W2/assignment/C1_W2_Assignment.js | 60 +++++++++++++++---- 1 file changed, 47 insertions(+), 13 deletions(-) diff --git a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js index 8c7d1557..f10bf4a1 100755 --- a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js +++ b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js @@ -14,13 +14,38 @@ function getModel() { // many layers, filters, and neurons as you like. // HINT: Take a look at the MNIST example. model = tf.sequential(); - - // YOUR CODE HERE - - + model.add(tf.layers.conv2d({ + inputShape: [28, 28, 1], + filters: 32, + kernelSize: 3, + activation: 'relu' + })); + model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 })); + + model.add(tf.layers.conv2d({ + filters: 64, + kernelSize: 3, + activation: 'relu' + })); + model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 })); + + model.add(tf.layers.flatten()); + model.add(tf.layers.dense({ + units: 128, + activation: 'relu' + })); + model.add(tf.layers.dense({ + units: 10, + activation: 'softmax' + })); + // Compile the model using the categoricalCrossentropy loss, // the tf.train.adam() optimizer, and `acc` for your metrics. - model.compile(// YOUR CODE HERE); + model.compile({ + loss: 'categoricalCrossentropy', + optimizer: tf.train.adam(), + metrics: ['accuracy'] + }); return model; } @@ -28,13 +53,15 @@ function getModel() { async function train(model, data) { // Set the following metrics for the callback: 'loss', 'val_loss', 'acc', 'val_acc'. - const metrics = // YOUR CODE HERE + const metrics = ['loss', 'val_loss', 'acc', 'val_acc']; // Create the container for the callback. Set the name to 'Model Training' and // use a height of 1000px for the styles. - const container = // YOUR CODE HERE - + const container = { + name: 'Model Training', + styles: { height: '1000px' } + }; // Use tfvis.show.fitCallbacks() to setup the callbacks. // Use the container and metrics defined above as the parameters. @@ -47,14 +74,21 @@ async function train(model, data) { // Get the training batches and resize them. Remember to put your code // inside a tf.tidy() clause to clean up all the intermediate tensors. // HINT: Take a look at the MNIST example. - const [trainXs, trainYs] = // YOUR CODE HERE - - + const [trainXs, trainYs] = await tf.tidy(() => { + const d = data.getTrainData(); + const xs = d.xs.reshape([TRAIN_DATA_SIZE, 28, 28, 1]); + const ys = d.ys; + return [xs, ys]; + }); // Get the testing batches and resize them. Remember to put your code // inside a tf.tidy() clause to clean up all the intermediate tensors. // HINT: Take a look at the MNIST example. 
- const [testXs, testYs] = // YOUR CODE HERE - + const [testXs, testYs] = await tf.tidy(() => { + const d = data.getTestData(); + const xs = d.xs.reshape([TEST_DATA_SIZE, 28, 28, 1]); + const ys = d.ys; + return [xs, ys]; + }); return model.fit(trainXs, trainYs, { batchSize: BATCH_SIZE, From 3ae132ab2df8c188ed82bc49cfdcf899a41f4592 Mon Sep 17 00:00:00 2001 From: sulisindriyani Date: Sat, 9 Nov 2024 15:45:04 +0700 Subject: [PATCH 2/8] Update C1_W2_Assignment.js --- .../W2/assignment/C1_W2_Assignment.js | 45 +++++-------------- 1 file changed, 12 insertions(+), 33 deletions(-) diff --git a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js index f10bf4a1..c0e3d3bd 100755 --- a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js +++ b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js @@ -14,37 +14,20 @@ function getModel() { // many layers, filters, and neurons as you like. // HINT: Take a look at the MNIST example. model = tf.sequential(); - model.add(tf.layers.conv2d({ - inputShape: [28, 28, 1], - filters: 32, - kernelSize: 3, - activation: 'relu' - })); + model.add(tf.layers.conv2d({ inputShape: [28, 28, 1], filters: 32, kernelSize: 3, activation: 'relu' })); model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 })); - - model.add(tf.layers.conv2d({ - filters: 64, - kernelSize: 3, - activation: 'relu' - })); + model.add(tf.layers.conv2d({ filters: 64, kernelSize: 3, activation: 'relu' })); model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 })); - model.add(tf.layers.flatten()); - model.add(tf.layers.dense({ - units: 128, - activation: 'relu' - })); - model.add(tf.layers.dense({ - units: 10, - activation: 'softmax' - })); + model.add(tf.layers.dense({ units: 128, activation: 'relu' })); + model.add(tf.layers.dense({ units: 10, activation: 'softmax' })); // Compile the model using the categoricalCrossentropy loss, // the tf.train.adam() optimizer, and `acc` for your metrics. model.compile({ loss: 'categoricalCrossentropy', optimizer: tf.train.adam(), - metrics: ['accuracy'] + metrics: ['accuracy'] }); }); return model; @@ -65,7 +48,7 @@ async function train(model, data) { // Use tfvis.show.fitCallbacks() to setup the callbacks. // Use the container and metrics defined above as the parameters. - const fitCallbacks = // YOUR CODE HERE + const fitCallbacks = tfvis.show.fitCallbacks(container, metrics); const BATCH_SIZE = 512; const TRAIN_DATA_SIZE = 6000; @@ -74,20 +57,16 @@ async function train(model, data) { // Get the training batches and resize them. Remember to put your code // inside a tf.tidy() clause to clean up all the intermediate tensors. // HINT: Take a look at the MNIST example. - const [trainXs, trainYs] = await tf.tidy(() => { - const d = data.getTrainData(); - const xs = d.xs.reshape([TRAIN_DATA_SIZE, 28, 28, 1]); - const ys = d.ys; - return [xs, ys]; + const [trainXs, trainYs] = tf.tidy(() => { + const d = data.getTrainData(); + return [d.xs.reshape([TRAIN_DATA_SIZE, 28, 28, 1]), d.ys]; }); // Get the testing batches and resize them. Remember to put your code // inside a tf.tidy() clause to clean up all the intermediate tensors. // HINT: Take a look at the MNIST example. 
- const [testXs, testYs] = await tf.tidy(() => { - const d = data.getTestData(); - const xs = d.xs.reshape([TEST_DATA_SIZE, 28, 28, 1]); - const ys = d.ys; - return [xs, ys]; + const [testXs, testYs] = tf.tidy(() => { + const d = data.getTestData(); + return [d.xs.reshape([TEST_DATA_SIZE, 28, 28, 1]), d.ys]; }); return model.fit(trainXs, trainYs, { From eaa0b0ab29537ceab56430d9f3b6fb51bbddb2cd Mon Sep 17 00:00:00 2001 From: sulisindriyani Date: Sat, 9 Nov 2024 15:51:45 +0700 Subject: [PATCH 3/8] Update C1_W2_Assignment.js --- C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js index c0e3d3bd..2ff167cf 100755 --- a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js +++ b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js @@ -27,7 +27,7 @@ function getModel() { model.compile({ loss: 'categoricalCrossentropy', optimizer: tf.train.adam(), - metrics: ['accuracy'] }); + metrics: ['accuracy'] }); return model; From 3f276601635de6221215b297d8ea87d6d1579ba8 Mon Sep 17 00:00:00 2001 From: sulisindriyani Date: Sat, 9 Nov 2024 16:37:16 +0700 Subject: [PATCH 4/8] Update fashion-mnist.html --- C1_Browser-based-TF-JS/W2/assignment/fashion-mnist.html | 1 + 1 file changed, 1 insertion(+) diff --git a/C1_Browser-based-TF-JS/W2/assignment/fashion-mnist.html b/C1_Browser-based-TF-JS/W2/assignment/fashion-mnist.html index 52f6254a..efe696d8 100755 --- a/C1_Browser-based-TF-JS/W2/assignment/fashion-mnist.html +++ b/C1_Browser-based-TF-JS/W2/assignment/fashion-mnist.html @@ -1,3 +1,4 @@ + From 6a9c26409316febf0d6953e16343d489ee631c4b Mon Sep 17 00:00:00 2001 From: sulisindriyani Date: Mon, 11 Nov 2024 10:44:27 +0700 Subject: [PATCH 5/8] Update C1_W2_Assignment.js --- .../W2/assignment/C1_W2_Assignment.js | 29 ------------------- 1 file changed, 29 deletions(-) diff --git a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js index 2ff167cf..8630b5dc 100755 --- a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js +++ b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js @@ -5,14 +5,6 @@ var rawImage; var model; function getModel() { - - // In the space below create a convolutional neural network that can classify the - // images of articles of clothing in the Fashion MNIST dataset. Your convolutional - // neural network should only use the following layers: conv2d, maxPooling2d, - // flatten, and dense. Since the Fashion MNIST has 10 classes, your output layer - // should have 10 units and a softmax activation function. You are free to use as - // many layers, filters, and neurons as you like. - // HINT: Take a look at the MNIST example. model = tf.sequential(); model.add(tf.layers.conv2d({ inputShape: [28, 28, 1], filters: 32, kernelSize: 3, activation: 'relu' })); model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 })); @@ -22,8 +14,6 @@ function getModel() { model.add(tf.layers.dense({ units: 128, activation: 'relu' })); model.add(tf.layers.dense({ units: 10, activation: 'softmax' })); - // Compile the model using the categoricalCrossentropy loss, - // the tf.train.adam() optimizer, and `acc` for your metrics. 
     model.compile({
         loss: 'categoricalCrossentropy',
         optimizer: tf.train.adam(),
@@ -34,36 +24,21 @@ function getModel() {
 }
 
 async function train(model, data) {
-
-    // Set the following metrics for the callback: 'loss', 'val_loss', 'acc', 'val_acc'.
     const metrics = ['loss', 'val_loss', 'acc', 'val_acc'];
-
-
-    // Create the container for the callback. Set the name to 'Model Training' and
-    // use a height of 1000px for the styles.
     const container = {
         name: 'Model Training',
         styles: { height: '1000px' }
     };
-
-    // Use tfvis.show.fitCallbacks() to setup the callbacks.
-    // Use the container and metrics defined above as the parameters.
     const fitCallbacks = tfvis.show.fitCallbacks(container, metrics);
 
     const BATCH_SIZE = 512;
     const TRAIN_DATA_SIZE = 6000;
     const TEST_DATA_SIZE = 1000;
 
-    // Get the training batches and resize them. Remember to put your code
-    // inside a tf.tidy() clause to clean up all the intermediate tensors.
-    // HINT: Take a look at the MNIST example.
     const [trainXs, trainYs] = tf.tidy(() => {
         const d = data.getTrainData();
         return [d.xs.reshape([TRAIN_DATA_SIZE, 28, 28, 1]), d.ys];
     });
 
-    // Get the testing batches and resize them. Remember to put your code
-    // inside a tf.tidy() clause to clean up all the intermediate tensors.
-    // HINT: Take a look at the MNIST example.
     const [testXs, testYs] = tf.tidy(() => {
         const d = data.getTestData();
         return [d.xs.reshape([TEST_DATA_SIZE, 28, 28, 1]), d.ys];
     });
 
@@ -113,7 +88,6 @@ function save() {
                         "Dress", "Coat", "Sandal", "Shirt",
                         "Sneaker", "Bag", "Ankle boot"];
-
     alert(classNames[pIndex]);
 }
 
@@ -145,6 +119,3 @@ async function run() {
 }
 
 document.addEventListener('DOMContentLoaded', run);
-
-
-

From 44ca5477723d18156e53bde13ea953698ebd4ebc Mon Sep 17 00:00:00 2001
From: sulisindriyani
Date: Mon, 11 Nov 2024 11:16:21 +0700
Subject: [PATCH 6/8] Update C1_W2_Assignment.js

---
 .../W2/assignment/C1_W2_Assignment.js | 109 +++++++++++-------
 1 file changed, 67 insertions(+), 42 deletions(-)

diff --git a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js
index 8630b5dc..8ae1c834 100755
--- a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js
+++ b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js
@@ -1,49 +1,74 @@
-import {FMnistData} from './fashion-data.js';
+import { FMnistData } from './fashion-data.js';
+
 var canvas, ctx, saveButton, clearButton;
-var pos = {x:0, y:0};
+var pos = { x: 0, y: 0 };
 var rawImage;
 var model;
 
 function getModel() {
+    // Build the CNN model
     model = tf.sequential();
-    model.add(tf.layers.conv2d({ inputShape: [28, 28, 1], filters: 32, kernelSize: 3, activation: 'relu' }));
-    model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 }));
-    model.add(tf.layers.conv2d({ filters: 64, kernelSize: 3, activation: 'relu' }));
-    model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 }));
+
+    // Add the convolution, pooling, flatten, and dense layers
+    model.add(tf.layers.conv2d({
+        inputShape: [28, 28, 1],
+        filters: 32,
+        kernelSize: 3,
+        activation: 'relu'
+    }));
+    model.add(tf.layers.maxPooling2d({ poolSize: 2 }));
+
+    model.add(tf.layers.conv2d({
+        filters: 64,
+        kernelSize: 3,
+        activation: 'relu'
+    }));
+    model.add(tf.layers.maxPooling2d({ poolSize: 2 }));
+
     model.add(tf.layers.flatten());
-    model.add(tf.layers.dense({ units: 128, activation: 'relu' }));
-    model.add(tf.layers.dense({ units: 10, activation: 'softmax' }));
-
+    model.add(tf.layers.dense({
+        units: 128,
+        activation: 'relu'
+    }));
+    model.add(tf.layers.dense({
+        units: 10,
+        activation: 'softmax' // Output layer for the 10 classes
+    }));
+
+    // Compile the model
     model.compile({
-        loss: 'categoricalCrossentropy',
         optimizer: tf.train.adam(),
+        loss: 'categoricalCrossentropy',
         metrics: ['accuracy']
     });
-
+
     return model;
 }
 
 async function train(model, data) {
-    const metrics = ['loss', 'val_loss', 'acc', 'val_acc'];
-    const container = {
-        name: 'Model Training',
-        styles: { height: '1000px' }
-    };
+    // Set the metrics for the callback
+    const metrics = ['loss', 'val_loss', 'acc', 'val_acc'];
+
+    // Create the container for the callback
+    const container = document.getElementById('fitCallbacksContainer');
     const fitCallbacks = tfvis.show.fitCallbacks(container, metrics);
-
+
     const BATCH_SIZE = 512;
     const TRAIN_DATA_SIZE = 6000;
     const TEST_DATA_SIZE = 1000;
-
+
+    // Get the training batches and resize them
     const [trainXs, trainYs] = tf.tidy(() => {
-        const d = data.getTrainData();
+        const d = data.getTrainData();
         return [d.xs.reshape([TRAIN_DATA_SIZE, 28, 28, 1]), d.ys];
     });
-    const [testXs, testYs] = tf.tidy(() => {
-        const d = data.getTestData();
+
+    // Get the testing batches and resize them
+    const [testXs, testYs] = tf.tidy(() => {
+        const d = data.getTestData();
         return [d.xs.reshape([TEST_DATA_SIZE, 28, 28, 1]), d.ys];
     });
-
+
     return model.fit(trainXs, trainYs, {
         batchSize: BATCH_SIZE,
         validationData: [testXs, testYs],
@@ -53,13 +78,13 @@ async function train(model, data) {
     });
 }
 
-function setPosition(e){
-    pos.x = e.clientX-100;
-    pos.y = e.clientY-100;
+function setPosition(e) {
+    pos.x = e.clientX - 100;
+    pos.y = e.clientY - 100;
 }
-
+
 function draw(e) {
-    if(e.buttons!=1) return;
+    if (e.buttons != 1) return;
     ctx.beginPath();
     ctx.lineWidth = 24;
     ctx.lineCap = 'round';
@@ -70,33 +95,33 @@ function draw(e) {
     ctx.stroke();
     rawImage.src = canvas.toDataURL('image/png');
 }
-
+
 function erase() {
     ctx.fillStyle = "black";
-    ctx.fillRect(0,0,280,280);
+    ctx.fillRect(0, 0, 280, 280);
 }
-
+
 function save() {
-    var raw = tf.browser.fromPixels(rawImage,1);
-    var resized = tf.image.resizeBilinear(raw, [28,28]);
+    var raw = tf.browser.fromPixels(rawImage, 1);
+    var resized = tf.image.resizeBilinear(raw, [28, 28]);
     var tensor = resized.expandDims(0);
-
+
     var prediction = model.predict(tensor);
     var pIndex = tf.argMax(prediction, 1).dataSync();
-
-    var classNames = ["T-shirt/top", "Trouser", "Pullover",
+
+    var classNames = ["T-shirt/top", "Trouser", "Pullover",
                         "Dress", "Coat", "Sandal", "Shirt",
-                        "Sneaker", "Bag", "Ankle boot"];
+                        "Sneaker", "Bag", "Ankle boot"];
+
     alert(classNames[pIndex]);
 }
-
+
 function init() {
     canvas = document.getElementById('canvas');
     rawImage = document.getElementById('canvasimg');
     ctx = canvas.getContext("2d");
     ctx.fillStyle = "black";
-    ctx.fillRect(0,0,280,280);
+    ctx.fillRect(0, 0, 280, 280);
     canvas.addEventListener("mousemove", draw);
     canvas.addEventListener("mousedown", setPosition);
     canvas.addEventListener("mouseenter", setPosition);
@@ -106,16 +131,16 @@ function init() {
     clearButton.addEventListener("click", erase);
 }
-
 async function run() {
     const data = new FMnistData();
     await data.load();
-    const model = getModel();
-    tfvis.show.modelSummary({name: 'Model Architecture'}, model);
+    model = getModel();
+    tfvis.show.modelSummary({ name: 'Model Architecture' }, model);
     await train(model, data);
     await model.save('downloads://my_model');
     init();
     alert("Training is done, try classifying your drawings!");
 }
+// Run the run() function once the DOM is ready
 document.addEventListener('DOMContentLoaded', run);

From fdc7ac0eba02d7e32e736b8c4e40186af4af0963 Mon Sep 17 00:00:00 2001
From: sulisindriyani
Date: Mon, 11 Nov 2024 11:26:50 +0700
Subject: [PATCH 7/8] Update C1_W2_Assignment.js

---
 .../W2/assignment/C1_W2_Assignment.js | 28 +++++++++----------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js
index 8ae1c834..e7a9c684 100755
--- a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js
+++ b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js
@@ -54,19 +54,19 @@ async function train(model, data) {
     const fitCallbacks = tfvis.show.fitCallbacks(container, metrics);
 
     const BATCH_SIZE = 512;
-    const TRAIN_DATA_SIZE = 6000;
-    const TEST_DATA_SIZE = 1000;
+    const TRAIN_DATA_SIZE = 60000; // Training data size
+    const TEST_DATA_SIZE = 10000; // Test data size
 
     // Get the training batches and resize them
     const [trainXs, trainYs] = tf.tidy(() => {
         const d = data.getTrainData();
-        return [d.xs.reshape([TRAIN_DATA_SIZE, 28, 28, 1]), d.ys];
+        return [d.xs, d.ys]; // No reshape needed if the data is already a tensor
     });
 
     // Get the testing batches and resize them
     const [testXs, testYs] = tf.tidy(() => {
         const d = data.getTestData();
-        return [d.xs.reshape([TEST_DATA_SIZE, 28, 28, 1]), d.ys];
+        return [d.xs, d.ys]; // No reshape needed if the data is already a tensor
     });
 
     return model.fit(trainXs, trainYs, {
@@ -79,12 +79,12 @@ async function train(model, data) {
 }
 
 function setPosition(e) {
-    pos.x = e.clientX - 100;
-    pos.y = e.clientY - 100;
+    pos.x = e.clientX - canvas.offsetLeft; // Compute the position correctly
+    pos.y = e.clientY - canvas.offsetTop; // Compute the position correctly
 }
 
 function draw(e) {
-    if (e.buttons != 1) return;
+    if (e.buttons !== 1) return; // Use strict equality
     ctx.beginPath();
     ctx.lineWidth = 24;
     ctx.lineCap = 'round';
@@ -98,16 +98,16 @@ function draw(e) {
 
 function erase() {
     ctx.fillStyle = "black";
-    ctx.fillRect(0, 0, 280, 280);
+    ctx.fillRect(0, 0, canvas.width, canvas.height); // Use the canvas dimensions
 }
 
-function save() {
-    var raw = tf.browser.fromPixels(rawImage, 1);
+async function save() {
+    var raw = tf.browser.fromPixels(rawImage);
     var resized = tf.image.resizeBilinear(raw, [28, 28]);
-    var tensor = resized.expandDims(0);
+    var tensor = resized.expandDims(0).toFloat().div(tf.scalar(255)); // Normalize
 
     var prediction = model.predict(tensor);
-    var pIndex = tf.argMax(prediction, 1).dataSync();
+    var pIndex = tf.argMax(prediction, 1).dataSync()[0]; // Take the first index
 
     var classNames = ["T-shirt/top", "Trouser", "Pullover",
                         "Dress", "Coat", "Sandal", "Shirt",
@@ -119,9 +119,9 @@ function init() {
     canvas = document.getElementById('canvas');
     rawImage = document.getElementById('canvasimg');
-    ctx = canvas.getContext("2d");
+    ctx = canvas.getContext("2d", { willReadFrequently: true }); // Add this option
     ctx.fillStyle = "black";
-    ctx.fillRect(0, 0, 280, 280);
+    ctx.fillRect(0, 0, canvas.width, canvas.height); // Use the canvas dimensions
     canvas.addEventListener("mousemove", draw);
     canvas.addEventListener("mousedown", setPosition);
     canvas.addEventListener("mouseenter", setPosition);

From e187770a69b7d601105be8949892f068040690a2 Mon Sep 17 00:00:00 2001
From: sulisindriyani
Date: Mon, 11 Nov 2024 11:51:01 +0700
Subject: [PATCH 8/8] Delete C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js

---
 .../W2/assignment/C1_W2_Assignment.js | 146 ------------------
 1 file changed, 146 deletions(-)
 delete mode 100755 C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js

diff --git a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js b/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js
deleted file mode 100755
index e7a9c684..00000000
--- a/C1_Browser-based-TF-JS/W2/assignment/C1_W2_Assignment.js
+++ /dev/null
@@ -1,146 +0,0 @@
-import { FMnistData } from './fashion-data.js';
-
-var canvas, ctx, saveButton, clearButton;
-var pos = { x: 0, y: 0 };
-var rawImage;
-var model;
-
-function getModel() {
-    // Build the CNN model
-    model = tf.sequential();
-
-    // Add the convolution, pooling, flatten, and dense layers
-    model.add(tf.layers.conv2d({
-        inputShape: [28, 28, 1],
-        filters: 32,
-        kernelSize: 3,
-        activation: 'relu'
-    }));
-    model.add(tf.layers.maxPooling2d({ poolSize: 2 }));
-
-    model.add(tf.layers.conv2d({
-        filters: 64,
-        kernelSize: 3,
-        activation: 'relu'
-    }));
-    model.add(tf.layers.maxPooling2d({ poolSize: 2 }));
-
-    model.add(tf.layers.flatten());
-    model.add(tf.layers.dense({
-        units: 128,
-        activation: 'relu'
-    }));
-    model.add(tf.layers.dense({
-        units: 10,
-        activation: 'softmax' // Output layer for the 10 classes
-    }));
-
-    // Compile the model
-    model.compile({
-        optimizer: tf.train.adam(),
-        loss: 'categoricalCrossentropy',
-        metrics: ['accuracy']
-    });
-
-    return model;
-}
-
-async function train(model, data) {
-    // Set the metrics for the callback
-    const metrics = ['loss', 'val_loss', 'acc', 'val_acc'];
-
-    // Create the container for the callback
-    const container = document.getElementById('fitCallbacksContainer');
-    const fitCallbacks = tfvis.show.fitCallbacks(container, metrics);
-
-    const BATCH_SIZE = 512;
-    const TRAIN_DATA_SIZE = 60000; // Training data size
-    const TEST_DATA_SIZE = 10000; // Test data size
-
-    // Get the training batches and resize them
-    const [trainXs, trainYs] = tf.tidy(() => {
-        const d = data.getTrainData();
-        return [d.xs, d.ys]; // No reshape needed if the data is already a tensor
-    });
-
-    // Get the testing batches and resize them
-    const [testXs, testYs] = tf.tidy(() => {
-        const d = data.getTestData();
-        return [d.xs, d.ys]; // No reshape needed if the data is already a tensor
-    });
-
-    return model.fit(trainXs, trainYs, {
-        batchSize: BATCH_SIZE,
-        validationData: [testXs, testYs],
-        epochs: 10,
-        shuffle: true,
-        callbacks: fitCallbacks
-    });
-}
-
-function setPosition(e) {
-    pos.x = e.clientX - canvas.offsetLeft; // Compute the position correctly
-    pos.y = e.clientY - canvas.offsetTop; // Compute the position correctly
-}
-
-function draw(e) {
-    if (e.buttons !== 1) return; // Use strict equality
-    ctx.beginPath();
-    ctx.lineWidth = 24;
-    ctx.lineCap = 'round';
-    ctx.strokeStyle = 'white';
-    ctx.moveTo(pos.x, pos.y);
-    setPosition(e);
-    ctx.lineTo(pos.x, pos.y);
-    ctx.stroke();
-    rawImage.src = canvas.toDataURL('image/png');
-}
-
-function erase() {
-    ctx.fillStyle = "black";
-    ctx.fillRect(0, 0, canvas.width, canvas.height); // Use the canvas dimensions
-}
-
-async function save() {
-    var raw = tf.browser.fromPixels(rawImage);
-    var resized = tf.image.resizeBilinear(raw, [28, 28]);
-    var tensor = resized.expandDims(0).toFloat().div(tf.scalar(255)); // Normalize
-
-    var prediction = model.predict(tensor);
-    var pIndex = tf.argMax(prediction, 1).dataSync()[0]; // Take the first index
-
-    var classNames = ["T-shirt/top", "Trouser", "Pullover",
-                        "Dress", "Coat", "Sandal", "Shirt",
-                        "Sneaker", "Bag", "Ankle boot"];
-
-    alert(classNames[pIndex]);
-}
-
-function init() {
-    canvas = document.getElementById('canvas');
-    rawImage = document.getElementById('canvasimg');
-    ctx = canvas.getContext("2d", { willReadFrequently: true }); // Add this option
-    ctx.fillStyle = "black";
-    ctx.fillRect(0, 0, canvas.width, canvas.height); // Use the canvas dimensions
-    canvas.addEventListener("mousemove", draw);
-    canvas.addEventListener("mousedown", setPosition);
-    canvas.addEventListener("mouseenter", setPosition);
-    saveButton = document.getElementById('sb');
-    saveButton.addEventListener("click", save);
-    clearButton = document.getElementById('cb');
-    clearButton.addEventListener("click", erase);
-}
-
-async function run() {
-    const data = new FMnistData();
-    await data.load();
-    model = getModel();
-    tfvis.show.modelSummary({ name: 'Model Architecture' }, model);
-    await train(model, data);
-    await model.save('downloads://my_model');
-    init();
-    alert("Training is done, try classifying your drawings!");
-}
-
-// Run the run() function once the DOM is ready
-document.addEventListener('DOMContentLoaded', run);