
Commit 3221853

Merge branch 'main' of https://github.com/rogierz/DC01_mlinapp into main
2 parents 248c038 + c3580be

File tree

8 files changed: +59 −46 lines


TODO.md

+8 −8

@@ -1,15 +1,15 @@
 | Things to do                                   | Due date | Assignee | Notes | Status (TODO/WIP/DONE) |
 | :--------------------------------------------- | :------: | :------: | :---: | :--------------------: |
 | Implement GAN                                   | | | | WIP |
-| Split the dataset into train/val/test          | | | | TODO |
-| Review the labels                              | | | | TODO |
-| Create environment on legion                   | | | | TODO |
-| Create callbacks on the models                 | | | | DONE |
-| Define dataset preprocessing                   | | | | TODO |
+| Implement training                             | | | | WIP |
 | Data augmentation pipeline                     | | | | TODO |
-| Implement training                             | | | | TODO |
-| Training/validation mask visualization         | | | | TODO |
 | Script to paste masks                          | | | | TODO |
-| Write to the prof about overleaf and the repo  | | | | TODO |
+| Define dataset preprocessing                   | | | | TODO |
+| Review the labels                              | | | | **TODO!!!** |
 | Experiments                                    | | | | TODO |
 | Write the report                               | | | | TODO |
+| Create environment on legion                   | | | | DONE |
+| Create callbacks on the models                 | | | | DONE |
+| Training/validation mask visualization         | | | | DONE |
+| Write to the prof about overleaf and the repo  | | | | DONE |
+| Split the dataset into train/val/test          | | | | DONE |

dataset/annotations/Image33.json

+4 −4

@@ -3,7 +3,7 @@
   "flags": {},
   "shapes": [
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           881.9875776397515,
@@ -27,7 +27,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           370.21276595744683,
@@ -51,7 +51,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           308.51063829787233,
@@ -99,7 +99,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           415.6756756756757,

dataset/annotations/Image34.json

+1 −1

@@ -3,7 +3,7 @@
   "flags": {},
   "shapes": [
     {
-      "label": "Vertical defect",
+      "label": "Vertical",
       "points": [
         [
           346.1038961038961,

dataset/annotations/Image42.json

+1 −1

@@ -107,7 +107,7 @@
       "flags": {}
     },
     {
-      "label": "Vertical defect",
+      "label": "Vertical",
       "points": [
         [
           358.45454545454544,

dataset/annotations/Image46.json

+17 −17

@@ -3,7 +3,7 @@
   "flags": {},
   "shapes": [
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           795.9213250517597,
@@ -27,7 +27,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           826.5010351966874,
@@ -51,7 +51,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           860.1541292845641,
@@ -75,7 +75,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           478.40579710144925,
@@ -99,7 +99,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           444.9689440993788,
@@ -123,7 +123,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           383.01127214170685,
@@ -147,7 +147,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           418.7784679089026,
@@ -171,7 +171,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           425.28079710144925,
@@ -195,7 +195,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           396.70936853002064,
@@ -219,7 +219,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           501.17365424430636,
@@ -243,7 +243,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           328.89751552795025,
@@ -267,7 +267,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           463.32199821876765,
@@ -291,7 +291,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           517.5119423528458,
@@ -315,7 +315,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           845.077720207254,
@@ -339,7 +339,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           956.476683937824,
@@ -363,7 +363,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           820.4301075268817,
@@ -387,7 +387,7 @@
       "flags": {}
     },
     {
-      "label": "Spatting",
+      "label": "Spattering",
       "points": [
         [
           696.996593868964,
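
All of these annotation edits are the same rename (Spatting to Spattering, Vertical defect to Vertical) that src/datasets/dataset.py also normalizes at load time (see the next diff). As a minimal sketch, a hypothetical one-off script to apply the rename in place across the labelme JSON files (the glob pattern and label map are assumptions, not part of this commit):

import json
from glob import glob

# Map legacy label spellings (upper-cased for comparison) to the
# canonical names used by the dataset code. Hypothetical helper,
# not part of the commit.
LABEL_FIXES = {'SPATTING': 'Spattering', 'VERTICAL DEFECT': 'Vertical'}

for path in glob('dataset/annotations/*.json'):
    with open(path) as f:
        ann = json.load(f)
    changed = False
    for shape in ann['shapes']:
        fixed = LABEL_FIXES.get(shape['label'].upper())
        if fixed is not None and shape['label'] != fixed:
            shape['label'] = fixed
            changed = True
    if changed:
        # rewrite only the files that actually changed
        with open(path, 'w') as f:
            json.dump(ann, f, indent=2)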

src/datasets/dataset.py

+8 −12

@@ -35,7 +35,7 @@ def map_fn(file:str, save_defects=False):
     for shape in annotations['shapes']:
         # use only one label name
         if shape['label'].upper() == 'VERTICAL DEFECT':
-            shape['label'] = 'VERTICAL'
+            shape['label'] = 'VERTICAL'
         if shape['label'].upper() == 'SPATTING':
             shape['label'] = 'SPATTERING'

@@ -57,7 +57,7 @@ def make_dataset(tuples):
     return tf.data.Dataset.from_tensor_slices((x, y))

 class AMDdataset():
-    '''Additive Manufactoring dataset class'''
+    """Additive Manufactoring dataset class"""

     def __init__(self, path, image_shape=(1280, 1024, 3)):
         self.path = path
@@ -70,7 +70,6 @@ def build(self):
         if len(folders) != len(REQUIRED_FOLDERS):
             raise FileNotFoundError(f'Directory {self.path} does not contain correct folders. It must contains {REQUIRED_FOLDERS}')

-        # TODO split the dataset and load it into 3 tf.dataset: self.train, self.val, self.test
         train = []
         test = []
         val = []
@@ -80,19 +79,16 @@ def build(self):
            files = np.array(glob(os.path.join(self.path, f,'*.jpg')))
            if len(files) == 0:
                continue
-           n = len(files) // 3
+           n = len(files) // 9
            idx = np.random.permutation(np.arange(len(files)))

-           test.extend(files[idx[:n]])
-           val.extend(files[idx[n:n*2]])
-           train.extend(files[idx[n*2:]])
+           test.extend(files[idx[:3*n]])   # 3/9
+           val.extend(files[idx[3*n:5*n]]) # 2/9
+           train.extend(files[idx[5*n:]])  # 4/9

        train = [map_fn(x, True) for x in train]
-       test = [map_fn(x) for x in test]
-       val = [map_fn(x, True) for x in val]
-
-       # img, mask = train[0]
-       # apply_mask_on_image(img, mask, "../visualize")
+       test = [map_fn(x) for x in test]
+       val = [map_fn(x, True) for x in val]

        # build the datasets
        self.train = make_dataset(train)
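
The new split logic partitions each folder into ninths: 3/9 test, 2/9 val, 4/9 train (plus any remainder). A standalone sketch of the same partition, for reference (the function name and seed handling are assumptions):

import numpy as np

def split_files(files, rng=None):
    # Shuffle and partition file paths the way AMDdataset.build() does:
    # 3/9 test, 2/9 val, 4/9 train (the remainder goes to train).
    # Hypothetical helper, shown for reference only.
    rng = rng or np.random.default_rng(42)
    files = np.asarray(files)
    n = len(files) // 9
    idx = rng.permutation(len(files))
    test = files[idx[:3 * n]]
    val = files[idx[3 * n:5 * n]]
    train = files[idx[5 * n:]]
    return train, val, test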

src/network/gan.py

+19 −2

@@ -99,6 +99,17 @@ def __init__(
         super(GAN, self).__init__(name=name,**kwargs)
         self.generator = Generator()
         self.discriminator = Discriminator()
+        self.optimizer = {}
+        self.loss = {}
+
+    def compile(self, d_optimizer, g_optimizer, d_loss_fn, g_loss_fn=None):
+        super(GAN, self).compile()
+        self.optimizer['discriminator'] = d_optimizer
+        self.optimizer['generator'] = g_optimizer
+        self.loss['discriminator'] = d_loss_fn
+        self.loss['generator'] = d_loss_fn
+        if g_loss_fn is not None:
+            self.loss['generator'] = g_loss_fn

     def train_step(self, inputs):
         inputs_with_defects = inputs # TODO add function to insert defects
@@ -112,8 +123,12 @@ def train_step(self, inputs):
         y_fake = tf.zeros_like(y_fake_pred)

         # (the loss function is configured in compile())
-        loss_d = self.loss['discriminator'](tf.concat(y_true, y_fake), tf.concat(y_true_pred, y_fake_pred), regularization_losses=self.losses)
-        loss_g = self.loss['generator'](y_true, y_fake_pred, regularization_losses=self.losses)
+        y_all = tf.concat(y_true, y_fake)
+        # Add random noise to the labels - important trick!
+        # y_all += 0.05 * tf.random.uniform(tf.shape(labels))
+
+        loss_d = self.loss['discriminator'](y_all, tf.concat(y_true_pred, y_fake_pred))
+        loss_g = self.loss['generator'](y_true, y_fake_pred)
         loss = tf.reduce_sum(loss_g, 0.5 * loss_d)

         # compute gradients
@@ -128,6 +143,8 @@ def train_step(self, inputs):
         # compute metrics
         out = {m.name: m.result() for m in self.metrics}
         out['loss'] = loss
+        out['d_loss'] = loss_d
+        out['g_loss'] = loss_g
         return out

     def call(self, inputs):
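
One caveat on the new train_step code: tf.concat takes a list of tensors plus an axis, so tf.concat(y_true, y_fake) treats y_fake as the axis, and tf.reduce_sum's second positional argument is also an axis, not a second operand. A sketch of the computation the code most likely intends (the helper name is hypothetical):

import tensorflow as tf

def combined_gan_loss(d_loss_fn, g_loss_fn,
                      y_true, y_fake, y_true_pred, y_fake_pred):
    # Concatenate real and fake labels/predictions along the batch axis;
    # tf.concat requires a list of tensors and an explicit axis.
    y_all = tf.concat([y_true, y_fake], axis=0)
    y_all_pred = tf.concat([y_true_pred, y_fake_pred], axis=0)
    loss_d = d_loss_fn(y_all, y_all_pred)
    loss_g = g_loss_fn(y_true, y_fake_pred)
    # Weighted sum of the two losses: plain arithmetic, not tf.reduce_sum.
    return loss_g + 0.5 * loss_d, loss_d, loss_g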

src/train.py

+1 −1

@@ -17,7 +17,7 @@

 if __name__ == '__main__':
     # make deterministic
-    np.random.seed(0)
+    np.random.seed(42)
     tf.keras.utils.set_random_seed(1)
     tf.config.experimental.enable_op_determinism()
     # parsing the arguments
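
One caveat on the seeding: tf.keras.utils.set_random_seed seeds Python's random module, NumPy, and TensorFlow in a single call, so the np.random.seed(42) on the previous line is immediately overridden by set_random_seed(1). A minimal sketch of an unambiguous setup (the single seed value is an assumption):

import tensorflow as tf

SEED = 42  # assumed; the diff uses 42 for NumPy and 1 for Keras

# Seeds Python's `random`, NumPy, and TensorFlow at once.
tf.keras.utils.set_random_seed(SEED)
# Make TF ops deterministic where supported (may cost performance).
tf.config.experimental.enable_op_determinism()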
