Documentation service: http://47.92.0.57:3000/  Weekly report index: http://47.92.0.57:3000/s/NruNXRYmV

Commit 86d39706 by 王肇一

Cyclic learning rate for UNet

parent 515970c7
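
The headline change in this commit moves the UNet run onto a cyclic learning rate. As a minimal, hedged sketch of the technique (stand-in model and synthetic data, not the repository's actual training script): torch.optim.lr_scheduler.CyclicLR sweeps the learning rate between base_lr and max_lr once per cycle and is advanced once per batch.

# Minimal sketch of a cyclic learning rate in PyTorch.
# The model, data, and hyperparameters here are stand-ins.
import torch
from torch import nn, optim
from torch.optim import lr_scheduler

net = nn.Conv2d(1, 1, kernel_size=3, padding=1)      # stand-in for the UNet
optimizer = optim.RMSprop(net.parameters(), lr=0.1, weight_decay=1e-8)
# Triangular policy: lr ramps from base_lr up to max_lr and back, repeatedly.
scheduler = lr_scheduler.CyclicLR(optimizer, base_lr=1e-10, max_lr=0.01)

for step in range(5):                                # stands in for the batch loop
    out = net(torch.randn(1, 1, 8, 8))
    loss = out.sum()
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    scheduler.step()                                 # advance once per batch
    print(step, scheduler.get_last_lr())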
[Diff of a data index file: ~1,750 shuffled sample IDs of the form <image>_<index> (e.g. 319_4, 93_0, 491_1, ...), apparently a dataset split listing; full contents omitted.]
\ No newline at end of file
[Diff of a second data index file: ~500 shuffled sample IDs (plain integers, e.g. 561, 97, 101, ... up to 591); full contents omitted.]
@@ -42,7 +42,8 @@ class MultiUnet(nn.Module):
         self.pool = nn.MaxPool2d(2)
         self.outconv = nn.Sequential(
             nn.Conv2d(self.res9.outc, n_classes, kernel_size = 1),
-            nn.Sigmoid()
+            nn.Softmax()
+            #nn.Sigmoid()
         )
         # self.outconv = nn.Conv2d(self.res9.outc, n_classes, kernel_size = 1)
......
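One caveat with this hunk, noted here as an observation rather than a fix: nn.Softmax() without a dim argument triggers PyTorch's "implicit dimension choice" warning, and for a single-channel head (n_classes = 1) a softmax across the channel axis collapses to a constant 1.0, leaving the downstream BCELoss with no signal. A small self-contained illustration:

import torch
from torch import nn

logits = torch.randn(2, 1, 4, 4)       # (batch, n_classes = 1, H, W)
sig = nn.Sigmoid()(logits)             # independent per-pixel probabilities
soft = nn.Softmax(dim=1)(logits)       # normalises across the channel axis

print(torch.allclose(soft, torch.ones_like(soft)))  # True: softmax over a
                                                    # single channel is all ones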
@@ -28,8 +28,8 @@ def train_net(net, device, epochs = 5, batch_size = 1, lr = 0.1):
     val_loader = DataLoader(evalset, batch_size = batch_size, shuffle = False, num_workers = 8, pin_memory = True)
     optimizer = optim.Adam(net.parameters(), lr = lr)
-    criterion = nn.BCELoss()#nn.BCEWithLogitsLoss()
-    scheduler = lr_scheduler.StepLR(optimizer,30,0.5)#lr_scheduler.ReduceLROnPlateau(optimizer, 'min')
+    criterion = nn.BCELoss()# nn.BCEWithLogitsLoss()
+    scheduler = lr_scheduler.StepLR(optimizer, 30, 0.5)# lr_scheduler.ReduceLROnPlateau(optimizer, 'min')
     for epoch in range(epochs):
         net.train()
......
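For context on the commented alternative: nn.BCELoss() expects probabilities already in [0, 1], so it pairs with a Sigmoid (or the Softmax above) at the end of the network, while nn.BCEWithLogitsLoss() takes raw logits and applies the sigmoid internally, which is the numerically safer combination. A hedged sketch of the equivalence (synthetic tensors):

import torch
from torch import nn

logits = torch.randn(4, 1)
targets = torch.rand(4, 1)

loss_probs = nn.BCELoss()(torch.sigmoid(logits), targets)  # expects probabilities
loss_logits = nn.BCEWithLogitsLoss()(logits, targets)      # fused sigmoid, stabler
print(torch.allclose(loss_probs, loss_logits))             # True up to fp error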
@@ -7,7 +7,7 @@ from torch.utils.data import DataLoader
 from torch.optim import lr_scheduler
 from ignite.contrib.handlers.param_scheduler import LRScheduler
 from ignite.engine import Events, create_supervised_trainer, create_supervised_evaluator
-from ignite.metrics import Accuracy, Loss, DiceCoefficient, ConfusionMatrix, RunningAverage
+from ignite.metrics import Accuracy, Loss, DiceCoefficient, ConfusionMatrix, RunningAverage, mIoU
 from ignite.contrib.handlers import ProgressBar
 from argparse import ArgumentParser
@@ -34,11 +34,13 @@ def run(train_batch_size, val_batch_size, epochs, lr):
     optimizer = optim.Adam(model.parameters(), lr = lr)
     cm = ConfusionMatrix(num_classes = 1)
     dice = DiceCoefficient(cm)
+    iou = mIoU(cm)
     loss = torch.nn.BCELoss() # torch.nn.NLLLoss()
-    scheduler = LRScheduler(lr_scheduler.ReduceLROnPlateau(optimizer))
+    scheduler = LRScheduler(lr_scheduler.StepLR(optimizer, 30, 0.5))
     trainer = create_supervised_trainer(model, optimizer, loss, device = device)
-    evaluator = create_supervised_evaluator(model, metrics = {'accuracy': Accuracy(), 'dice': dice, 'nll': Loss(loss)},
+    evaluator = create_supervised_evaluator(model,
+                                            metrics = {'accuracy': Accuracy(), 'dice': dice, 'nll': Loss(loss)},
                                             device = device)
     RunningAverage(output_transform = lambda x: x).attach(trainer, 'loss')
     trainer.add_event_handler(Events.EPOCH_COMPLETED, scheduler)
......
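Note that iou = mIoU(cm) is created above but never added to the evaluator's metrics dict, so as committed it is dead code; also, for binary segmentation ignite's ConfusionMatrix is normally built with num_classes = 2 (background plus foreground) rather than 1. A self-contained sketch of wiring the metric in (the 'iou' key is an assumed name):

# Sketch only: build the confusion-matrix-based metrics so mIoU is actually used.
from ignite.metrics import Accuracy, ConfusionMatrix, DiceCoefficient, mIoU

cm = ConfusionMatrix(num_classes=2)   # binary segmentation: background + foreground
dice = DiceCoefficient(cm)
iou = mIoU(cm)
metrics = {'accuracy': Accuracy(), 'dice': dice, 'iou': iou}
# pass `metrics` to create_supervised_evaluator(model, metrics=metrics, device=device)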
@@ -47,8 +47,8 @@ def train_net(net, device, epochs = 5, batch_size = 1, lr = 0.1, save_cp = True)
     # optimizer = optim.Adam(net.parameters(), lr=lr, weight_decay = 1e-8)
     optimizer = optim.RMSprop(net.parameters(), lr = lr, weight_decay = 1e-8)
-    scheduler = lr_scheduler.ReduceLROnPlateau(optimizer, 'min')
-    # criterion = nn.BCEWithLogitsLoss()
+    #scheduler = lr_scheduler.ReduceLROnPlateau(optimizer, 'min')
+    scheduler = lr_scheduler.CyclicLR(optimizer, base_lr = 1e-10, max_lr = 0.01)
     if net.n_classes > 1:
         criterion = nn.CrossEntropyLoss()
     else:
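Two CyclicLR defaults are worth knowing for this configuration: base_lr = 1e-10 makes the bottom of each cycle effectively zero, and step_size_up defaults to 2000 optimizer steps, so on a small dataset one half-cycle can stretch over many epochs. A tiny sketch with an artificially short cycle to show the ramp (stand-in model):

from torch import nn, optim
from torch.optim import lr_scheduler

opt = optim.RMSprop(nn.Linear(2, 2).parameters(), lr=0.1)
sched = lr_scheduler.CyclicLR(opt, base_lr=1e-10, max_lr=0.01,
                              step_size_up=4)    # default is 2000 steps
for i in range(8):
    print(i, sched.get_last_lr()[0])             # ~0 -> 0.01 over 4 steps, then back
    opt.step()
    sched.step()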
@@ -59,13 +59,6 @@ def train_net(net, device, epochs = 5, batch_size = 1, lr = 0.1, save_cp = True)
         epoch_loss = 0
         with tqdm(total = n_train, desc = f'Epoch {epoch + 1}/{epochs}', unit = 'img') as pbar:
             for imgs,true_masks in train_loader:
-                # imgs = batch['image']
-                # true_masks = batch['mask']
-                # assert imgs.shape[1] == net.n_channels, \
-                #     f'Network has been defined with {net.n_channels} input channels, ' \
-                #     f'but loaded images have {imgs.shape[1]} channels. Please check that ' \
-                #     'the images are loaded correctly.'
                 imgs = imgs.to(device = device, dtype = torch.float32)
                 mask_type = torch.float32 if net.n_classes == 1 else torch.long
                 true_masks = true_masks.to(device = device, dtype = mask_type)
@@ -80,11 +73,11 @@ def train_net(net, device, epochs = 5, batch_size = 1, lr = 0.1, save_cp = True)
                 optimizer.zero_grad()
                 loss.backward()
                 optimizer.step()
+                scheduler.step()
                 pbar.update(imgs.shape[0])
                 global_step += 1 # if global_step % (len(dataset) // (10 * batch_size)) == 0:
             val_score = eval_net(net, val_loader, device, n_val)
-            scheduler.step(val_score)
+            #scheduler.step(val_score)
             if net.n_classes > 1:
                 logging.info('Validation cross entropy: {}'.format(val_score))
                 writer.add_scalar('Loss/test', val_score, global_step)
......
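The moved call matches CyclicLR's contract: it advances once per optimizer step and takes no argument, whereas the now-commented scheduler.step(val_score) is the ReduceLROnPlateau signature, stepped once per validation pass with the monitored metric. A short contrast of the two calling conventions (stand-in optimizer):

from torch import nn, optim
from torch.optim import lr_scheduler

opt = optim.RMSprop(nn.Linear(2, 1).parameters(), lr=0.1)
lr_scheduler.CyclicLR(opt, base_lr=1e-10, max_lr=0.01).step()  # per batch, no args
lr_scheduler.ReduceLROnPlateau(opt, 'min').step(0.42)          # per eval, takes metric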
@@ -2,6 +2,7 @@ import torch
 import torch.nn.functional as F
 from tqdm import tqdm
 from sklearn.metrics import jaccard_score
+import numpy as np
 from utils.dice_loss import dice_coeff, dice_coef
@@ -42,6 +43,7 @@ def eval_jac(net, loader, device, n_val):
             pred_masks = torch.round(pred_masks).cpu().detach().numpy()
             true_masks = torch.round(true_masks).cpu().numpy()
+            pred_masks = np.array([1 if x>0 else 0 for x in pred_masks])
             jac += jaccard_score(true_masks.flatten(), pred_masks.flatten())
             pbar.update(imgs.shape[0])
......
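One caution on the added binarisation line: the comprehension iterates over the first axis of the array, so unless pred_masks is already one-dimensional each x is itself an array and `1 if x>0 else 0` raises a "truth value of an array is ambiguous" error. A vectorised form works for any shape (synthetic shapes assumed):

import numpy as np
from sklearn.metrics import jaccard_score

pred_masks = np.random.randn(2, 1, 4, 4)             # raw predictions
true_masks = np.random.randint(0, 2, (2, 1, 4, 4))   # binary ground truth

pred_bin = (pred_masks > 0).astype(np.uint8)         # shape-agnostic binarisation
print(jaccard_score(true_masks.flatten(), pred_bin.flatten()))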