End of training

- README.md +129 -33
- config.json +1 -1
- model.safetensors +1 -1
- training_args.bin +1 -1

README.md CHANGED
@@ -18,12 +18,12 @@ should probably proofread and complete it, then remove this comment. -->
This model is a fine-tuned version of [nvidia/mit-b0](https://huggingface.co/nvidia/mit-b0) on the PushkarA07/batch1-tiles dataset.
It achieves the following results on the evaluation set:
-- Loss: 0.
-- Mean Iou: 0.
-- Mean Accuracy: 0.
-- Overall Accuracy: 0.
-- Accuracy Abnormality: 0.
-- Iou Abnormality: 0.

## Model description
@@ -48,36 +48,132 @@ The following hyperparameters were used during training:
- seed: 42
- optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
-- num_epochs:

### Training results

-| Training Loss | Epoch | … (header and 24 rows of the previous training-results table, truncated)
### Framework versions

This model is a fine-tuned version of [nvidia/mit-b0](https://huggingface.co/nvidia/mit-b0) on the PushkarA07/batch1-tiles dataset.
It achieves the following results on the evaluation set:
+- Loss: 0.1618
+- Mean Iou: 0.6159
+- Mean Accuracy: 0.8898
+- Overall Accuracy: 0.9837
+- Accuracy Abnormality: 0.7946
+- Iou Abnormality: 0.2483

## Model description

- seed: 42
- optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
- lr_scheduler_type: linear
+- num_epochs: 50

### Training results
+| Training Loss | Epoch | Step | Validation Loss | Mean Iou | Mean Accuracy | Overall Accuracy | Accuracy Abnormality | Iou Abnormality |
+|:-------------:|:-------:|:----:|:---------------:|:--------:|:-------------:|:----------------:|:--------------------:|:---------------:|
+| 0.1619 | 0.4167 | 10 | 0.1513 | 0.6050 | 0.6725 | 0.9915 | 0.3491 | 0.2185 |
+| 0.1373 | 0.8333 | 20 | 0.1805 | 0.6122 | 0.7254 | 0.9899 | 0.4573 | 0.2345 |
+| 0.1233 | 1.25 | 30 | 0.0589 | 0.6153 | 0.6569 | 0.9931 | 0.3160 | 0.2374 |
+| 0.074 | 1.6667 | 40 | 0.3388 | 0.5865 | 0.7899 | 0.9828 | 0.5942 | 0.1901 |
+| 0.0685 | 2.0833 | 50 | 0.3790 | 0.6202 | 0.7526 | 0.9896 | 0.5123 | 0.2509 |
+| 0.1008 | 2.5 | 60 | 0.2782 | 0.5968 | 0.7249 | 0.9880 | 0.4582 | 0.2056 |
+| 0.0552 | 2.9167 | 70 | 0.3090 | 0.5947 | 0.7771 | 0.9851 | 0.5664 | 0.2044 |
+| 0.0547 | 3.3333 | 80 | 0.2703 | 0.6245 | 0.7386 | 0.9906 | 0.4832 | 0.2584 |
+| 0.0817 | 3.75 | 90 | 0.1983 | 0.6361 | 0.7386 | 0.9916 | 0.4821 | 0.2806 |
+| 0.0504 | 4.1667 | 100 | 0.2145 | 0.6303 | 0.7884 | 0.9894 | 0.5846 | 0.2713 |
+| 0.0547 | 4.5833 | 110 | 0.0654 | 0.6353 | 0.6768 | 0.9937 | 0.3555 | 0.2768 |
+| 0.0372 | 5.0 | 120 | 0.1221 | 0.6464 | 0.7178 | 0.9931 | 0.4388 | 0.2997 |
+| 0.0475 | 5.4167 | 130 | 0.1833 | 0.6634 | 0.7749 | 0.9925 | 0.5542 | 0.3342 |
+| 0.0389 | 5.8333 | 140 | 0.2456 | 0.6301 | 0.7680 | 0.9901 | 0.5430 | 0.2702 |
+| 0.042 | 6.25 | 150 | 0.1648 | 0.5974 | 0.6732 | 0.9907 | 0.3513 | 0.2042 |
+| 0.04 | 6.6667 | 160 | 0.1478 | 0.6267 | 0.6996 | 0.9923 | 0.4029 | 0.2611 |
+| 0.0309 | 7.0833 | 170 | 0.0605 | 0.6090 | 0.6342 | 0.9937 | 0.2698 | 0.2243 |
+| 0.0787 | 7.5 | 180 | 0.0779 | 0.6639 | 0.7218 | 0.9940 | 0.4459 | 0.3339 |
+| 0.0317 | 7.9167 | 190 | 0.1875 | 0.6402 | 0.7243 | 0.9924 | 0.4526 | 0.2881 |
+| 0.0439 | 8.3333 | 200 | 0.1459 | 0.6316 | 0.7088 | 0.9923 | 0.4213 | 0.2709 |
+| 0.0607 | 8.75 | 210 | 0.1903 | 0.6315 | 0.8029 | 0.9890 | 0.6143 | 0.2740 |
+| 0.0262 | 9.1667 | 220 | 0.1657 | 0.6443 | 0.7969 | 0.9904 | 0.6008 | 0.2982 |
+| 0.0391 | 9.5833 | 230 | 0.2042 | 0.6510 | 0.7836 | 0.9914 | 0.5730 | 0.3106 |
+| 0.022 | 10.0 | 240 | 0.1800 | 0.6452 | 0.7819 | 0.9910 | 0.5701 | 0.2994 |
+| 0.0248 | 10.4167 | 250 | 0.1799 | 0.6366 | 0.7956 | 0.9897 | 0.5989 | 0.2835 |
+| 0.0289 | 10.8333 | 260 | 0.1100 | 0.6432 | 0.7579 | 0.9916 | 0.5211 | 0.2948 |
+| 0.0256 | 11.25 | 270 | 0.1516 | 0.6449 | 0.7994 | 0.9904 | 0.6057 | 0.2995 |
+| 0.0255 | 11.6667 | 280 | 0.1598 | 0.6303 | 0.7863 | 0.9894 | 0.5804 | 0.2713 |
+| 0.0217 | 12.0833 | 290 | 0.1557 | 0.6312 | 0.7817 | 0.9897 | 0.5708 | 0.2728 |
+| 0.0249 | 12.5 | 300 | 0.1799 | 0.6225 | 0.8075 | 0.9878 | 0.6247 | 0.2573 |
+| 0.0285 | 12.9167 | 310 | 0.1267 | 0.6403 | 0.7901 | 0.9903 | 0.5873 | 0.2904 |
+| 0.0239 | 13.3333 | 320 | 0.0714 | 0.6479 | 0.6997 | 0.9937 | 0.4017 | 0.3022 |
+| 0.0233 | 13.75 | 330 | 0.1488 | 0.6475 | 0.8028 | 0.9905 | 0.6125 | 0.3046 |
+| 0.0188 | 14.1667 | 340 | 0.1068 | 0.6587 | 0.7911 | 0.9918 | 0.5878 | 0.3258 |
+| 0.0281 | 14.5833 | 350 | 0.2046 | 0.6054 | 0.8830 | 0.9821 | 0.7825 | 0.2287 |
+| 0.0265 | 15.0 | 360 | 0.2981 | 0.5965 | 0.9171 | 0.9788 | 0.8547 | 0.2143 |
+| 0.0172 | 15.4167 | 370 | 0.2661 | 0.6060 | 0.9004 | 0.9815 | 0.8181 | 0.2307 |
+| 0.0154 | 15.8333 | 380 | 0.2225 | 0.5984 | 0.8872 | 0.9805 | 0.7926 | 0.2164 |
+| 0.0252 | 16.25 | 390 | 0.1822 | 0.6164 | 0.8412 | 0.9857 | 0.6948 | 0.2472 |
+| 0.0189 | 16.6667 | 400 | 0.2648 | 0.6006 | 0.8980 | 0.9805 | 0.8143 | 0.2207 |
+| 0.0136 | 17.0833 | 410 | 0.2844 | 0.6013 | 0.9158 | 0.9799 | 0.8509 | 0.2228 |
+| 0.025 | 17.5 | 420 | 0.1384 | 0.6483 | 0.8273 | 0.9899 | 0.6624 | 0.3068 |
+| 0.0226 | 17.9167 | 430 | 0.1755 | 0.6376 | 0.7997 | 0.9897 | 0.6070 | 0.2856 |
+| 0.0155 | 18.3333 | 440 | 0.1890 | 0.6185 | 0.8376 | 0.9861 | 0.6872 | 0.2509 |
+| 0.0247 | 18.75 | 450 | 0.1583 | 0.6169 | 0.8259 | 0.9863 | 0.6633 | 0.2476 |
+| 0.0225 | 19.1667 | 460 | 0.2150 | 0.6131 | 0.8936 | 0.9831 | 0.8030 | 0.2433 |
+| 0.0304 | 19.5833 | 470 | 0.1890 | 0.6378 | 0.8729 | 0.9873 | 0.7569 | 0.2882 |
+| 0.0176 | 20.0 | 480 | 0.2284 | 0.6018 | 0.9000 | 0.9807 | 0.8181 | 0.2230 |
+| 0.0095 | 20.4167 | 490 | 0.1498 | 0.6163 | 0.8501 | 0.9853 | 0.7131 | 0.2474 |
+| 0.03 | 20.8333 | 500 | 0.1366 | 0.6422 | 0.8542 | 0.9884 | 0.7181 | 0.2961 |
+| 0.0178 | 21.25 | 510 | 0.1910 | 0.6260 | 0.8968 | 0.9850 | 0.8074 | 0.2671 |
+| 0.0196 | 21.6667 | 520 | 0.2018 | 0.6174 | 0.9095 | 0.9832 | 0.8348 | 0.2518 |
+| 0.0268 | 22.0833 | 530 | 0.2358 | 0.5935 | 0.8915 | 0.9793 | 0.8025 | 0.2078 |
+| 0.0187 | 22.5 | 540 | 0.2115 | 0.6146 | 0.8980 | 0.9832 | 0.8117 | 0.2462 |
+| 0.0156 | 22.9167 | 550 | 0.1576 | 0.6254 | 0.8596 | 0.9862 | 0.7312 | 0.2647 |
+| 0.026 | 23.3333 | 560 | 0.1362 | 0.6446 | 0.8795 | 0.9879 | 0.7696 | 0.3013 |
+| 0.0148 | 23.75 | 570 | 0.1640 | 0.6286 | 0.8897 | 0.9856 | 0.7924 | 0.2716 |
+| 0.0096 | 24.1667 | 580 | 0.1256 | 0.6432 | 0.8557 | 0.9885 | 0.7210 | 0.2980 |
+| 0.0211 | 24.5833 | 590 | 0.1994 | 0.5971 | 0.8474 | 0.9821 | 0.7108 | 0.2121 |
+| 0.017 | 25.0 | 600 | 0.1699 | 0.6038 | 0.8469 | 0.9834 | 0.7085 | 0.2242 |
+| 0.0221 | 25.4167 | 610 | 0.1701 | 0.6048 | 0.8441 | 0.9837 | 0.7025 | 0.2260 |
+| 0.0216 | 25.8333 | 620 | 0.1728 | 0.6120 | 0.8843 | 0.9832 | 0.7840 | 0.2409 |
+| 0.0192 | 26.25 | 630 | 0.1062 | 0.6500 | 0.8565 | 0.9892 | 0.7221 | 0.3110 |
+| 0.0176 | 26.6667 | 640 | 0.1028 | 0.6559 | 0.8452 | 0.9900 | 0.6983 | 0.3219 |
+| 0.0133 | 27.0833 | 650 | 0.0862 | 0.6552 | 0.8161 | 0.9908 | 0.6390 | 0.3196 |
+| 0.0225 | 27.5 | 660 | 0.1046 | 0.6405 | 0.8491 | 0.9884 | 0.7080 | 0.2926 |
+| 0.0127 | 27.9167 | 670 | 0.1943 | 0.5683 | 0.8519 | 0.9745 | 0.7275 | 0.1622 |
+| 0.0134 | 28.3333 | 680 | 0.1028 | 0.6333 | 0.8138 | 0.9888 | 0.6364 | 0.2778 |
+| 0.0207 | 28.75 | 690 | 0.1341 | 0.6222 | 0.8383 | 0.9866 | 0.6881 | 0.2578 |
+| 0.0103 | 29.1667 | 700 | 0.1475 | 0.6159 | 0.8257 | 0.9862 | 0.6631 | 0.2456 |
+| 0.0182 | 29.5833 | 710 | 0.1103 | 0.6339 | 0.8279 | 0.9884 | 0.6653 | 0.2795 |
+| 0.0198 | 30.0 | 720 | 0.1642 | 0.5979 | 0.8511 | 0.9821 | 0.7182 | 0.2138 |
+| 0.0277 | 30.4167 | 730 | 0.1497 | 0.6142 | 0.8672 | 0.9843 | 0.7486 | 0.2442 |
+| 0.0144 | 30.8333 | 740 | 0.1471 | 0.6212 | 0.8639 | 0.9855 | 0.7407 | 0.2571 |
+| 0.0277 | 31.25 | 750 | 0.1620 | 0.6210 | 0.8819 | 0.9848 | 0.7776 | 0.2572 |
+| 0.0162 | 31.6667 | 760 | 0.1351 | 0.6281 | 0.8820 | 0.9858 | 0.7767 | 0.2705 |
+| 0.0213 | 32.0833 | 770 | 0.1250 | 0.6340 | 0.8688 | 0.9870 | 0.7490 | 0.2810 |
+| 0.0214 | 32.5 | 780 | 0.1603 | 0.6104 | 0.8804 | 0.9831 | 0.7762 | 0.2377 |
+| 0.007 | 32.9167 | 790 | 0.1360 | 0.6180 | 0.8683 | 0.9848 | 0.7501 | 0.2512 |
+| 0.0265 | 33.3333 | 800 | 0.1494 | 0.6281 | 0.8954 | 0.9853 | 0.8042 | 0.2709 |
+| 0.0179 | 33.75 | 810 | 0.1267 | 0.6331 | 0.8673 | 0.9870 | 0.7460 | 0.2794 |
+| 0.0153 | 34.1667 | 820 | 0.1530 | 0.6206 | 0.8603 | 0.9855 | 0.7333 | 0.2558 |
+| 0.0236 | 34.5833 | 830 | 0.1268 | 0.6394 | 0.8705 | 0.9876 | 0.7517 | 0.2912 |
+| 0.0137 | 35.0 | 840 | 0.1321 | 0.6367 | 0.8843 | 0.9868 | 0.7804 | 0.2867 |
+| 0.0263 | 35.4167 | 850 | 0.1170 | 0.6293 | 0.8537 | 0.9869 | 0.7187 | 0.2718 |
+| 0.0186 | 35.8333 | 860 | 0.1286 | 0.6394 | 0.8364 | 0.9887 | 0.6820 | 0.2902 |
+| 0.0117 | 36.25 | 870 | 0.1506 | 0.6311 | 0.8701 | 0.9866 | 0.7519 | 0.2756 |
+| 0.0104 | 36.6667 | 880 | 0.1351 | 0.6380 | 0.8475 | 0.9882 | 0.7048 | 0.2879 |
+| 0.0137 | 37.0833 | 890 | 0.1565 | 0.6193 | 0.8743 | 0.9848 | 0.7622 | 0.2538 |
+| 0.0222 | 37.5 | 900 | 0.1629 | 0.6161 | 0.8789 | 0.9841 | 0.7722 | 0.2481 |
+| 0.0092 | 37.9167 | 910 | 0.1667 | 0.5870 | 0.8763 | 0.9785 | 0.7727 | 0.1956 |
+| 0.0227 | 38.3333 | 920 | 0.1546 | 0.6032 | 0.8661 | 0.9824 | 0.7482 | 0.2241 |
+| 0.011 | 38.75 | 930 | 0.1310 | 0.6329 | 0.8840 | 0.9864 | 0.7802 | 0.2795 |
+| 0.0132 | 39.1667 | 940 | 0.1054 | 0.6186 | 0.8342 | 0.9862 | 0.6801 | 0.2509 |
+| 0.0181 | 39.5833 | 950 | 0.1442 | 0.5947 | 0.8040 | 0.9837 | 0.6218 | 0.2058 |
+| 0.0113 | 40.0 | 960 | 0.1567 | 0.6018 | 0.8580 | 0.9825 | 0.7318 | 0.2212 |
+| 0.0162 | 40.4167 | 970 | 0.0834 | 0.6600 | 0.8382 | 0.9906 | 0.6837 | 0.3294 |
+| 0.0152 | 40.8333 | 980 | 0.1106 | 0.6444 | 0.8671 | 0.9883 | 0.7442 | 0.3006 |
+| 0.0199 | 41.25 | 990 | 0.1341 | 0.6323 | 0.8739 | 0.9866 | 0.7597 | 0.2780 |
+| 0.0192 | 41.6667 | 1000 | 0.1732 | 0.5949 | 0.8747 | 0.9804 | 0.7676 | 0.2096 |
+| 0.0136 | 42.0833 | 1010 | 0.1473 | 0.6284 | 0.8939 | 0.9854 | 0.8012 | 0.2714 |
+| 0.0126 | 42.5 | 1020 | 0.0869 | 0.6555 | 0.8462 | 0.9900 | 0.7004 | 0.3211 |
+| 0.0179 | 42.9167 | 1030 | 0.0928 | 0.6354 | 0.8351 | 0.9883 | 0.6797 | 0.2825 |
+| 0.0206 | 43.3333 | 1040 | 0.1450 | 0.6171 | 0.8574 | 0.9851 | 0.7280 | 0.2491 |
+| 0.0095 | 43.75 | 1050 | 0.1140 | 0.6306 | 0.8392 | 0.9876 | 0.6888 | 0.2737 |
+| 0.0188 | 44.1667 | 1060 | 0.1254 | 0.6334 | 0.8655 | 0.9871 | 0.7423 | 0.2798 |
+| 0.0209 | 44.5833 | 1070 | 0.1536 | 0.6153 | 0.8817 | 0.9839 | 0.7782 | 0.2468 |
+| 0.018 | 45.0 | 1080 | 0.1528 | 0.6175 | 0.8839 | 0.9842 | 0.7823 | 0.2510 |
+| 0.0069 | 45.4167 | 1090 | 0.1676 | 0.6130 | 0.8967 | 0.9829 | 0.8092 | 0.2431 |
+| 0.0256 | 45.8333 | 1100 | 0.1366 | 0.6236 | 0.8620 | 0.9859 | 0.7364 | 0.2614 |
+| 0.0146 | 46.25 | 1110 | 0.1316 | 0.6297 | 0.8754 | 0.9862 | 0.7630 | 0.2733 |
+| 0.0099 | 46.6667 | 1120 | 0.1145 | 0.6333 | 0.8446 | 0.9877 | 0.6995 | 0.2789 |
+| 0.012 | 47.0833 | 1130 | 0.1002 | 0.6323 | 0.8201 | 0.9885 | 0.6495 | 0.2761 |
+| 0.0098 | 47.5 | 1140 | 0.1338 | 0.6232 | 0.8738 | 0.9854 | 0.7606 | 0.2611 |
+| 0.0188 | 47.9167 | 1150 | 0.1078 | 0.6347 | 0.8114 | 0.9890 | 0.6313 | 0.2804 |
+| 0.0261 | 48.3333 | 1160 | 0.1683 | 0.6158 | 0.9015 | 0.9832 | 0.8187 | 0.2485 |
+| 0.0122 | 48.75 | 1170 | 0.1556 | 0.6212 | 0.8801 | 0.9849 | 0.7738 | 0.2576 |
+| 0.0219 | 49.1667 | 1180 | 0.1272 | 0.6364 | 0.8799 | 0.9869 | 0.7713 | 0.2860 |
+| 0.016 | 49.5833 | 1190 | 0.1807 | 0.6048 | 0.9139 | 0.9807 | 0.8462 | 0.2290 |
+| 0.0141 | 50.0 | 1200 | 0.1618 | 0.6159 | 0.8898 | 0.9837 | 0.7946 | 0.2483 |

### Framework versions
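The metric columns in the new results table (Mean Iou, Mean Accuracy, Overall Accuracy, and the per-label Accuracy/Iou Abnormality) match the output keys of the `evaluate` library's `mean_iou` metric; whether that metric was used here is an assumption based on the column names. A minimal sketch of computing those values for a two-label setup (0 = normal, 1 = abnormality) on placeholder label maps:

```python
import evaluate
import numpy as np

# Load the mean_iou metric from the evaluate hub.
metric = evaluate.load("mean_iou")

# Placeholder predicted and reference segmentation maps (integer label ids).
rng = np.random.default_rng(0)
predictions = [rng.integers(0, 2, size=(512, 512))]
references = [rng.integers(0, 2, size=(512, 512))]

results = metric.compute(
    predictions=predictions,
    references=references,
    num_labels=2,          # normal + abnormality
    ignore_index=255,      # pixels with this label are excluded
    reduce_labels=False,
)

# Keys correspond to the table columns above.
print(results["mean_iou"], results["mean_accuracy"], results["overall_accuracy"])
print(results["per_category_accuracy"][1], results["per_category_iou"][1])  # abnormality
```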
config.json CHANGED
@@ -1,5 +1,5 @@
{
-  "_name_or_path": "
+  "_name_or_path": "PushkarA07/segformer-b0-finetuned-batch1-30nov",
  "architectures": [
    "SegformerForSemanticSegmentation"
  ],
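The `_name_or_path` field records the checkpoint id `PushkarA07/segformer-b0-finetuned-batch1-30nov`. A minimal inference sketch using the standard `transformers` SegFormer API, assuming that repository id is accessible and ships an image-processor config alongside the weights (`tile.png` is a placeholder input):

```python
import torch
from PIL import Image
from transformers import AutoImageProcessor, SegformerForSemanticSegmentation

repo_id = "PushkarA07/segformer-b0-finetuned-batch1-30nov"  # taken from _name_or_path above

processor = AutoImageProcessor.from_pretrained(repo_id)
model = SegformerForSemanticSegmentation.from_pretrained(repo_id)
model.eval()

image = Image.open("tile.png").convert("RGB")  # placeholder tile
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, num_labels, H/4, W/4)

# Upsample logits to the input resolution and take the per-pixel argmax.
upsampled = torch.nn.functional.interpolate(
    logits, size=image.size[::-1], mode="bilinear", align_corners=False
)
pred_mask = upsampled.argmax(dim=1)[0]
```

The upsampling step is needed because SegFormer predicts at 1/4 of the input resolution.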
model.safetensors CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7662ce67d279c729cb160e9d3b5327abf5c1abcbd62a3c2aab3b31aa39b68eb7
size 14884776
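This entry is a Git LFS pointer rather than the weights themselves; `oid sha256:` is the SHA-256 digest of the actual `model.safetensors` blob. A small sketch for checking a downloaded copy against that digest (the local file name is an assumption):

```python
import hashlib
from pathlib import Path

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file in chunks and return its hex SHA-256 digest."""
    h = hashlib.sha256()
    with Path(path).open("rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Digest copied from the pointer file above.
expected = "7662ce67d279c729cb160e9d3b5327abf5c1abcbd62a3c2aab3b31aa39b68eb7"
assert sha256_of("model.safetensors") == expected
```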
training_args.bin CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:95b26ff97308a202a3a2cd07ee736889452cb0a4205f155dcfaa08e4c25c2be0
size 5304
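`training_args.bin` is likewise an LFS pointer, here to the `TrainingArguments` object that the `transformers` Trainer serializes with `torch.save`. A hedged sketch for inspecting a local copy (requires `transformers` installed so the pickled class resolves; this is a pickle load, so only do it for files you trust):

```python
import torch

# Load the serialized TrainingArguments; weights_only=False is required on recent PyTorch
# because this file is a pickled Python object, not a plain tensor checkpoint.
args = torch.load("training_args.bin", weights_only=False)

# Fields corresponding to the hyperparameters listed in the model card.
print(args.seed, args.optim, args.lr_scheduler_type, args.num_train_epochs)
```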