diff --git a/out.html b/out.html
new file mode 100644
index 0000000..7de9363
--- /dev/null
+++ b/out.html
@@ -0,0 +1,2237 @@
+spambase
+Loading spambase (Spambase Data Set)
+#instances=4601, type=, #features=57, #classes=[0 1], prevs=[0.606, 0.394]
+
+ idx | base: 0 1     | true: T0 F1 F0 T1           | estim: T0 F1 F0 T1          | errors: mae rae mrae kld nkld f1e_true f1e_estim
+   0 | 0.0000 1.0000 | 0.0000 0.0000 0.1000 0.9000 | 0.0000 0.0038 0.0000 0.9962 | 0.0500 0.4571 0.4571 0.2254 0.1122 NaN 1.0000
+   1 | 0.0000 1.0000 | 0.0000 0.0000 0.0800 0.9200 | 0.0000 0.0008 0.0000 0.9992 | 0.0400 0.2981 0.2981 0.1641 0.0819 NaN 0.9907
+   2 | 0.0000 1.0000 | 0.0000 0.0000 0.1100 0.8900 | 0.0000 0.0012 0.0000 0.9988 | 0.0550 0.3289 0.3289 0.2568 0.1277 NaN 1.0000
+   3 | 0.0000 1.0000 | 0.0000 0.0000 0.0900 0.9100 | 0.0000 0.0073 0.0000 0.9927 | 0.0450 0.6231 0.6231 0.1960 0.0977 NaN 0.9999
+   4 | 0.0000 1.0000 | 0.0000 0.0000 0.1000 0.9000 | 0.0000 0.0178 0.0000 0.9822 | 0.0500 1.1522 1.1522 0.2334 0.1162 NaN 1.0000
+   5 | 0.0000 1.0000 | 0.0000 0.0000 0.0600 0.9400 | 0.0000 0.0007 0.0000 0.9993 | 0.0300 0.2796 0.2796 0.1086 0.0542 NaN 1.0000
+   6 | 0.0000 1.0000 | 0.0000 0.0000 0.1200 0.8800 | 0.0000 0.0008 0.0000 0.9992 | 0.0600 0.3150 0.3150 0.2898 0.1439 NaN 1.0000
+   7 | 0.0000 1.0000 | 0.0000 0.0000 0.1100 0.8900 | 0.0000 0.0005 0.0000 0.9995 | 0.0550 0.2965 0.2965 0.2568 0.1277 NaN 1.0000
+   8 | 0.0000 1.0000 | 0.0000 0.0000 0.0900 0.9100 | 0.0000 0.0009 0.0000 0.9991 | 0.0450 0.3055 0.3055 0.1939 0.0966 NaN 0.9761
+   9 | 0.0000 1.0000 | 0.0000 0.0000 0.1400 0.8600 | 0.0000 0.0222 0.0000 0.9778 | 0.0700 1.3867 1.3867 0.3694 0.1826 NaN 1.0000
+  10 | 0.1000 0.9000 | 0.0900 0.0100 0.0800 0.8200 | 0.0085 0.2046 0.0000 0.7869 | 0.0973 3.7034 3.7034 0.4204 0.2072 0.3333 0.9233
+  11 | 0.1000 0.9000 | 0.1000 0.0000 0.1100 0.7900 | 0.0288 0.0769 0.0000 0.8943 | 0.0906 4.2863 4.2863 0.3676 0.1817 0.3548 0.5715
+  12 | 0.1000 0.9000 | 0.1000 0.0000 0.1000 0.8000 | 0.0649 0.0501 0.0000 0.8850 | 0.0675 2.8520 2.8520 0.2689 0.1337 0.3333 0.2786
+  13 | 0.1000 0.9000 | 0.1000 0.0000 0.0600 0.8400 | 0.0182 0.1656 0.0000 0.8161 | 0.0828 8.7145 8.7145 0.3317 0.1643 0.2308 0.8196
+  14 | 0.1000 0.9000 | 0.0900 0.0100 0.0800 0.8200 | 0.0243 0.1076 0.0000 0.8681 | 0.0728 2.0492 2.0492 0.2755 0.1369 0.3333 0.6885
+  15 | 0.1000 0.9000 | 0.1000 0.0000 0.0800 0.8200 | 0.0767 0.0219 0.0000 0.9015 | 0.0517 1.4088 1.4088 0.1811 0.0903 0.2857 0.1248
+  16 | 0.1000 0.9000 | 0.0800 0.0200 0.0700 0.8300 | 0.0176 0.0496 0.0001 0.9327 | 0.0662 0.7438 0.7438 0.1981 0.0987 0.3600 0.5856
+  17 | 0.1000 0.9000 | 0.0800 0.0200 0.1300 0.7700 | 0.0210 0.1940 0.0000 0.7850 | 0.0945 2.1586 2.1586 0.4788 0.2349 0.4839 0.8218
+  18 | 0.1000 0.9000 | 0.0800 0.0200 0.1200 0.7800 | 0.0178 0.1358 0.0000 0.8464 | 0.0911 1.6024 1.6024 0.4074 0.2009 0.4667 0.7928
+  19 | 0.1000 0.9000 | 0.0700 0.0300 0.1000 0.8000 | 0.0352 0.1377 0.0000 0.8271 | 0.0674 1.1316 1.1316 0.2906 0.1443 0.4815 0.6616
+  20 | 0.2000 0.8000 | 0.2000 0.0000 0.1100 0.6900 | 0.1176 0.2243 0.0000 0.6581 | 0.1122 11.5671 11.5671 0.4795 0.2353 0.2157 0.4881
+  21 | 0.2000 0.8000 | 0.2000 0.0000 0.0600 0.7400 | 0.1503 0.0440 0.0003 0.8054 | 0.0547 2.5108 2.5108 0.1455 0.0726 0.1304 0.1284
+  22 | 0.2000 0.8000 | 0.1800 0.0200 0.0700 0.7300 | 0.0650 0.1182 0.0000 0.8168 | 0.0925 1.4004 1.4004 0.2610 0.1297 0.2000 0.4762
+  23 | 0.2000 0.8000 | 0.1900 0.0100 0.1100 0.6900 | 0.1518 0.0209 0.0000 0.8273 | 0.0741 0.5186 0.5186 0.2696 0.1340 0.2400 0.0643
+  24 | 0.2000 0.8000 | 0.1900 0.0100 0.1100 0.6900 | 0.0676 0.3840 0.0000 0.5484 | 0.1870 6.6805 6.6805 0.6627 0.3197 0.2400 0.7395
+  25 | 0.2000 0.8000 | 0.2000 0.0000 0.0800 0.7200 | 0.1245 0.1006 0.0000 0.7749 | 0.0778 5.3763 5.3763 0.2668 0.1326 0.1667 0.2878
+  26 | 0.2000 0.8000 | 0.1900 0.0100 0.1000 0.7000 | 0.1407 0.0986 0.0000 0.7608 | 0.0747 1.7989 1.7989 0.2893 0.1436 0.2245 0.2594
+  27 | 0.2000 0.8000 | 0.1900 0.0100 0.0900 0.7100 | 0.0992 0.1236 0.0000 0.7772 | 0.0904 2.2699 2.2699 0.3055 0.1516 0.2083 0.3838
+  28 | 0.2000 0.8000 | 0.2000 0.0000 0.0700 0.7300 | 0.1562 0.0755 0.0000 0.7683 | 0.0569 4.0727 4.0727 0.2011 0.1002 0.1489 0.1945
+  29 | 0.2000 0.8000 | 0.1900 0.0100 0.1000 0.7000 | 0.1202 0.0460 0.0000 0.8338 | 0.0849 0.9743 0.9743 0.2652 0.1318 0.2245 0.1605
+  30 | 0.3000 0.7000 | 0.2900 0.0100 0.0800 0.6200 | 0.2028 0.1660 0.0000 0.6313 | 0.0836 2.9131 2.9131 0.2966 0.1472 0.1343 0.2904
+  31 | 0.3000 0.7000 | 0.2900 0.0100 0.0500 0.6500 | 0.2359 0.1008 0.0000 0.6634 | 0.0521 1.7908 1.7908 0.1491 0.0744 0.0938 0.1760
+  32 | 0.3000 0.7000 | 0.2700 0.0300 0.0700 0.6300 | 0.1881 0.2181 0.0000 0.5938 | 0.0940 1.6654 1.6654 0.2728 0.1355 0.1562 0.3670
+  33 | 0.3000 0.7000 | 0.2800 0.0200 0.0600 0.6400 | 0.2094 0.0879 0.0000 0.7028 | 0.0653 0.9956 0.9956 0.1552 0.0774 0.1250 0.1734
+  34 | 0.3000 0.7000 | 0.2800 0.0200 0.0300 0.6700 | 0.1789 0.0805 0.0000 0.7407 | 0.0656 0.9339 0.9339 0.0951 0.0475 0.0820 0.1836
+  35 | 0.3000 0.7000 | 0.2900 0.0100 0.1000 0.6000 | 0.1698 0.2419 0.0000 0.5884 | 0.1159 4.2090 4.2090 0.4438 0.2183 0.1594 0.4160
+  36 | 0.3000 0.7000 | 0.3000 0.0000 0.0800 0.6200 | 0.2506 0.0692 0.0000 0.6801 | 0.0647 3.7608 3.7608 0.2238 0.1114 0.1176 0.1213
+  37 | 0.3000 0.7000 | 0.2400 0.0600 0.0700 0.6300 | 0.0637 0.3139 0.0000 0.6225 | 0.1269 1.3927 1.3927 0.4190 0.2065 0.2131 0.7114
+  38 | 0.3000 0.7000 | 0.2700 0.0300 0.0600 0.6400 | 0.1499 0.2303 0.0000 0.6198 | 0.1001 1.7782 1.7782 0.2783 0.1383 0.1429 0.4343
+  39 | 0.3000 0.7000 | 0.2800 0.0200 0.0400 0.6600 | 0.1516 0.1818 0.0000 0.6666 | 0.0842 1.9554 1.9554 0.2127 0.1060 0.0968 0.3749
+  40 | 0.4000 0.6000 | 0.3900 0.0100 0.0700 0.5300 | 0.2261 0.3143 0.0000 0.4595 | 0.1522 5.4423 5.4423 0.4445 0.2187 0.0930 0.4100
+  41 | 0.4000 0.6000 | 0.3500 0.0500 0.0500 0.5500 | 0.2613 0.1893 0.0000 0.5494 | 0.0697 0.9233 0.9233 0.1652 0.0824 0.1250 0.2660
+  42 | 0.4000 0.6000 | 0.3800 0.0200 0.0800 0.5200 | 0.2133 0.2490 0.0000 0.5377 | 0.1234 2.6422 2.6422 0.3839 0.1896 0.1163 0.3686
+  43 | 0.4000 0.6000 | 0.3900 0.0100 0.0500 0.5500 | 0.2247 0.3492 0.0000 0.4261 | 0.1696 6.0405 6.0405 0.4388 0.2159 0.0714 0.4373
+  44 | 0.4000 0.6000 | 0.3800 0.0200 0.0900 0.5100 | 0.2634 0.2214 0.0000 0.5152 | 0.1033 2.3290 2.3290 0.3583 0.1773 0.1264 0.2959
+  45 | 0.4000 0.6000 | 0.3800 0.0200 0.0600 0.5400 | 0.2125 0.2861 0.0000 0.5014 | 0.1331 3.0184 3.0184 0.3653 0.1806 0.0952 0.4024
+  46 | 0.4000 0.6000 | 0.3900 0.0100 0.0500 0.5500 | 0.3225 0.1608 0.0000 0.5166 | 0.0754 2.7990 2.7990 0.2042 0.1018 0.0714 0.1996
+  47 | 0.4000 0.6000 | 0.3600 0.0400 0.0500 0.5500 | 0.1931 0.3664 0.0000 0.4405 | 0.1632 2.2041 2.2041 0.3819 0.1887 0.1111 0.4868
+  48 | 0.4000 0.6000 | 0.3500 0.0500 0.0400 0.5600 | 0.2412 0.1834 0.0000 0.5754 | 0.0744 0.9119 0.9119 0.1458 0.0728 0.1139 0.2754
+  49 | 0.4000 0.6000 | 0.3800 0.0200 0.0800 0.5200 | 0.2901 0.1835 0.0000 0.5264 | 0.0849 1.9318 1.9318 0.2863 0.1422 0.1163 0.2403
+  50 | 0.5000 0.5000 | 0.4700 0.0300 0.0200 0.4800 | 0.2661 0.2548 0.0000 0.4791 | 0.1124 1.9135 1.9135 0.2373 0.1181 0.0505 0.3237
+  51 | 0.5000 0.5000 | 0.4600 0.0400 0.0700 0.4300 | 0.3066 0.2872 0.0000 0.4062 | 0.1236 1.7029 1.7029 0.3295 0.1633 0.1068 0.3190
+  52 | 0.5000 0.5000 | 0.4900 0.0100 0.0700 0.4300 | 0.4488 0.0499 0.0000 0.5013 | 0.0556 0.9601 0.9601 0.1606 0.0801 0.0755 0.0527
+  53 | 0.5000 0.5000 | 0.4700 0.0300 0.0100 0.4900 | 0.3117 0.2813 0.0000 0.4070 | 0.1257 2.0871 2.0871 0.2263 0.1127 0.0408 0.3109
+  54 | 0.5000 0.5000 | 0.4700 0.0300 0.0500 0.4500 | 0.3857 0.1325 0.0000 0.4818 | 0.0672 1.0215 1.0215 0.1461 0.0729 0.0784 0.1466
+  55 | 0.5000 0.5000 | 0.4500 0.0500 0.0900 0.4100 | 0.3611 0.2164 0.0000 0.4224 | 0.0894 1.0498 1.0498 0.2897 0.1438 0.1346 0.2306
+  56 | 0.5000 0.5000 | 0.4300 0.0700 0.0700 0.4300 | 0.2665 0.3518 0.0000 0.3817 | 0.1409 1.2942 1.2942 0.3423 0.1695 0.1400 0.3975
+  57 | 0.5000 0.5000 | 0.4600 0.0400 0.0300 0.4700 | 0.2969 0.2003 0.0000 0.5028 | 0.0966 1.2099 1.2099 0.1690 0.0843 0.0707 0.2523
+  58 | 0.5000 0.5000 | 0.4900 0.0100 0.0200 0.4800 | 0.4031 0.0720 0.0000 0.5249 | 0.0534 1.3003 1.3003 0.0683 0.0341 0.0297 0.0820
+  59 | 0.5000 0.5000 | 0.4900 0.0100 0.0600 0.4400 | 0.3352 0.3022 0.0000 0.3626 | 0.1461 5.2225 5.2225 0.3921 0.1936 0.0667 0.3107
+  60 | 0.6000 0.4000 | 0.5500 0.0500 0.0200 0.3800 | 0.3723 0.2617 0.0000 0.3660 | 0.1059 1.2515 1.2515 0.1819 0.0907 0.0598 0.2601
+  61 | 0.6000 0.4000 | 0.5500 0.0500 0.0600 0.3400 | 0.3944 0.3478 0.0000 0.2577 | 0.1489 1.7143 1.7143 0.3410 0.1689 0.0909 0.3060
+  62 | 0.6000 0.4000 | 0.5800 0.0200 0.0600 0.3400 | 0.4883 0.1869 0.0000 0.3248 | 0.0835 1.9503 1.9503 0.2311 0.1150 0.0645 0.1607
+  63 | 0.6000 0.4000 | 0.5900 0.0100 0.0300 0.3700 | 0.4691 0.3098 0.0000 0.2211 | 0.1499 5.3611 5.3611 0.3474 0.1719 0.0328 0.2482
+  64 | 0.6000 0.4000 | 0.5700 0.0300 0.0600 0.3400 | 0.4036 0.3264 0.0000 0.2700 | 0.1482 2.4709 2.4709 0.3627 0.1794 0.0732 0.2879
+  65 | 0.6000 0.4000 | 0.5700 0.0300 0.0100 0.3900 | 0.3251 0.3488 0.0000 0.3262 | 0.1594 2.5904 2.5904 0.3243 0.1607 0.0339 0.3491
+  66 | 0.6000 0.4000 | 0.5300 0.0700 0.0500 0.3500 | 0.2977 0.4788 0.0000 0.2235 | 0.2044 1.7875 1.7875 0.4531 0.2228 0.1017 0.4457
+  67 | 0.6000 0.4000 | 0.5600 0.0400 0.0600 0.3400 | 0.4491 0.2367 0.0000 0.3142 | 0.0983 1.3912 1.3912 0.2413 0.1201 0.0820 0.2086
+  68 | 0.6000 0.4000 | 0.5600 0.0400 0.0400 0.3600 | 0.4048 0.3060 0.0000 0.2892 | 0.1330 1.8169 1.8169 0.2720 0.1352 0.0667 0.2743
+  69 | 0.6000 0.4000 | 0.5900 0.0100 0.0300 0.3700 | 0.4462 0.2388 0.0000 0.3150 | 0.1144 4.1244 4.1244 0.2503 0.1245 0.0328 0.2111
+  70 | 0.7000 0.3000 | 0.6900 0.0100 0.0200 0.2800 | 0.6374 0.0534 0.0000 0.3093 | 0.0363 0.9679 0.9679 0.0468 0.0234 0.0213 0.0402
+  71 | 0.7000 0.3000 | 0.6600 0.0400 0.0100 0.2900 | 0.5646 0.2108 0.0000 0.2246 | 0.0854 1.2071 1.2071 0.1229 0.0614 0.0365 0.1573
+  72 | 0.7000 0.3000 | 0.6400 0.0600 0.0300 0.2700 | 0.5626 0.1337 0.0000 0.3037 | 0.0537 0.5584 0.5584 0.0695 0.0347 0.0657 0.1062
+  73 | 0.7000 0.3000 | 0.6400 0.0600 0.0100 0.2900 | 0.4890 0.3280 0.0000 0.1831 | 0.1340 1.3465 1.3465 0.2152 0.1072 0.0519 0.2512
+  74 | 0.7000 0.3000 | 0.6300 0.0700 0.0200 0.2800 | 0.4717 0.2827 0.0000 0.2456 | 0.1063 1.0015 1.0015 0.1581 0.0789 0.0667 0.2305
+  75 | 0.7000 0.3000 | 0.6800 0.0200 0.0300 0.2700 | 0.5914 0.2123 0.0000 0.1963 | 0.0961 2.2366 2.2366 0.1947 0.0970 0.0355 0.1522
+  76 | 0.7000 0.3000 | 0.6600 0.0400 0.0300 0.2700 | 0.6164 0.1300 0.0000 0.2535 | 0.0450 0.7457 0.7457 0.0807 0.0403 0.0504 0.0954
+  77 | 0.7000 0.3000 | 0.6600 0.0400 0.0100 0.2900 | 0.5485 0.1940 0.0000 0.2575 | 0.0770 1.0917 1.0917 0.1061 0.0530 0.0365 0.1503
+  78 | 0.7000 0.3000 | 0.6600 0.0400 0.0500 0.2500 | 0.5583 0.2694 0.0000 0.1723 | 0.1147 1.6161 1.6161 0.2536 0.1261 0.0638 0.1944
+  79 | 0.7000 0.3000 | 0.6800 0.0200 0.0300 0.2700 | 0.4831 0.2938 0.0000 0.2231 | 0.1369 3.0667 3.0667 0.2896 0.1438 0.0355 0.2332
+  80 | 0.8000 0.2000 | 0.7300 0.0700 0.0300 0.1700 | 0.5847 0.3097 0.0000 0.1056 | 0.1199 1.1547 1.1547 0.2027 0.1010 0.0641 0.2094
+  81 | 0.8000 0.2000 | 0.7300 0.0700 0.0300 0.1700 | 0.5118 0.3332 0.0000 0.1550 | 0.1316 1.1873 1.1873 0.2297 0.1144 0.0641 0.2456
+  82 | 0.8000 0.2000 | 0.7100 0.0900 0.0000 0.2000 | 0.5372 0.2500 0.0000 0.2127 | 0.0864 0.4987 0.4987 0.0916 0.0457 0.0596 0.1888
+  83 | 0.8000 0.2000 | 0.7500 0.0500 0.0700 0.1300 | 0.7568 0.1519 0.0000 0.0913 | 0.0544 0.7703 0.7703 0.1843 0.0919 0.0741 0.0912
+  84 | 0.8000 0.2000 | 0.7600 0.0400 0.0200 0.1800 | 0.7005 0.2142 0.0000 0.0853 | 0.0871 1.3152 1.3152 0.1637 0.0816 0.0380 0.1326
+  85 | 0.8000 0.2000 | 0.7500 0.0500 0.0100 0.1900 | 0.6569 0.1493 0.0000 0.1938 | 0.0516 0.6539 0.6539 0.0554 0.0277 0.0385 0.1021
+  86 | 0.8000 0.2000 | 0.7600 0.0400 0.0100 0.1900 | 0.6134 0.2222 0.0000 0.1644 | 0.0911 1.2596 1.2596 0.1338 0.0668 0.0318 0.1534
+  87 | 0.8000 0.2000 | 0.7500 0.0500 0.0100 0.1900 | 0.6629 0.2320 0.0000 0.1051 | 0.0910 1.1313 1.1313 0.1401 0.0699 0.0385 0.1489
+  88 | 0.8000 0.2000 | 0.7400 0.0600 0.0000 0.2000 | 0.5857 0.3081 0.0000 0.1062 | 0.1240 1.1204 1.1204 0.1961 0.0977 0.0390 0.2082
+  89 | 0.8000 0.2000 | 0.7400 0.0600 0.0200 0.1800 | 0.4581 0.4695 0.0000 0.0724 | 0.2048 2.0152 2.0152 0.4265 0.2101 0.0513 0.3389
+  90 | 0.9000 0.1000 | 0.8600 0.0400 0.0200 0.0800 | 0.8681 0.1034 0.0000 0.0284 | 0.0358 0.7063 0.7063 0.0718 0.0359 0.0337 0.0562
+  91 | 0.9000 0.1000 | 0.8200 0.0800 0.0200 0.0800 | 0.6954 0.2312 0.0002 0.0732 | 0.0756 0.7002 0.7002 0.0945 0.0472 0.0575 0.1426
+  92 | 0.9000 0.1000 | 0.8500 0.0500 0.0100 0.0900 | 0.7136 0.2209 0.0001 0.0654 | 0.0854 1.0458 1.0458 0.1155 0.0577 0.0341 0.1341
+  93 | 0.9000 0.1000 | 0.8300 0.0700 0.0100 0.0900 | 0.6537 0.2912 0.0000 0.0552 | 0.1106 1.0483 1.0483 0.1549 0.0773 0.0460 0.1822
+  94 | 0.9000 0.1000 | 0.8300 0.0700 0.0000 0.1000 | 0.8285 0.0810 0.0000 0.0905 | 0.0055 0.0602 0.0602 0.0012 0.0006 0.0405 0.0466
+  95 | 0.9000 0.1000 | 0.8500 0.0500 0.0000 0.1000 | 0.7245 0.2244 0.0000 0.0511 | 0.0872 0.9459 0.9459 0.1230 0.0614 0.0286 0.1341
+  96 | 0.9000 0.1000 | 0.8600 0.0400 0.0100 0.0900 | 0.6797 0.3202 0.0000 0.0001 | 0.1401 2.0120 2.0120 0.4073 0.2009 0.0282 0.1907
+  97 | 0.9000 0.1000 | 0.8500 0.0500 0.0000 0.1000 | 0.7974 0.1166 0.0000 0.0859 | 0.0333 0.3517 0.3517 0.0257 0.0128 0.0286 0.0681
+  98 | 0.9000 0.1000 | 0.8600 0.0400 0.0100 0.0900 | 0.7810 0.2040 0.0001 0.0149 | 0.0820 1.2967 1.2967 0.1783 0.0889 0.0282 0.1155
+  99 | 0.9000 0.1000 | 0.8400 0.0600 0.0000 0.1000 | 0.7152 0.1607 0.0000 0.1242 | 0.0624 0.4817 0.4817 0.0525 0.0263 0.0345 0.1010
+ 100 | 1.0000 0.0000 | 0.9300 0.0700 0.0000 0.0000 | 0.9565 0.0229 0.0000 0.0206 | 0.0236 1.1924 1.1924 0.0399 0.0199 0.0363 0.0118
+ 101 | 1.0000 0.0000 | 0.9400 0.0600 0.0000 0.0000 | 0.8954 0.0736 0.0000 0.0310 | 0.0223 1.6128 1.6128 0.0234 0.0117 0.0309 0.0395
+ 102 | 1.0000 0.0000 | 0.9800 0.0200 0.0000 0.0000 | 0.9486 0.0514 0.0000 0.0000 | 0.0157 0.3220 0.3220 0.0115 0.0058 0.0101 0.0264
+ 103 | 1.0000 0.0000 | 0.9400 0.0600 0.0000 0.0000 | 0.9317 0.0628 0.0000 0.0054 | 0.0041 0.2842 0.2842 0.0018 0.0009 0.0309 0.0326
+ 104 | 1.0000 0.0000 | 0.9700 0.0300 0.0000 0.0000 | 0.7735 0.2265 0.0000 0.0000 | 0.0982 1.4540 1.4540 0.1533 0.0765 0.0152 0.1277
+ 105 | 1.0000 0.0000 | 0.9300 0.0700 0.0000 0.0000 | 0.8494 0.1275 0.0000 0.0231 | 0.0403 1.3676 1.3676 0.0330 0.0165 0.0363 0.0698
+ 106 | 1.0000 0.0000 | 0.9500 0.0500 0.0000 0.0000 | 0.9735 0.0148 0.0001 0.0116 | 0.0176 0.7536 0.7536 0.0269 0.0135 0.0256 0.0076
+ 107 | 1.0000 0.0000 | 0.9400 0.0600 0.0000 0.0000 | 0.9179 0.0492 0.0000 0.0329 | 0.0165 1.6944 1.6944 0.0241 0.0120 0.0309 0.0261
+ 108 | 1.0000 0.0000 | 0.9600 0.0400 0.0000 0.0000 | 0.9019 0.0981 0.0001 0.0000 | 0.0291 0.3405 0.3405 0.0226 0.0113 0.0204 0.0516
+ 109 | 1.0000 0.0000 | 0.9400 0.0600 0.0000 0.0000 | 0.9443 0.0236 0.0003 0.0317 | 0.0182 1.7433 1.7433 0.0387 0.0194 0.0309 0.0125
+
+**************************************************
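
Note: out.html is a pandas DataFrame with two-level columns rendered to HTML; the T0/F1/F0/T1 columns follow the extended-class layout documented in quacc/test_1.py below. As a minimal sketch, the file could have been written along these lines, assuming `df` is the report returned by eval.evaluation_report (the to_html call is an assumption; the patch itself does not show how out.html was generated):

    # hypothetical, not part of this patch: render the report DataFrame to HTML
    with open("out.html", "w") as f:
        f.write(df.to_html(float_format="{:.4f}".format, na_rep="NaN"))
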
diff --git a/quacc/data.py b/quacc/data.py
index f8fad6d..715802b 100644
--- a/quacc/data.py
+++ b/quacc/data.py
@@ -1,7 +1,9 @@
+from typing import List, Optional
+
import numpy as np
+import quapy as qp
import scipy.sparse as sp
from quapy.data import LabelledCollection
-from typing import List, Optional
class ExtendedCollection(LabelledCollection):
@@ -12,3 +14,17 @@ class ExtendedCollection(LabelledCollection):
classes: Optional[List] = None,
):
super().__init__(instances, labels, classes=classes)
+
+def get_dataset(name):
+ datasets = {
+ "spambase": lambda: qp.datasets.fetch_UCIDataset(
+ "spambase", verbose=False
+ ).train_test,
+ "hp": lambda: qp.datasets.fetch_reviews("hp", tfidf=True).train_test,
+ "imdb": lambda: qp.datasets.fetch_reviews("imdb", tfidf=True).train_test,
+ }
+
+ try:
+ return datasets[name]()
+ except KeyError:
+ raise KeyError(f"{name} is not available as a dataset")
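
A usage sketch for the extracted helper (assuming the quapy fetchers can download the data; `train` and `test` come back as quapy LabelledCollection objects):

    # sketch: get_dataset returns the (train, test) split of a known dataset
    # and raises KeyError for any other name
    from quacc.data import get_dataset

    train, test = get_dataset("spambase")
    print(train.n_classes, train.prevalence())
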
diff --git a/quacc/evaluation.py b/quacc/evaluation.py
index d12d098..029d476 100644
--- a/quacc/evaluation.py
+++ b/quacc/evaluation.py
@@ -1,16 +1,20 @@
+import itertools
from quapy.protocol import (
OnLabelledCollectionProtocol,
AbstractStochasticSeededProtocol,
)
-import quapy as qp
from typing import Iterable, Callable, Union
from .estimator import AccuracyEstimator
import pandas as pd
+import numpy as np
import quacc.error as error
-def estimate(estimator: AccuracyEstimator, protocol: AbstractStochasticSeededProtocol):
+def estimate(
+ estimator: AccuracyEstimator,
+ protocol: AbstractStochasticSeededProtocol,
+):
# ensure that the protocol returns a LabelledCollection for each iteration
protocol.collator = OnLabelledCollectionProtocol.get_collator("labelled_collection")
@@ -18,6 +22,9 @@ def estimate(estimator: AccuracyEstimator, protocol: AbstractStochasticSeededPro
for sample in protocol():
e_sample = estimator.extend(sample)
estim_prev = estimator.estimate(e_sample.X, ext=True)
base_prevs.append(sample.prevalence())
true_prevs.append(e_sample.prevalence())
estim_prevs.append(estim_prev)
@@ -25,6 +32,38 @@ def estimate(estimator: AccuracyEstimator, protocol: AbstractStochasticSeededPro
return base_prevs, true_prevs, estim_prevs
+_bprev_col_0 = ["base"]
+_bprev_col_1 = ["0", "1"]
+_prev_col_0 = ["true", "estim"]
+_prev_col_1 = ["T0", "F1", "F0", "T1"]
+_err_col_0 = ["errors"]
+
+
+def _report_columns(err_names):
+ bprev_cols = list(itertools.product(_bprev_col_0, _bprev_col_1))
+ prev_cols = list(itertools.product(_prev_col_0, _prev_col_1))
+
+ err_1 = err_names
+ err_cols = list(itertools.product(_err_col_0, err_1))
+
+ cols = bprev_cols + prev_cols + err_cols
+
+ return pd.MultiIndex.from_tuples(cols)
+
+
+def _dict_prev(base_prev, true_prev, estim_prev):
+ prev_cols = list(itertools.product(_bprev_col_0, _bprev_col_1)) + list(
+ itertools.product(_prev_col_0, _prev_col_1)
+ )
+
+ return {
+ k: v
+ for (k, v) in zip(
+ prev_cols, np.concatenate((base_prev, true_prev, estim_prev), axis=0)
+ )
+ }
+
+
def evaluation_report(
estimator: AccuracyEstimator,
protocol: AbstractStochasticSeededProtocol,
@@ -40,26 +79,25 @@ def evaluation_report(
]
assert all(hasattr(e, "__call__") for e in error_funcs), "invalid error function"
error_names = [e.__name__ for e in error_funcs]
+ error_cols = error_names.copy()
+ if "f1e" in error_cols:
+ error_cols.remove("f1e")
+ error_cols.extend(["f1e_true", "f1e_estim"])
+
+ # df_cols = ["base_prev", "true_prev", "estim_prev"] + error_names
+ df_cols = _report_columns(error_cols)
- df_cols = ["base_prev", "true_prev", "estim_prev"] + error_names
- if "f1e" in df_cols:
- df_cols.remove("f1e")
- df_cols.extend(["f1e_true", "f1e_estim"])
lst = []
for base_prev, true_prev, estim_prev in zip(base_prevs, true_prevs, estim_prevs):
- series = {
- "base_prev": base_prev,
- "true_prev": true_prev,
- "estim_prev": estim_prev,
- }
+ series = _dict_prev(base_prev, true_prev, estim_prev)
for error_name, error_metric in zip(error_names, error_funcs):
if error_name == "f1e":
- series["f1e_true"] = error_metric(true_prev)
- series["f1e_estim"] = error_metric(estim_prev)
+ series[("errors", "f1e_true")] = error_metric(true_prev)
+ series[("errors", "f1e_estim")] = error_metric(estim_prev)
continue
score = error_metric(true_prev, estim_prev)
- series[error_name] = score
+ series[("errors", error_name)] = score
lst.append(series)
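
A self-contained sketch of the two-level column layout that _report_columns builds (dummy values; the error list is shortened for brevity):

    import itertools

    import pandas as pd

    cols = pd.MultiIndex.from_tuples(
        list(itertools.product(["base"], ["0", "1"]))
        + list(itertools.product(["true", "estim"], ["T0", "F1", "F0", "T1"]))
        + list(itertools.product(["errors"], ["mae", "rae", "mrae"]))
    )
    # rows are dicts keyed by the same tuples, as in _dict_prev
    print(pd.DataFrame([[0.0] * len(cols)], columns=cols))
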
diff --git a/quacc/main.py b/quacc/main.py
index 51f4b04..14fa7d0 100644
--- a/quacc/main.py
+++ b/quacc/main.py
@@ -1,158 +1,17 @@
-import numpy as np
+import pandas as pd
import quapy as qp
-import scipy.sparse as sp
-from quapy.data import LabelledCollection
from quapy.method.aggregative import SLD
-from quapy.protocol import APP, AbstractStochasticSeededProtocol
+from quapy.protocol import APP
from sklearn.linear_model import LogisticRegression
-from sklearn.model_selection import cross_val_predict
import quacc.evaluation as eval
from quacc.estimator import AccuracyEstimator
-qp.environ['SAMPLE_SIZE'] = 100
+from .data import get_dataset
+qp.environ["SAMPLE_SIZE"] = 100
-# Extended classes
-#
-# 0 ~ True 0
-# 1 ~ False 1
-# 2 ~ False 0
-# 3 ~ True 1
-# _____________________
-# | | |
-# | True 0 | False 1 |
-# |__________|__________|
-# | | |
-# | False 0 | True 1 |
-# |__________|__________|
-#
-def get_ex_class(classes, true_class, pred_class):
- return true_class * classes + pred_class
-
-
-def extend_collection(coll, pred_prob):
- n_classes = coll.n_classes
-
- # n_X = [ X | predicted probs. ]
- if isinstance(coll.X, sp.csr_matrix):
- pred_prob_csr = sp.csr_matrix(pred_prob)
- n_x = sp.hstack([coll.X, pred_prob_csr])
- elif isinstance(coll.X, np.ndarray):
- n_x = np.concatenate((coll.X, pred_prob), axis=1)
- else:
- raise ValueError("Unsupported matrix format")
-
- # n_y = (exptected y, predicted y)
- n_y = []
- for i, true_class in enumerate(coll.y):
- pred_class = pred_prob[i].argmax(axis=0)
- n_y.append(get_ex_class(n_classes, true_class, pred_class))
-
- return LabelledCollection(n_x, np.asarray(n_y), [*range(0, n_classes * n_classes)])
-
-
-def qf1e_binary(prev):
- recall = prev[0] / (prev[0] + prev[1])
- precision = prev[0] / (prev[0] + prev[2])
-
- return 1 - 2 * (precision * recall) / (precision + recall)
-
-
-def compute_errors(true_prev, estim_prev, n_instances):
- errors = {}
- _eps = 1 / (2 * n_instances)
- errors = {
- "mae": qp.error.mae(true_prev, estim_prev),
- "rae": qp.error.rae(true_prev, estim_prev, eps=_eps),
- "mrae": qp.error.mrae(true_prev, estim_prev, eps=_eps),
- "kld": qp.error.kld(true_prev, estim_prev, eps=_eps),
- "nkld": qp.error.nkld(true_prev, estim_prev, eps=_eps),
- "true_f1e": qf1e_binary(true_prev),
- "estim_f1e": qf1e_binary(estim_prev),
- }
-
- return errors
-
-
-def extend_and_quantify(
- model,
- q_model,
- train,
- test: LabelledCollection | AbstractStochasticSeededProtocol,
-):
- model.fit(*train.Xy)
-
- pred_prob_train = cross_val_predict(model, *train.Xy, method="predict_proba")
- _train = extend_collection(train, pred_prob_train)
-
- q_model.fit(_train)
-
- def quantify_extended(test):
- pred_prob_test = model.predict_proba(test.X)
- _test = extend_collection(test, pred_prob_test)
- _estim_prev = q_model.quantify(_test.instances)
- # check that _estim_prev has all the classes and eventually fill the missing
- # ones with 0
- for _cls in _test.classes_:
- if _cls not in q_model.classes_:
- _estim_prev = np.insert(_estim_prev, _cls, [0.0], axis=0)
- print(_estim_prev)
- return _test.prevalence(), _estim_prev
-
- if isinstance(test, LabelledCollection):
- _true_prev, _estim_prev = quantify_extended(test)
- _errors = compute_errors(_true_prev, _estim_prev, test.X.shape[0])
- return ([test.prevalence()], [_true_prev], [_estim_prev], [_errors])
-
- elif isinstance(test, AbstractStochasticSeededProtocol):
- orig_prevs, true_prevs, estim_prevs, errors = [], [], [], []
- for index in test.samples_parameters():
- sample = test.sample(index)
- _true_prev, _estim_prev = quantify_extended(sample)
-
- orig_prevs.append(sample.prevalence())
- true_prevs.append(_true_prev)
- estim_prevs.append(_estim_prev)
- errors.append(compute_errors(_true_prev, _estim_prev, sample.X.shape[0]))
-
- return orig_prevs, true_prevs, estim_prevs, errors
-
-
-def get_dataset(name):
- datasets = {
- "spambase": lambda: qp.datasets.fetch_UCIDataset(
- "spambase", verbose=False
- ).train_test,
- "hp": lambda: qp.datasets.fetch_reviews("hp", tfidf=True).train_test,
- "imdb": lambda: qp.datasets.fetch_reviews("imdb", tfidf=True).train_test,
- }
-
- try:
- return datasets[name]()
- except KeyError:
- raise KeyError(f"{name} is not available as a dataset")
-
-
-def test_1(dataset_name):
- train, test = get_dataset(dataset_name)
-
- orig_prevs, true_prevs, estim_prevs, errors = extend_and_quantify(
- LogisticRegression(),
- SLD(LogisticRegression()),
- train,
- APP(test, n_prevalences=11, repeats=1),
- )
-
- for orig_prev, true_prev, estim_prev, _errors in zip(
- orig_prevs, true_prevs, estim_prevs, errors
- ):
- print(f"original prevalence:\t{orig_prev}")
- print(f"true prevalence:\t{true_prev}")
- print(f"estimated prevalence:\t{estim_prev}")
- for name, err in _errors.items():
- print(f"{name}={err:.3f}")
- print()
+pd.set_option("display.float_format", "{:.4f}".format)
def test_2(dataset_name):
@@ -161,9 +20,8 @@ def test_2(dataset_name):
model.fit(*train.Xy)
estimator = AccuracyEstimator(model, SLD(LogisticRegression()))
estimator.fit(train)
- df = eval.evaluation_report(
- estimator, APP(test, n_prevalences=11, repeats=1)
- )
+ df = eval.evaluation_report(estimator, APP(test, n_prevalences=11, repeats=100))
print(df.to_string())
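
The spambase table at the top of this patch is the kind of report test_2 prints. A hypothetical invocation (importing quacc.main also applies the module-level qp.environ and pd.set_option configuration):

    # hypothetical entry point: fits LogisticRegression + SLD on the training
    # set and prints the evaluation report over an APP protocol
    from quacc.main import test_2

    test_2("spambase")
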
diff --git a/quacc/test_1.py b/quacc/test_1.py
new file mode 100644
index 0000000..00ea263
--- /dev/null
+++ b/quacc/test_1.py
@@ -0,0 +1,138 @@
+import numpy as np
+import scipy.sparse as sp
+import quapy as qp
+from quapy.data import LabelledCollection
+from quapy.method.aggregative import SLD
+from quapy.protocol import APP, AbstractStochasticSeededProtocol
+from sklearn.linear_model import LogisticRegression
+from sklearn.model_selection import cross_val_predict
+
+from .data import get_dataset
+
+# Extended classes
+#
+# 0 ~ True 0
+# 1 ~ False 1
+# 2 ~ False 0
+# 3 ~ True 1
+# _____________________
+# | | |
+# | True 0 | False 1 |
+# |__________|__________|
+# | | |
+# | False 0 | True 1 |
+# |__________|__________|
+#
+def get_ex_class(classes, true_class, pred_class):
+ return true_class * classes + pred_class
+
+
+def extend_collection(coll, pred_prob):
+ n_classes = coll.n_classes
+
+ # n_X = [ X | predicted probs. ]
+ if isinstance(coll.X, sp.csr_matrix):
+ pred_prob_csr = sp.csr_matrix(pred_prob)
+ n_x = sp.hstack([coll.X, pred_prob_csr])
+ elif isinstance(coll.X, np.ndarray):
+ n_x = np.concatenate((coll.X, pred_prob), axis=1)
+ else:
+ raise ValueError("Unsupported matrix format")
+
+    # n_y = (expected y, predicted y)
+ n_y = []
+ for i, true_class in enumerate(coll.y):
+ pred_class = pred_prob[i].argmax(axis=0)
+ n_y.append(get_ex_class(n_classes, true_class, pred_class))
+
+ return LabelledCollection(n_x, np.asarray(n_y), [*range(0, n_classes * n_classes)])
+
+
+def qf1e_binary(prev):
+ recall = prev[0] / (prev[0] + prev[1])
+ precision = prev[0] / (prev[0] + prev[2])
+
+ return 1 - 2 * (precision * recall) / (precision + recall)
+
+
+def compute_errors(true_prev, estim_prev, n_instances):
+ errors = {}
+ _eps = 1 / (2 * n_instances)
+ errors = {
+ "mae": qp.error.mae(true_prev, estim_prev),
+ "rae": qp.error.rae(true_prev, estim_prev, eps=_eps),
+ "mrae": qp.error.mrae(true_prev, estim_prev, eps=_eps),
+ "kld": qp.error.kld(true_prev, estim_prev, eps=_eps),
+ "nkld": qp.error.nkld(true_prev, estim_prev, eps=_eps),
+ "true_f1e": qf1e_binary(true_prev),
+ "estim_f1e": qf1e_binary(estim_prev),
+ }
+
+ return errors
+
+
+def extend_and_quantify(
+ model,
+ q_model,
+ train,
+ test: LabelledCollection | AbstractStochasticSeededProtocol,
+):
+ model.fit(*train.Xy)
+
+ pred_prob_train = cross_val_predict(model, *train.Xy, method="predict_proba")
+ _train = extend_collection(train, pred_prob_train)
+
+ q_model.fit(_train)
+
+ def quantify_extended(test):
+ pred_prob_test = model.predict_proba(test.X)
+ _test = extend_collection(test, pred_prob_test)
+ _estim_prev = q_model.quantify(_test.instances)
+        # check that _estim_prev covers all the classes, filling any
+        # missing ones with 0
+ for _cls in _test.classes_:
+ if _cls not in q_model.classes_:
+ _estim_prev = np.insert(_estim_prev, _cls, [0.0], axis=0)
+ print(_estim_prev)
+ return _test.prevalence(), _estim_prev
+
+ if isinstance(test, LabelledCollection):
+ _true_prev, _estim_prev = quantify_extended(test)
+ _errors = compute_errors(_true_prev, _estim_prev, test.X.shape[0])
+ return ([test.prevalence()], [_true_prev], [_estim_prev], [_errors])
+
+ elif isinstance(test, AbstractStochasticSeededProtocol):
+ orig_prevs, true_prevs, estim_prevs, errors = [], [], [], []
+ for index in test.samples_parameters():
+ sample = test.sample(index)
+ _true_prev, _estim_prev = quantify_extended(sample)
+
+ orig_prevs.append(sample.prevalence())
+ true_prevs.append(_true_prev)
+ estim_prevs.append(_estim_prev)
+ errors.append(compute_errors(_true_prev, _estim_prev, sample.X.shape[0]))
+
+ return orig_prevs, true_prevs, estim_prevs, errors
+
+
+def test_1(dataset_name):
+ train, test = get_dataset(dataset_name)
+
+ orig_prevs, true_prevs, estim_prevs, errors = extend_and_quantify(
+ LogisticRegression(),
+ SLD(LogisticRegression()),
+ train,
+ APP(test, n_prevalences=11, repeats=1),
+ )
+
+ for orig_prev, true_prev, estim_prev, _errors in zip(
+ orig_prevs, true_prevs, estim_prevs, errors
+ ):
+ print(f"original prevalence:\t{orig_prev}")
+ print(f"true prevalence:\t{true_prev}")
+ print(f"estimated prevalence:\t{estim_prev}")
+ for name, err in _errors.items():
+ print(f"{name}={err:.3f}")
+ print()
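
A worked example of the extended-class mapping documented in the comment block above, for the binary case (classes=2):

    # sketch: enumerate the binary extended classes defined by get_ex_class
    for true_class in (0, 1):
        for pred_class in (0, 1):
            ex = get_ex_class(2, true_class, pred_class)
            print(f"true={true_class} pred={pred_class} -> {ex}")
    # true=0 pred=0 -> 0  (True 0)
    # true=0 pred=1 -> 1  (False 1)
    # true=1 pred=0 -> 2  (False 0)
    # true=1 pred=1 -> 3  (True 1)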