diff --git a/__pycache__/run.cpython-38.pyc b/__pycache__/run.cpython-38.pyc index ce37f73..1be640d 100644 Binary files a/__pycache__/run.cpython-38.pyc and b/__pycache__/run.cpython-38.pyc differ diff --git a/cachedir/joblib/run/randomSearch/06d95882b621744b1f98a9e4a74c2c74/metadata.json b/cachedir/joblib/run/randomSearch/06d95882b621744b1f98a9e4a74c2c74/metadata.json new file mode 100644 index 0000000..cae42e4 --- /dev/null +++ b/cachedir/joblib/run/randomSearch/06d95882b621744b1f98a9e4a74c2c74/metadata.json @@ -0,0 +1 @@ +{"duration": 19.4533109664917, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. ...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "KNeighborsClassifier(algorithm='kd_tree', metric='manhattan', n_neighbors=93)", "params": "{'n_neighbors': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'metric': ['chebyshev', 'manhattan', 'euclidean', 'minkowski'], 'algorithm': ['brute', 'kd_tree', 'ball_tree'], 'weights': ['uniform', 'distance']}", "eachAlgor": "'KNN'", "AlgorithmsIDsEnd": "0", "crossValidation": "10", "randomSear": "100"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/1c55402796edef8a6b5bac46d0b25c13/output.pkl b/cachedir/joblib/run/randomSearch/1c55402796edef8a6b5bac46d0b25c13/output.pkl new file mode 100644 index 0000000..c0c29f8 Binary files /dev/null and b/cachedir/joblib/run/randomSearch/1c55402796edef8a6b5bac46d0b25c13/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/1f00a5570321dc7e1f836094260ca780/output.pkl b/cachedir/joblib/run/randomSearch/1f00a5570321dc7e1f836094260ca780/output.pkl new file mode 100644 index 0000000..eb8295d Binary files /dev/null and b/cachedir/joblib/run/randomSearch/1f00a5570321dc7e1f836094260ca780/output.pkl differ diff --git 
a/cachedir/joblib/run/randomSearch/21741d7f2b7f4f9c7fb92b7383a23242/output.pkl b/cachedir/joblib/run/randomSearch/21741d7f2b7f4f9c7fb92b7383a23242/output.pkl new file mode 100644 index 0000000..dbedc06 Binary files /dev/null and b/cachedir/joblib/run/randomSearch/21741d7f2b7f4f9c7fb92b7383a23242/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/2bb3a062a7dd745b6a072bf690932512/output.pkl b/cachedir/joblib/run/randomSearch/2bb3a062a7dd745b6a072bf690932512/output.pkl new file mode 100644 index 0000000..ea5d710 Binary files /dev/null and b/cachedir/joblib/run/randomSearch/2bb3a062a7dd745b6a072bf690932512/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/33930d7096f07eb6b51da58caad51ed7/metadata.json b/cachedir/joblib/run/randomSearch/33930d7096f07eb6b51da58caad51ed7/metadata.json new file mode 100644 index 0000000..1faa278 --- /dev/null +++ b/cachedir/joblib/run/randomSearch/33930d7096f07eb6b51da58caad51ed7/metadata.json @@ -0,0 +1 @@ +{"duration": 221.1900839805603, "input_args": {"XData": " LOC SM6_B(m) nN nO SpMax_A nX F04[C-N] HyWi_B(m) SdO F03[C-N] nCp SM6_L ... F03[C-O] SdssC nCIR F01[N-N] NssssC Psi_i_1d nN-N SpMax_B(m) B01[C-Br] F02[C-N] N-073 nCRX3\n0 1.185 9.085 0 0 2.263 3 0 3.642 0.000 0 1 9.902 ... 0 0.000 1 0 0 0.014 0 4.054 0 0 0 0\n1 0.000 8.179 2 0 2.194 0 6 3.526 0.000 6 0 10.054 ... 0 0.000 2 0 0 0.000 0 3.489 0 4 0 0\n2 0.762 8.297 0 3 2.424 0 0 3.339 21.884 0 0 10.226 ... 8 -0.686 3 0 0 0.004 0 3.693 0 0 0 0\n3 1.747 9.673 0 2 2.690 23 0 4.645 9.855 0 1 12.353 ... 2 -4.617 0 0 11 0.000 0 3.993 0 0 0 1\n4 1.824 9.825 0 2 2.700 27 0 4.795 9.894 0 1 12.519 ... 2 -4.724 0 0 13 0.000 0 4.005 0 0 0 1\n.. ... ... .. .. ... .. ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...\n832 1.125 7.878 0 1 2.136 0 0 2.990 0.000 0 0 9.311 ... 2 0.000 1 0 0 0.045 0 3.573 0 0 0 0\n833 1.187 8.046 1 1 2.222 0 2 3.105 0.000 2 0 9.668 ... 2 0.000 1 0 0 -0.025 0 3.666 0 2 0 0\n834 0.625 8.901 0 2 2.499 0 0 3.745 24.203 0 0 10.681 ... 8 -0.128 6 0 0 0.000 0 3.942 0 0 0 0\n835 3.866 8.778 0 6 2.361 0 0 4.201 11.747 0 1 10.735 ... 9 -0.347 1 0 0 0.000 0 3.497 0 0 0 0\n836 3.706 8.680 0 6 2.361 0 0 4.127 11.724 0 1 10.694 ... 
9 -0.338 1 0 0 0.000 0 3.497 0 0 0 0\n\n[837 rows x 41 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "MLPClassifier(activation='identity', alpha=1e-05, hidden_layer_sizes=(103, 2),\n max_iter=100, random_state=42, solver='sgd',\n tol=0.00041000000000000005)", "params": "{'hidden_layer_sizes': [(60, 3), (61, 1), (62, 1), (63, 3), (64, 2), (65, 1), (66, 1), (67, 1), (68, 3), (69, 1), (70, 3), (71, 3), (72, 3), (73, 1), (74, 3), (75, 2), (76, 1), (77, 1), (78, 1), (79, 1), (80, 1), (81, 3), (82, 3), (83, 1), (84, 3), (85, 1), (86, 3), (87, 3), (88, 3), (89, 3), (90, 2), (91, 1), (92, 2), (93, 3), (94, 2), (95, 1), (96, 1), (97, 3), (98, 2), (99, 2), (100, 2), (101, 1), (102, 1), (103, 2), (104, 1), (105, 1), (106, 2), (107, 1), (108, 2), (109, 2), (110, 3), (111, 2), (112, 1), (113, 3), (114, 2), (115, 3), (116, 1), (117, 2), (118, 1), (119, 3)], 'alpha': [1e-05, 0.00021, 0.00041000000000000005, 0.0006100000000000001, 0.0008100000000000001], 'tol': [1e-05, 0.00041000000000000005, 0.0008100000000000001], 'max_iter': [100], 'activation': ['relu', 
'identity', 'logistic', 'tanh'], 'solver': ['adam', 'sgd']}", "eachAlgor": "'MLP'", "AlgorithmsIDsEnd": "200", "crossValidation": "5", "randomSear": "100"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/35a0689afd921e6a802735bdd14f8d40/output.pkl b/cachedir/joblib/run/randomSearch/35a0689afd921e6a802735bdd14f8d40/output.pkl new file mode 100644 index 0000000..151290b Binary files /dev/null and b/cachedir/joblib/run/randomSearch/35a0689afd921e6a802735bdd14f8d40/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/3801a69d6c6bdf2625bd265ebabc099a/output.pkl b/cachedir/joblib/run/randomSearch/3801a69d6c6bdf2625bd265ebabc099a/output.pkl new file mode 100644 index 0000000..9ee3414 Binary files /dev/null and b/cachedir/joblib/run/randomSearch/3801a69d6c6bdf2625bd265ebabc099a/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/38aeb5fa81fe0b415ceada3a9628cda6/metadata.json b/cachedir/joblib/run/randomSearch/38aeb5fa81fe0b415ceada3a9628cda6/metadata.json new file mode 100644 index 0000000..351fce0 --- /dev/null +++ b/cachedir/joblib/run/randomSearch/38aeb5fa81fe0b415ceada3a9628cda6/metadata.json @@ -0,0 +1 @@ +{"duration": 65.25900888442993, "input_args": {"XData": " LOC SM6_B(m) nN nO SpMax_A nX F04[C-N] HyWi_B(m) SdO F03[C-N] nCp SM6_L ... F03[C-O] SdssC nCIR F01[N-N] NssssC Psi_i_1d nN-N SpMax_B(m) B01[C-Br] F02[C-N] N-073 nCRX3\n0 1.185 9.085 0 0 2.263 3 0 3.642 0.000 0 1 9.902 ... 0 0.000 1 0 0 0.014 0 4.054 0 0 0 0\n1 0.000 8.179 2 0 2.194 0 6 3.526 0.000 6 0 10.054 ... 0 0.000 2 0 0 0.000 0 3.489 0 4 0 0\n2 0.762 8.297 0 3 2.424 0 0 3.339 21.884 0 0 10.226 ... 8 -0.686 3 0 0 0.004 0 3.693 0 0 0 0\n3 1.747 9.673 0 2 2.690 23 0 4.645 9.855 0 1 12.353 ... 2 -4.617 0 0 11 0.000 0 3.993 0 0 0 1\n4 1.824 9.825 0 2 2.700 27 0 4.795 9.894 0 1 12.519 ... 2 -4.724 0 0 13 0.000 0 4.005 0 0 0 1\n.. ... ... .. .. ... .. ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...\n832 1.125 7.878 0 1 2.136 0 0 2.990 0.000 0 0 9.311 ... 2 0.000 1 0 0 0.045 0 3.573 0 0 0 0\n833 1.187 8.046 1 1 2.222 0 2 3.105 0.000 2 0 9.668 ... 2 0.000 1 0 0 -0.025 0 3.666 0 2 0 0\n834 0.625 8.901 0 2 2.499 0 0 3.745 24.203 0 0 10.681 ... 8 -0.128 6 0 0 0.000 0 3.942 0 0 0 0\n835 3.866 8.778 0 6 2.361 0 0 4.201 11.747 0 1 10.735 ... 9 -0.347 1 0 0 0.000 0 3.497 0 0 0 0\n836 3.706 8.680 0 6 2.361 0 0 4.127 11.724 0 1 10.694 ... 
9 -0.338 1 0 0 0.000 0 3.497 0 0 0 0\n\n[837 rows x 41 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "LogisticRegression(C=33, max_iter=400, penalty='none', random_state=42,\n solver='sag')", "params": "{'C': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'max_iter': [50, 100, 150, 200, 250, 300, 350, 400, 450], 'solver': ['lbfgs', 'newton-cg', 'sag', 'saga'], 'penalty': ['l2', 'none']}", "eachAlgor": "'LR'", "AlgorithmsIDsEnd": "100", "crossValidation": "5", "randomSear": "100"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/12ed81ff43e60d39acfe8c23196f8092/metadata.json b/cachedir/joblib/run/randomSearch/394795b7c0f42fea4a08559c442933ec/metadata.json similarity index 71% rename 
from cachedir/joblib/run/randomSearch/12ed81ff43e60d39acfe8c23196f8092/metadata.json rename to cachedir/joblib/run/randomSearch/394795b7c0f42fea4a08559c442933ec/metadata.json index bf86ef8..67ce0dc 100644 --- a/cachedir/joblib/run/randomSearch/12ed81ff43e60d39acfe8c23196f8092/metadata.json +++ b/cachedir/joblib/run/randomSearch/394795b7c0f42fea4a08559c442933ec/metadata.json @@ -1 +1 @@ -{"duration": 77.61082220077515, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. ...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "LogisticRegression(C=76, max_iter=50, random_state=42, solver='saga')", "params": "{'C': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'max_iter': [50, 100, 150, 200, 250, 300, 350, 400, 450], 'solver': ['lbfgs', 'newton-cg', 'sag', 'saga'], 'penalty': ['l2', 'none']}", "eachAlgor": "'LR'", "AlgorithmsIDsEnd": "200", "crossValidation": "10", "randomSear": "200"}} \ No newline at end of file +{"duration": 83.79811096191406, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. 
...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "LogisticRegression(C=74, max_iter=350, penalty='none', random_state=42,\n solver='newton-cg')", "params": "{'C': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'max_iter': [50, 100, 150, 200, 250, 300, 350, 400, 450], 'solver': ['lbfgs', 'newton-cg', 'sag', 'saga'], 'penalty': ['l2', 'none']}", "eachAlgor": "'LR'", "AlgorithmsIDsEnd": "200", "crossValidation": "10", "randomSear": "200"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/3d0c7e83f7046123bbd56793b15c3834/metadata.json b/cachedir/joblib/run/randomSearch/3d0c7e83f7046123bbd56793b15c3834/metadata.json new file mode 100644 index 0000000..a8fb562 --- /dev/null +++ b/cachedir/joblib/run/randomSearch/3d0c7e83f7046123bbd56793b15c3834/metadata.json @@ -0,0 +1 @@ +{"duration": 83.98800611495972, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. 
...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "GradientBoostingClassifier(criterion='mae', learning_rate=0.01,\n loss='exponential', n_estimators=21, random_state=42,\n subsample=0.7000000000000001)", "params": "{'n_estimators': [20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'loss': ['deviance', 'exponential'], 'learning_rate': [0.01, 0.12, 0.23, 0.34, 0.45], 'subsample': [0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6, 0.7000000000000001, 0.8, 0.9], 'criterion': ['friedman_mse', 'mse', 'mae']}", "eachAlgor": "'GradB'", "AlgorithmsIDsEnd": "400", "crossValidation": "10", "randomSear": "100"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/41eb9e04ff245b2484996e93dbd27cc5/metadata.json b/cachedir/joblib/run/randomSearch/41eb9e04ff245b2484996e93dbd27cc5/metadata.json new file mode 100644 index 0000000..60233a2 --- /dev/null +++ b/cachedir/joblib/run/randomSearch/41eb9e04ff245b2484996e93dbd27cc5/metadata.json @@ -0,0 +1 @@ +{"duration": 282.2638130187988, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. 
...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "MLPClassifier(alpha=0.0008100000000000001, hidden_layer_sizes=(81, 3),\n max_iter=100, random_state=42, tol=0.0008100000000000001)", "params": "{'hidden_layer_sizes': [(60, 3), (61, 1), (62, 1), (63, 3), (64, 2), (65, 1), (66, 1), (67, 1), (68, 3), (69, 1), (70, 3), (71, 3), (72, 3), (73, 1), (74, 3), (75, 2), (76, 1), (77, 1), (78, 1), (79, 1), (80, 1), (81, 3), (82, 3), (83, 1), (84, 3), (85, 1), (86, 3), (87, 3), (88, 3), (89, 3), (90, 2), (91, 1), (92, 2), (93, 3), (94, 2), (95, 1), (96, 1), (97, 3), (98, 2), (99, 2), (100, 2), (101, 1), (102, 1), (103, 2), (104, 1), (105, 1), (106, 2), (107, 1), (108, 2), (109, 2), (110, 3), (111, 2), (112, 1), (113, 3), (114, 2), (115, 3), (116, 1), (117, 2), (118, 1), (119, 3)], 'alpha': [1e-05, 0.00021, 0.00041000000000000005, 0.0006100000000000001, 0.0008100000000000001], 'tol': [1e-05, 0.00041000000000000005, 0.0008100000000000001], 'max_iter': [100], 'activation': ['relu', 'identity', 'logistic', 'tanh'], 'solver': ['adam', 'sgd']}", "eachAlgor": "'MLP'", "AlgorithmsIDsEnd": "400", "crossValidation": "10", "randomSear": "200"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/450e1077322e56cfc3517d46fc567126/metadata.json b/cachedir/joblib/run/randomSearch/450e1077322e56cfc3517d46fc567126/metadata.json new file mode 100644 index 0000000..deb3902 --- /dev/null +++ b/cachedir/joblib/run/randomSearch/450e1077322e56cfc3517d46fc567126/metadata.json @@ -0,0 +1 @@ +{"duration": 62.298343896865845, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. 
...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "LogisticRegression(C=36, max_iter=450, random_state=42)", "params": "{'C': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'max_iter': [50, 100, 150, 200, 250, 300, 350, 400, 450], 'solver': ['lbfgs', 'newton-cg', 'sag', 'saga'], 'penalty': ['l2', 'none']}", "eachAlgor": "'LR'", "AlgorithmsIDsEnd": "150", "crossValidation": "10", "randomSear": "150"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/47f8386eab2730da77a11b2a4f698ad7/metadata.json b/cachedir/joblib/run/randomSearch/47f8386eab2730da77a11b2a4f698ad7/metadata.json new file mode 100644 index 0000000..b062478 --- /dev/null +++ b/cachedir/joblib/run/randomSearch/47f8386eab2730da77a11b2a4f698ad7/metadata.json @@ -0,0 +1 @@ +{"duration": 199.5159022808075, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. 
...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "RandomForestClassifier(criterion='entropy', max_depth=11, n_estimators=26,\n random_state=42)", "params": "{'n_estimators': [20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'max_depth': [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19], 'criterion': ['gini', 'entropy']}", "eachAlgor": "'RF'", "AlgorithmsIDsEnd": "600", "crossValidation": "10", "randomSear": "200"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/5701f474535efb82abf5085f37a3d5f5/metadata.json b/cachedir/joblib/run/randomSearch/5701f474535efb82abf5085f37a3d5f5/metadata.json new file mode 100644 index 0000000..4d56b65 --- /dev/null +++ b/cachedir/joblib/run/randomSearch/5701f474535efb82abf5085f37a3d5f5/metadata.json @@ -0,0 +1 @@ +{"duration": 42.37714505195618, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. 
...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "LogisticRegression(C=98, max_iter=450, penalty='none', random_state=42,\n solver='newton-cg')", "params": "{'C': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'max_iter': [50, 100, 150, 200, 250, 300, 350, 400, 450], 'solver': ['lbfgs', 'newton-cg', 'sag', 'saga'], 'penalty': ['l2', 'none']}", "eachAlgor": "'LR'", "AlgorithmsIDsEnd": "100", "crossValidation": "10", "randomSear": "100"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/5cc3b100cfa31b8ade7a9449f9d21fa9/metadata.json b/cachedir/joblib/run/randomSearch/5cc3b100cfa31b8ade7a9449f9d21fa9/metadata.json new file mode 100644 index 0000000..a89ced0 --- /dev/null +++ b/cachedir/joblib/run/randomSearch/5cc3b100cfa31b8ade7a9449f9d21fa9/metadata.json @@ -0,0 +1 @@ +{"duration": 18.435396671295166, "input_args": {"XData": " LOC SM6_B(m) nN nO SpMax_A nX F04[C-N] HyWi_B(m) SdO F03[C-N] nCp SM6_L C% B03[C-Cl] J_Dz(e) SpPosA_B(p) B04[C-Br] Me ... Mi Psi_i_A SpMax_L nHDon nArCOOR C-026 F03[C-O] SdssC nCIR F01[N-N] NssssC Psi_i_1d nN-N SpMax_B(m) B01[C-Br] F02[C-N] N-073 nCRX3\n0 1.185 9.085 0 0 2.263 3 0 3.642 0.000 0 1 9.902 46.7 1 3.1934 1.359 0 1.034 ... 1.100 2.483 4.825 0 0 2 0 0.000 1 0 0 0.014 0 4.054 0 0 0 0\n1 0.000 8.179 2 0 2.194 0 6 3.526 0.000 6 0 10.054 35.1 0 1.8929 1.209 0 0.974 ... 1.139 1.744 4.562 0 0 0 0 0.000 2 0 0 0.000 0 3.489 0 4 0 0\n2 0.762 8.297 0 3 2.424 0 0 3.339 21.884 0 0 10.226 42.1 0 2.3934 1.204 0 1.027 ... 1.120 2.773 4.982 0 0 0 8 -0.686 3 0 0 0.004 0 3.693 0 0 0 0\n3 1.747 9.673 0 2 2.690 23 0 4.645 9.855 0 1 12.353 31.6 0 7.7233 0.906 0 1.291 ... 1.348 5.741 6.316 1 0 0 2 -4.617 0 0 11 0.000 0 3.993 0 0 0 1\n4 1.824 9.825 0 2 2.700 27 0 4.795 9.894 0 1 12.519 31.8 0 7.9184 0.906 0 1.292 ... 1.350 5.742 6.330 1 0 0 2 -4.724 0 0 13 0.000 0 4.005 0 0 0 1\n.. ... ... .. .. ... .. ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...\n832 1.125 7.878 0 1 2.136 0 0 2.990 0.000 0 0 9.311 43.8 0 3.0778 1.253 0 0.991 ... 
1.117 2.146 4.499 0 0 1 2 0.000 1 0 0 0.045 0 3.573 0 0 0 0\n833 1.187 8.046 1 1 2.222 0 2 3.105 0.000 2 0 9.668 38.9 0 3.2726 1.230 0 0.998 ... 1.132 2.315 4.783 2 0 2 2 0.000 1 0 0 -0.025 0 3.666 0 2 0 0\n834 0.625 8.901 0 2 2.499 0 0 3.745 24.203 0 0 10.681 58.3 0 2.3715 1.312 0 1.008 ... 1.087 2.500 5.310 0 0 0 8 -0.128 6 0 0 0.000 0 3.942 0 0 0 0\n835 3.866 8.778 0 6 2.361 0 0 4.201 11.747 0 1 10.735 32.4 0 1.9452 1.166 0 0.992 ... 1.140 2.300 4.977 3 0 0 9 -0.347 1 0 0 0.000 0 3.497 0 0 0 0\n836 3.706 8.680 0 6 2.361 0 0 4.127 11.724 0 1 10.694 31.4 0 1.9472 1.153 0 0.993 ... 1.143 2.321 4.977 3 0 0 9 -0.338 1 0 0 0.000 0 3.497 0 0 0 0\n\n[837 rows x 41 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "KNeighborsClassifier(algorithm='kd_tree', metric='chebyshev', n_neighbors=29)", "params": "{'n_neighbors': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 
62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'metric': ['chebyshev', 'manhattan', 'euclidean', 'minkowski'], 'algorithm': ['brute', 'kd_tree', 'ball_tree'], 'weights': ['uniform', 'distance']}", "eachAlgor": "'KNN'", "AlgorithmsIDsEnd": "500", "crossValidation": "5", "randomSear": "100"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/6350a22d7fe281ce33f7ff59a8fb9bf9/metadata.json b/cachedir/joblib/run/randomSearch/6350a22d7fe281ce33f7ff59a8fb9bf9/metadata.json new file mode 100644 index 0000000..26aff4c --- /dev/null +++ b/cachedir/joblib/run/randomSearch/6350a22d7fe281ce33f7ff59a8fb9bf9/metadata.json @@ -0,0 +1 @@ +{"duration": 80.31160593032837, "input_args": {"XData": " LOC SM6_B(m) nN nO SpMax_A nX F04[C-N] HyWi_B(m) SdO F03[C-N] nCp SM6_L C% B03[C-Cl] J_Dz(e) SpPosA_B(p) B04[C-Br] Me ... Mi Psi_i_A SpMax_L nHDon nArCOOR C-026 F03[C-O] SdssC nCIR F01[N-N] NssssC Psi_i_1d nN-N SpMax_B(m) B01[C-Br] F02[C-N] N-073 nCRX3\n0 1.185 9.085 0 0 2.263 3 0 3.642 0.000 0 1 9.902 46.7 1 3.1934 1.359 0 1.034 ... 1.100 2.483 4.825 0 0 2 0 0.000 1 0 0 0.014 0 4.054 0 0 0 0\n1 0.000 8.179 2 0 2.194 0 6 3.526 0.000 6 0 10.054 35.1 0 1.8929 1.209 0 0.974 ... 1.139 1.744 4.562 0 0 0 0 0.000 2 0 0 0.000 0 3.489 0 4 0 0\n2 0.762 8.297 0 3 2.424 0 0 3.339 21.884 0 0 10.226 42.1 0 2.3934 1.204 0 1.027 ... 1.120 2.773 4.982 0 0 0 8 -0.686 3 0 0 0.004 0 3.693 0 0 0 0\n3 1.747 9.673 0 2 2.690 23 0 4.645 9.855 0 1 12.353 31.6 0 7.7233 0.906 0 1.291 ... 1.348 5.741 6.316 1 0 0 2 -4.617 0 0 11 0.000 0 3.993 0 0 0 1\n4 1.824 9.825 0 2 2.700 27 0 4.795 9.894 0 1 12.519 31.8 0 7.9184 0.906 0 1.292 ... 1.350 5.742 6.330 1 0 0 2 -4.724 0 0 13 0.000 0 4.005 0 0 0 1\n.. ... ... .. .. ... .. ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...\n832 1.125 7.878 0 1 2.136 0 0 2.990 0.000 0 0 9.311 43.8 0 3.0778 1.253 0 0.991 ... 1.117 2.146 4.499 0 0 1 2 0.000 1 0 0 0.045 0 3.573 0 0 0 0\n833 1.187 8.046 1 1 2.222 0 2 3.105 0.000 2 0 9.668 38.9 0 3.2726 1.230 0 0.998 ... 1.132 2.315 4.783 2 0 2 2 0.000 1 0 0 -0.025 0 3.666 0 2 0 0\n834 0.625 8.901 0 2 2.499 0 0 3.745 24.203 0 0 10.681 58.3 0 2.3715 1.312 0 1.008 ... 1.087 2.500 5.310 0 0 0 8 -0.128 6 0 0 0.000 0 3.942 0 0 0 0\n835 3.866 8.778 0 6 2.361 0 0 4.201 11.747 0 1 10.735 32.4 0 1.9452 1.166 0 0.992 ... 1.140 2.300 4.977 3 0 0 9 -0.347 1 0 0 0.000 0 3.497 0 0 0 0\n836 3.706 8.680 0 6 2.361 0 0 4.127 11.724 0 1 10.694 31.4 0 1.9472 1.153 0 0.993 ... 
1.143 2.321 4.977 3 0 0 9 -0.338 1 0 0 0.000 0 3.497 0 0 0 0\n\n[837 rows x 41 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "LogisticRegression(C=11, max_iter=50, random_state=42, solver='newton-cg')", "params": "{'C': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'max_iter': [50, 100, 150, 200, 250, 300, 350, 400, 450], 'solver': ['lbfgs', 'newton-cg', 'sag', 'saga'], 'penalty': ['l2', 'none']}", "eachAlgor": "'LR'", "AlgorithmsIDsEnd": "600", "crossValidation": "5", "randomSear": "100"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/6e8f3aaf26a54d6bc4443f029925bfed/output.pkl b/cachedir/joblib/run/randomSearch/6e8f3aaf26a54d6bc4443f029925bfed/output.pkl new file mode 100644 
index 0000000..e77df17 Binary files /dev/null and b/cachedir/joblib/run/randomSearch/6e8f3aaf26a54d6bc4443f029925bfed/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/7be38466d3402f657c3aee2462a63204/output.pkl b/cachedir/joblib/run/randomSearch/7be38466d3402f657c3aee2462a63204/output.pkl new file mode 100644 index 0000000..3e11017 Binary files /dev/null and b/cachedir/joblib/run/randomSearch/7be38466d3402f657c3aee2462a63204/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/86196624e1a300660087125afd8d738e/output.pkl b/cachedir/joblib/run/randomSearch/86196624e1a300660087125afd8d738e/output.pkl index 720dc8d..bc6259e 100644 Binary files a/cachedir/joblib/run/randomSearch/86196624e1a300660087125afd8d738e/output.pkl and b/cachedir/joblib/run/randomSearch/86196624e1a300660087125afd8d738e/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/8d22a33ee711963bc1f69e25e06b7ddd/output.pkl b/cachedir/joblib/run/randomSearch/8d22a33ee711963bc1f69e25e06b7ddd/output.pkl new file mode 100644 index 0000000..4aca0ea Binary files /dev/null and b/cachedir/joblib/run/randomSearch/8d22a33ee711963bc1f69e25e06b7ddd/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/8e7e524d6ec6eb43d22ffc2852f448fa/metadata.json b/cachedir/joblib/run/randomSearch/8e7e524d6ec6eb43d22ffc2852f448fa/metadata.json new file mode 100644 index 0000000..be49dca --- /dev/null +++ b/cachedir/joblib/run/randomSearch/8e7e524d6ec6eb43d22ffc2852f448fa/metadata.json @@ -0,0 +1 @@ +{"duration": 408.41527676582336, "input_args": {"XData": " LOC SM6_B(m) nN nO SpMax_A nX F04[C-N] HyWi_B(m) SdO F03[C-N] nCp SM6_L ... F03[C-O] SdssC nCIR F01[N-N] NssssC Psi_i_1d nN-N SpMax_B(m) B01[C-Br] F02[C-N] N-073 nCRX3\n0 1.185 9.085 0 0 2.263 3 0 3.642 0.000 0 1 9.902 ... 0 0.000 1 0 0 0.014 0 4.054 0 0 0 0\n1 0.000 8.179 2 0 2.194 0 6 3.526 0.000 6 0 10.054 ... 0 0.000 2 0 0 0.000 0 3.489 0 4 0 0\n2 0.762 8.297 0 3 2.424 0 0 3.339 21.884 0 0 10.226 ... 8 -0.686 3 0 0 0.004 0 3.693 0 0 0 0\n3 1.747 9.673 0 2 2.690 23 0 4.645 9.855 0 1 12.353 ... 2 -4.617 0 0 11 0.000 0 3.993 0 0 0 1\n4 1.824 9.825 0 2 2.700 27 0 4.795 9.894 0 1 12.519 ... 2 -4.724 0 0 13 0.000 0 4.005 0 0 0 1\n.. ... ... .. .. ... .. ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...\n832 1.125 7.878 0 1 2.136 0 0 2.990 0.000 0 0 9.311 ... 2 0.000 1 0 0 0.045 0 3.573 0 0 0 0\n833 1.187 8.046 1 1 2.222 0 2 3.105 0.000 2 0 9.668 ... 2 0.000 1 0 0 -0.025 0 3.666 0 2 0 0\n834 0.625 8.901 0 2 2.499 0 0 3.745 24.203 0 0 10.681 ... 8 -0.128 6 0 0 0.000 0 3.942 0 0 0 0\n835 3.866 8.778 0 6 2.361 0 0 4.201 11.747 0 1 10.735 ... 9 -0.347 1 0 0 0.000 0 3.497 0 0 0 0\n836 3.706 8.680 0 6 2.361 0 0 4.127 11.724 0 1 10.694 ... 
9 -0.338 1 0 0 0.000 0 3.497 0 0 0 0\n\n[837 rows x 41 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "GradientBoostingClassifier(learning_rate=0.12, loss='exponential',\n n_estimators=37, random_state=42, subsample=0.4)", "params": "{'n_estimators': [20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'loss': ['deviance', 'exponential'], 'learning_rate': [0.01, 0.12, 0.23, 0.34, 0.45], 'subsample': [0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6, 0.7000000000000001, 0.8, 0.9], 'criterion': ['friedman_mse', 'mse', 'mae']}", "eachAlgor": "'GradB'", "AlgorithmsIDsEnd": "400", "crossValidation": "5", "randomSear": "100"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/9c9dc8dad506f66ed9749d93d63f3644/metadata.json 
b/cachedir/joblib/run/randomSearch/9c9dc8dad506f66ed9749d93d63f3644/metadata.json new file mode 100644 index 0000000..ac4c811 --- /dev/null +++ b/cachedir/joblib/run/randomSearch/9c9dc8dad506f66ed9749d93d63f3644/metadata.json @@ -0,0 +1 @@ +{"duration": 240.18228578567505, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. ...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "MLPClassifier(activation='identity', alpha=0.00041000000000000005,\n hidden_layer_sizes=(66, 1), max_iter=100, random_state=42,\n tol=0.00041000000000000005)", "params": "{'hidden_layer_sizes': [(60, 3), (61, 1), (62, 1), (63, 3), (64, 2), (65, 1), (66, 1), (67, 1), (68, 3), (69, 1), (70, 3), (71, 3), (72, 3), (73, 1), (74, 3), (75, 2), (76, 1), (77, 1), (78, 1), (79, 1), (80, 1), (81, 3), (82, 3), (83, 1), (84, 3), (85, 1), (86, 3), (87, 3), (88, 3), (89, 3), (90, 2), (91, 1), (92, 2), (93, 3), (94, 2), (95, 1), (96, 1), (97, 3), (98, 2), (99, 2), (100, 2), (101, 1), (102, 1), (103, 2), (104, 1), (105, 1), (106, 2), (107, 1), (108, 2), (109, 2), (110, 3), (111, 2), (112, 1), (113, 3), (114, 2), (115, 3), (116, 1), (117, 2), (118, 1), (119, 3)], 'alpha': [1e-05, 0.00021, 0.00041000000000000005, 0.0006100000000000001, 0.0008100000000000001], 'tol': [1e-05, 0.00041000000000000005, 0.0008100000000000001], 'max_iter': [100], 'activation': ['relu', 'identity', 'logistic', 'tanh'], 'solver': ['adam', 'sgd']}", "eachAlgor": "'MLP'", "AlgorithmsIDsEnd": "300", "crossValidation": "10", "randomSear": "150"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/a2ef680d38f3195fc6c56553ff901379/output.pkl b/cachedir/joblib/run/randomSearch/a2ef680d38f3195fc6c56553ff901379/output.pkl new file mode 100644 index 0000000..e90cf6e Binary files /dev/null and b/cachedir/joblib/run/randomSearch/a2ef680d38f3195fc6c56553ff901379/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/a4e25e4148e3136b295ffbec3719902c/output.pkl b/cachedir/joblib/run/randomSearch/a4e25e4148e3136b295ffbec3719902c/output.pkl new file mode 100644 index 0000000..f6081f9 Binary files /dev/null and 
b/cachedir/joblib/run/randomSearch/a4e25e4148e3136b295ffbec3719902c/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/a4eff1312220b934927e8b35405bc525/output.pkl b/cachedir/joblib/run/randomSearch/a4eff1312220b934927e8b35405bc525/output.pkl new file mode 100644 index 0000000..379214a Binary files /dev/null and b/cachedir/joblib/run/randomSearch/a4eff1312220b934927e8b35405bc525/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/b224cec55c8ee312740350f8f29ce124/metadata.json b/cachedir/joblib/run/randomSearch/b224cec55c8ee312740350f8f29ce124/metadata.json new file mode 100644 index 0000000..2ec0f3b --- /dev/null +++ b/cachedir/joblib/run/randomSearch/b224cec55c8ee312740350f8f29ce124/metadata.json @@ -0,0 +1 @@ +{"duration": 72.29002404212952, "input_args": {"XData": " LOC SM6_B(m) nN nO SpMax_A nX F04[C-N] HyWi_B(m) SdO F03[C-N] nCp SM6_L ... F03[C-O] SdssC nCIR F01[N-N] NssssC Psi_i_1d nN-N SpMax_B(m) B01[C-Br] F02[C-N] N-073 nCRX3\n0 1.185 9.085 0 0 2.263 3 0 3.642 0.000 0 1 9.902 ... 0 0.000 1 0 0 0.014 0 4.054 0 0 0 0\n1 0.000 8.179 2 0 2.194 0 6 3.526 0.000 6 0 10.054 ... 0 0.000 2 0 0 0.000 0 3.489 0 4 0 0\n2 0.762 8.297 0 3 2.424 0 0 3.339 21.884 0 0 10.226 ... 8 -0.686 3 0 0 0.004 0 3.693 0 0 0 0\n3 1.747 9.673 0 2 2.690 23 0 4.645 9.855 0 1 12.353 ... 2 -4.617 0 0 11 0.000 0 3.993 0 0 0 1\n4 1.824 9.825 0 2 2.700 27 0 4.795 9.894 0 1 12.519 ... 2 -4.724 0 0 13 0.000 0 4.005 0 0 0 1\n.. ... ... .. .. ... .. ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...\n832 1.125 7.878 0 1 2.136 0 0 2.990 0.000 0 0 9.311 ... 2 0.000 1 0 0 0.045 0 3.573 0 0 0 0\n833 1.187 8.046 1 1 2.222 0 2 3.105 0.000 2 0 9.668 ... 2 0.000 1 0 0 -0.025 0 3.666 0 2 0 0\n834 0.625 8.901 0 2 2.499 0 0 3.745 24.203 0 0 10.681 ... 8 -0.128 6 0 0 0.000 0 3.942 0 0 0 0\n835 3.866 8.778 0 6 2.361 0 0 4.201 11.747 0 1 10.735 ... 9 -0.347 1 0 0 0.000 0 3.497 0 0 0 0\n836 3.706 8.680 0 6 2.361 0 0 4.127 11.724 0 1 10.694 ... 
9 -0.338 1 0 0 0.000 0 3.497 0 0 0 0\n\n[837 rows x 41 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "RandomForestClassifier(max_depth=4, n_estimators=56, random_state=42)", "params": "{'n_estimators': [20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'max_depth': [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19], 'criterion': ['gini', 'entropy']}", "eachAlgor": "'RF'", "AlgorithmsIDsEnd": "300", "crossValidation": "5", "randomSear": "100"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/b63f4ad6ce95d0904e77a19156023dae/output.pkl b/cachedir/joblib/run/randomSearch/b63f4ad6ce95d0904e77a19156023dae/output.pkl new file mode 100644 index 0000000..5f7cc64 Binary files /dev/null and 
b/cachedir/joblib/run/randomSearch/b63f4ad6ce95d0904e77a19156023dae/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/c0b6ae93ae3358140cb42192f5140723/metadata.json b/cachedir/joblib/run/randomSearch/c0b6ae93ae3358140cb42192f5140723/metadata.json new file mode 100644 index 0000000..8efaa3f --- /dev/null +++ b/cachedir/joblib/run/randomSearch/c0b6ae93ae3358140cb42192f5140723/metadata.json @@ -0,0 +1 @@ +{"duration": 95.35076880455017, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. ...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "RandomForestClassifier(criterion='entropy', max_depth=6, n_estimators=52,\n random_state=42)", "params": "{'n_estimators': [20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'max_depth': [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19], 'criterion': ['gini', 'entropy']}", "eachAlgor": "'RF'", "AlgorithmsIDsEnd": "300", "crossValidation": "10", "randomSear": "100"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/4a4d3498f45d1d7ad153ae029decbdcf/metadata.json b/cachedir/joblib/run/randomSearch/c4575447d0f5391b949d4431042dc0d4/metadata.json similarity index 70% rename from cachedir/joblib/run/randomSearch/4a4d3498f45d1d7ad153ae029decbdcf/metadata.json rename to cachedir/joblib/run/randomSearch/c4575447d0f5391b949d4431042dc0d4/metadata.json index e1a302a..42e8544 100644 --- a/cachedir/joblib/run/randomSearch/4a4d3498f45d1d7ad153ae029decbdcf/metadata.json +++ b/cachedir/joblib/run/randomSearch/c4575447d0f5391b949d4431042dc0d4/metadata.json @@ -1 +1 @@ -{"duration": 22.581008911132812, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 
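Each metadata.json above records the inputs of one memoized randomSearch call: the training frame (XData), the labels (yData), the estimator, the parameter grid, the algorithm tag, the cross-validation fold count, and the number of random candidates. A minimal sketch of the RandomForest search these entries describe is given below; the grid, n_iter=100 and cv=10 are taken from the metadata, while scoring defaults, n_jobs and the variable names are assumptions for illustration only.

# Hedged sketch of the cached RandomForest search recorded above; grid,
# n_iter and cv come from the metadata, everything else is assumed.
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import RandomizedSearchCV

params = {
    'n_estimators': list(range(20, 100)),   # 20 ... 99, as listed
    'max_depth': list(range(2, 20)),        # 2 ... 19, as listed
    'criterion': ['gini', 'entropy'],
}
clf = RandomForestClassifier(random_state=42)
search = RandomizedSearchCV(estimator=clf, param_distributions=params,
                            n_iter=100, cv=10, random_state=42, n_jobs=-1)
# search.fit(XData, yData)  # XData / yData as dumped in the metadata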
2\n.. ... ... ... ... ... ... ... ... ... ... .. .. ...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "KNeighborsClassifier(algorithm='kd_tree', metric='manhattan', n_neighbors=24)", "params": "{'n_neighbors': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'metric': ['chebyshev', 'manhattan', 'euclidean', 'minkowski'], 'algorithm': ['brute', 'kd_tree', 'ball_tree'], 'weights': ['uniform', 'distance']}", "eachAlgor": "'KNN'", "AlgorithmsIDsEnd": "0", "crossValidation": "10", "randomSear": "200"}} \ No newline at end of file +{"duration": 16.793073177337646, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. 
...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "KNeighborsClassifier(algorithm='brute', metric='chebyshev', n_neighbors=54,\n weights='distance')", "params": "{'n_neighbors': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'metric': ['chebyshev', 'manhattan', 'euclidean', 'minkowski'], 'algorithm': ['brute', 'kd_tree', 'ball_tree'], 'weights': ['uniform', 'distance']}", "eachAlgor": "'KNN'", "AlgorithmsIDsEnd": "0", "crossValidation": "10", "randomSear": "150"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/c704ef1151c66da638ef2f9a7ba51e1e/output.pkl b/cachedir/joblib/run/randomSearch/c704ef1151c66da638ef2f9a7ba51e1e/output.pkl new file mode 100644 index 0000000..4eaa76a Binary files /dev/null and b/cachedir/joblib/run/randomSearch/c704ef1151c66da638ef2f9a7ba51e1e/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/c80650c01cabdf0eab92e0080711d757/output.pkl b/cachedir/joblib/run/randomSearch/c80650c01cabdf0eab92e0080711d757/output.pkl new file mode 100644 index 0000000..26a4740 Binary files /dev/null and b/cachedir/joblib/run/randomSearch/c80650c01cabdf0eab92e0080711d757/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/c8c22619e5e5cc0c11a346d6e73b0548/output.pkl b/cachedir/joblib/run/randomSearch/c8c22619e5e5cc0c11a346d6e73b0548/output.pkl new file mode 100644 index 0000000..566e9f0 Binary files /dev/null and b/cachedir/joblib/run/randomSearch/c8c22619e5e5cc0c11a346d6e73b0548/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/cf8d5e1faada89f54ec408ad11684bef/metadata.json b/cachedir/joblib/run/randomSearch/cf8d5e1faada89f54ec408ad11684bef/metadata.json new file mode 100644 index 0000000..f095adc --- /dev/null +++ b/cachedir/joblib/run/randomSearch/cf8d5e1faada89f54ec408ad11684bef/metadata.json @@ -0,0 +1 @@ +{"duration": 16.24565291404724, "input_args": {"XData": " LOC SM6_B(m) nN nO SpMax_A nX F04[C-N] HyWi_B(m) SdO F03[C-N] nCp SM6_L ... F03[C-O] SdssC nCIR F01[N-N] NssssC Psi_i_1d nN-N SpMax_B(m) B01[C-Br] F02[C-N] N-073 nCRX3\n0 1.185 9.085 0 0 2.263 3 0 3.642 0.000 0 1 9.902 ... 
0 0.000 1 0 0 0.014 0 4.054 0 0 0 0\n1 0.000 8.179 2 0 2.194 0 6 3.526 0.000 6 0 10.054 ... 0 0.000 2 0 0 0.000 0 3.489 0 4 0 0\n2 0.762 8.297 0 3 2.424 0 0 3.339 21.884 0 0 10.226 ... 8 -0.686 3 0 0 0.004 0 3.693 0 0 0 0\n3 1.747 9.673 0 2 2.690 23 0 4.645 9.855 0 1 12.353 ... 2 -4.617 0 0 11 0.000 0 3.993 0 0 0 1\n4 1.824 9.825 0 2 2.700 27 0 4.795 9.894 0 1 12.519 ... 2 -4.724 0 0 13 0.000 0 4.005 0 0 0 1\n.. ... ... .. .. ... .. ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...\n832 1.125 7.878 0 1 2.136 0 0 2.990 0.000 0 0 9.311 ... 2 0.000 1 0 0 0.045 0 3.573 0 0 0 0\n833 1.187 8.046 1 1 2.222 0 2 3.105 0.000 2 0 9.668 ... 2 0.000 1 0 0 -0.025 0 3.666 0 2 0 0\n834 0.625 8.901 0 2 2.499 0 0 3.745 24.203 0 0 10.681 ... 8 -0.128 6 0 0 0.000 0 3.942 0 0 0 0\n835 3.866 8.778 0 6 2.361 0 0 4.201 11.747 0 1 10.735 ... 9 -0.347 1 0 0 0.000 0 3.497 0 0 0 0\n836 3.706 8.680 0 6 2.361 0 0 4.127 11.724 0 1 10.694 ... 9 -0.338 1 0 0 0.000 0 3.497 0 0 0 0\n\n[837 rows x 41 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": 
"KNeighborsClassifier(algorithm='brute', metric='chebyshev', n_neighbors=6)", "params": "{'n_neighbors': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'metric': ['chebyshev', 'manhattan', 'euclidean', 'minkowski'], 'algorithm': ['brute', 'kd_tree', 'ball_tree'], 'weights': ['uniform', 'distance']}", "eachAlgor": "'KNN'", "AlgorithmsIDsEnd": "0", "crossValidation": "5", "randomSear": "100"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/d0947f1cb31ddee9a6ac84d242952072/output.pkl b/cachedir/joblib/run/randomSearch/d0947f1cb31ddee9a6ac84d242952072/output.pkl new file mode 100644 index 0000000..e36c1b3 Binary files /dev/null and b/cachedir/joblib/run/randomSearch/d0947f1cb31ddee9a6ac84d242952072/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/d22e03918cbc8389dcdfdb56753a304a/metadata.json b/cachedir/joblib/run/randomSearch/d22e03918cbc8389dcdfdb56753a304a/metadata.json new file mode 100644 index 0000000..25875a2 --- /dev/null +++ b/cachedir/joblib/run/randomSearch/d22e03918cbc8389dcdfdb56753a304a/metadata.json @@ -0,0 +1 @@ +{"duration": 165.04137682914734, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. 
...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "GradientBoostingClassifier(criterion='mae', learning_rate=0.45,\n loss='exponential', n_estimators=82, random_state=42,\n subsample=0.2)", "params": "{'n_estimators': [20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'loss': ['deviance', 'exponential'], 'learning_rate': [0.01, 0.12, 0.23, 0.34, 0.45], 'subsample': [0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6, 0.7000000000000001, 0.8, 0.9], 'criterion': ['friedman_mse', 'mse', 'mae']}", "eachAlgor": "'GradB'", "AlgorithmsIDsEnd": "800", "crossValidation": "10", "randomSear": "200"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/d9441100431531e5a3447a697adb8ea6/metadata.json b/cachedir/joblib/run/randomSearch/d9441100431531e5a3447a697adb8ea6/metadata.json new file mode 100644 index 0000000..7eaa309 --- /dev/null +++ b/cachedir/joblib/run/randomSearch/d9441100431531e5a3447a697adb8ea6/metadata.json @@ -0,0 +1 @@ +{"duration": 139.34292888641357, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. 
...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "MLPClassifier(activation='tanh', alpha=0.00041000000000000005,\n hidden_layer_sizes=(92, 2), max_iter=100, random_state=42,\n tol=0.00041000000000000005)", "params": "{'hidden_layer_sizes': [(60, 3), (61, 1), (62, 1), (63, 3), (64, 2), (65, 1), (66, 1), (67, 1), (68, 3), (69, 1), (70, 3), (71, 3), (72, 3), (73, 1), (74, 3), (75, 2), (76, 1), (77, 1), (78, 1), (79, 1), (80, 1), (81, 3), (82, 3), (83, 1), (84, 3), (85, 1), (86, 3), (87, 3), (88, 3), (89, 3), (90, 2), (91, 1), (92, 2), (93, 3), (94, 2), (95, 1), (96, 1), (97, 3), (98, 2), (99, 2), (100, 2), (101, 1), (102, 1), (103, 2), (104, 1), (105, 1), (106, 2), (107, 1), (108, 2), (109, 2), (110, 3), (111, 2), (112, 1), (113, 3), (114, 2), (115, 3), (116, 1), (117, 2), (118, 1), (119, 3)], 'alpha': [1e-05, 0.00021, 0.00041000000000000005, 0.0006100000000000001, 0.0008100000000000001], 'tol': [1e-05, 0.00041000000000000005, 0.0008100000000000001], 'max_iter': [100], 'activation': ['relu', 'identity', 'logistic', 'tanh'], 'solver': ['adam', 'sgd']}", "eachAlgor": "'MLP'", "AlgorithmsIDsEnd": "200", "crossValidation": "10", "randomSear": "100"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/d94838b56ebf61f3e904b72aa1167ab0/output.pkl b/cachedir/joblib/run/randomSearch/d94838b56ebf61f3e904b72aa1167ab0/output.pkl new file mode 100644 index 0000000..168c56f Binary files /dev/null and b/cachedir/joblib/run/randomSearch/d94838b56ebf61f3e904b72aa1167ab0/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/d9b2ab877c77f63dce34100682f80929/output.pkl b/cachedir/joblib/run/randomSearch/d9b2ab877c77f63dce34100682f80929/output.pkl new file mode 100644 index 0000000..5775c01 Binary files /dev/null and b/cachedir/joblib/run/randomSearch/d9b2ab877c77f63dce34100682f80929/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/dd54b69c3c7688e911657b8e733e50ab/output.pkl b/cachedir/joblib/run/randomSearch/dd54b69c3c7688e911657b8e733e50ab/output.pkl index 251907a..3a1283b 100644 Binary files a/cachedir/joblib/run/randomSearch/dd54b69c3c7688e911657b8e733e50ab/output.pkl and b/cachedir/joblib/run/randomSearch/dd54b69c3c7688e911657b8e733e50ab/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/e08b33eb170c53ac03d119cab9b2100f/metadata.json b/cachedir/joblib/run/randomSearch/e08b33eb170c53ac03d119cab9b2100f/metadata.json new file mode 100644 index 0000000..1d89354 
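Grid values such as 0.30000000000000004 and 0.7000000000000001 for the GradientBoosting subsample, or 0.00041000000000000005 for the MLP alpha and tol, carry the typical floating-point noise of numpy.arange, so the grids were presumably generated along the lines of the sketch below; the bounds and step sizes are inferred from the listed values and are not confirmed by the diff.

# Hedged reconstruction of how the noisy float grids were likely built.
import numpy as np

subsample = list(np.arange(0.1, 1.0, 0.1))     # 0.1 ... 0.9, incl. 0.30000000000000004
alpha     = list(np.arange(1e-5, 1e-3, 2e-4))  # 1e-05, 0.00021, 0.00041..., 0.00061..., 0.00081...
tol       = list(np.arange(1e-5, 1e-3, 4e-4))  # 1e-05, 0.00041..., 0.00081...

# Rounding, e.g. [round(v, 2) for v in subsample], would keep this noise
# out of the cached metadata and the frontend labels.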
--- /dev/null +++ b/cachedir/joblib/run/randomSearch/e08b33eb170c53ac03d119cab9b2100f/metadata.json @@ -0,0 +1 @@ +{"duration": 190.53866386413574, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. ...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "RandomForestClassifier(criterion='entropy', max_depth=13, n_estimators=71,\n random_state=42)", "params": "{'n_estimators': [20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'max_depth': [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19], 'criterion': ['gini', 'entropy']}", "eachAlgor": "'RF'", "AlgorithmsIDsEnd": "450", "crossValidation": "10", "randomSear": "150"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/e34982f08c06e138e2202e6e5ba833b0/output.pkl b/cachedir/joblib/run/randomSearch/e34982f08c06e138e2202e6e5ba833b0/output.pkl new file mode 100644 index 0000000..2f5cb53 Binary files /dev/null and b/cachedir/joblib/run/randomSearch/e34982f08c06e138e2202e6e5ba833b0/output.pkl differ diff --git a/cachedir/joblib/run/randomSearch/f1525731493793574c4bab01f5bc906f/metadata.json b/cachedir/joblib/run/randomSearch/f1525731493793574c4bab01f5bc906f/metadata.json new file mode 100644 index 0000000..74f56ac --- /dev/null +++ b/cachedir/joblib/run/randomSearch/f1525731493793574c4bab01f5bc906f/metadata.json @@ -0,0 +1 @@ +{"duration": 20.35997724533081, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. 
...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "KNeighborsClassifier(algorithm='brute', metric='euclidean', n_neighbors=72,\n weights='distance')", "params": "{'n_neighbors': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'metric': ['chebyshev', 'manhattan', 'euclidean', 'minkowski'], 'algorithm': ['brute', 'kd_tree', 'ball_tree'], 'weights': ['uniform', 'distance']}", "eachAlgor": "'KNN'", "AlgorithmsIDsEnd": "0", "crossValidation": "10", "randomSear": "200"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/ff5710335de9f50465330e3ca239e4af/metadata.json b/cachedir/joblib/run/randomSearch/ff5710335de9f50465330e3ca239e4af/metadata.json new file mode 100644 index 0000000..9a83faf --- /dev/null +++ b/cachedir/joblib/run/randomSearch/ff5710335de9f50465330e3ca239e4af/metadata.json @@ -0,0 +1 @@ +{"duration": 174.78210496902466, "input_args": {"XData": " Fbs Slope Trestbps Exang Thalach Age Chol Sex Oldpeak Restecg Cp Ca Thal\n0 1 0 145 0 150 63 233 1 2.3 0 3 0 1\n1 0 0 130 0 187 37 250 1 3.5 1 2 0 2\n2 0 2 130 0 172 41 204 0 1.4 0 1 0 2\n3 0 2 120 0 178 56 236 1 0.8 1 1 0 2\n4 0 2 120 1 163 57 354 0 0.6 1 0 0 2\n.. ... ... ... ... ... ... ... ... ... ... .. .. 
...\n298 0 1 140 1 123 57 241 0 0.2 1 0 0 3\n299 0 1 110 0 132 45 264 1 1.2 1 3 0 3\n300 1 1 144 0 141 68 193 1 3.4 1 0 2 3\n301 0 1 130 1 115 57 131 1 1.2 1 0 1 3\n302 0 1 130 0 174 57 236 0 0.0 0 1 1 2\n\n[303 rows x 13 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]", "clf": "GradientBoostingClassifier(learning_rate=0.34, loss='exponential',\n n_estimators=63, random_state=42,\n subsample=0.7000000000000001)", "params": "{'n_estimators': [20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99], 'loss': ['deviance', 'exponential'], 'learning_rate': [0.01, 0.12, 0.23, 0.34, 0.45], 'subsample': [0.1, 0.2, 0.30000000000000004, 0.4, 0.5, 0.6, 0.7000000000000001, 0.8, 0.9], 'criterion': ['friedman_mse', 'mse', 'mae']}", "eachAlgor": "'GradB'", "AlgorithmsIDsEnd": "600", "crossValidation": "10", "randomSear": "150"}} \ No newline at end of file diff --git a/cachedir/joblib/run/randomSearch/func_code.py b/cachedir/joblib/run/randomSearch/func_code.py index fc6ef34..9ae6ef2 100644 --- a/cachedir/joblib/run/randomSearch/func_code.py +++ b/cachedir/joblib/run/randomSearch/func_code.py @@ -1,6 +1,7 @@ # first line: 728 @memory.cache def randomSearch(XData, yData, clf, params, eachAlgor, AlgorithmsIDsEnd,crossValidation,randomSear): + print('search') print(clf) search = RandomizedSearchCV( estimator=clf, param_distributions=params, n_iter=randomSear, diff --git a/frontend/src/components/Ensemble.vue b/frontend/src/components/Ensemble.vue index 3b0e58f..f4f937a 100644 --- a/frontend/src/components/Ensemble.vue +++ b/frontend/src/components/Ensemble.vue @@ -328,6 +328,7 @@ export default { pushModelsRemainingTempCM.push(allModels[i]) } } + console.log(ClassifierIDsListCM) EventBus.$emit('RemainingPointsCM', pushModelsRemainingTempCM) EventBus.$emit('callValidationData', ResultsAll) EventBus.$emit('SendSelectedPointsUpdateIndicatorCM', ClassifierIDsListCM) diff --git a/frontend/src/components/History.vue b/frontend/src/components/History.vue index bd13d5d..d756ab6 100644 --- a/frontend/src/components/History.vue +++ b/frontend/src/components/History.vue @@ -23,6 +23,7 @@ export default { PerFCM: [], storedEnsem: [], storedCM: [], + previouslyIDs: [], percentageOverall: [], values: [0,0,0,0,0,0,50,50,50,50,50,0,50,50,50,50,50,0], valuesStage2: [0,0,0,0,0,0,50,50,50,50,50,0,50,50,50,50,50,0,25,25,25,25,25,0,25,25,25,25,25,0,25,25,25,25,25,0,25,25,25,25,25,0], @@ -39,16 +40,85 @@ 
export default { svgLeg.selectAll("*").remove(); }, computePerformanceDiffS () { - var colorsforScatterPlot = this.PerF - var mergedStoreEnsembleLoc = [].concat.apply([], this.storedEnsem) - var mergedStoreEnsembleLocFormatted = [] - for (let i = 0; i < mergedStoreEnsembleLoc.length; i++) { - mergedStoreEnsembleLocFormatted.push(parseInt(mergedStoreEnsembleLoc[i].replace(/\D/g,''))) + + var max = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + var min = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + var tempDataKNNC = [] + var tempDataLRC = [] + var tempDataMLPC = [] + var tempDataRFC = [] + var tempDataGradBC = [] + var tempDataKNNM = [] + var tempDataLRM = [] + var tempDataMLPM = [] + var tempDataRFM = [] + var tempDataGradBM = [] + var splitData = [] + console.log(this.previouslyIDs) + for (let i = 0; i < this.previouslyIDs.length; i++) { + let tempSplit = this.previouslyIDs[i].split(/([0-9]+)/) + if (tempSplit[0] == 'KNNC') { + tempDataKNNC.push(this.previouslyIDs[i]) + } + else if (tempSplit[0] == 'LRC') { + tempDataLRC.push(this.previouslyIDs[i]) + } + else if (tempSplit[0] == 'MLPC') { + tempDataMLPC.push(this.previouslyIDs[i]) + } + else if (tempSplit[0] == 'RFC') { + tempDataRFC.push(this.previouslyIDs[i]) + } + else if (tempSplit[0] == 'GradBC') { + tempDataGradBC.push(this.previouslyIDs[i]) + } else if (tempSplit[0] == 'KNNM') { + tempDataKNNM.push(this.previouslyIDs[i]) + } + else if (tempSplit[0] == 'LRM') { + tempDataLRM.push(this.previouslyIDs[i]) + } + else if (tempSplit[0] == 'MLPM') { + tempDataMLPM.push(this.previouslyIDs[i]) + } + else if (tempSplit[0] == 'RFM') { + tempDataRFM.push(this.previouslyIDs[i]) + } + else if (tempSplit[0] == 'GradBM') { + tempDataGradBM.push(this.previouslyIDs[i]) + } + else { + } } - - colorsforScatterPlot = mergedStoreEnsembleLocFormatted.map((item) => colorsforScatterPlot[item]) - var max = Math.max.apply(Math, colorsforScatterPlot) - var min = Math.min.apply(Math, colorsforScatterPlot) + splitData.push(tempDataKNNC) + splitData.push(tempDataLRC) + splitData.push(tempDataMLPC) + splitData.push(tempDataRFC) + splitData.push(tempDataGradBC) + splitData.push(tempDataKNNM) + splitData.push(tempDataLRM) + splitData.push(tempDataMLPM) + splitData.push(tempDataRFM) + splitData.push(tempDataGradBM) + + for (let i = 0; i < splitData.length; i++) { + var colorsforScatterPlot = this.PerF + if (splitData[i].length != 0) { + var mergedStoreEnsembleLoc = [].concat.apply([], splitData[i]) + var mergedStoreEnsembleLocFormatted = [] + for (let j = 0; j < mergedStoreEnsembleLoc.length; j++) { + mergedStoreEnsembleLocFormatted.push(parseInt(mergedStoreEnsembleLoc[j].replace(/\D/g,''))) + } + + colorsforScatterPlot = mergedStoreEnsembleLocFormatted.map((item) => colorsforScatterPlot[item]) + + max[i] = Math.max.apply(Math, colorsforScatterPlot) + min[i] = Math.min.apply(Math, colorsforScatterPlot) + } + + } + + console.log(max) + console.log(min) var countMax = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] var countMin = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] @@ -57,199 +127,200 @@ export default { let tempSplit = this.storedCM[i].split(/([0-9]+)/) if (tempSplit[0] == 'KNNCC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[0]) { countMax[0] = countMax[0] + 1 - } else if (this.PerFCM[i] < min) { + } else if (this.PerFCM[i] < min[0]) { countMin[0] = countMin[0] + 1 } else { continue } } else if (tempSplit[0] == 'KNNCM') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[0]) { countMax[1] = countMax[1] + 1 - } else if (this.PerFCM[i] 
< min) { + } else if (this.PerFCM[i] < min[0]) { countMin[1] = countMin[1] + 1 } else { continue } } else if (tempSplit[0] == 'LRCC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[1]) { countMax[2] = countMax[2] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[1]) { countMin[2] = countMin[2] + 1 } else { continue } } else if (tempSplit[0] == 'LRCM') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[1]) { countMax[3] = countMax[3] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[1]) { countMin[3] = countMin[3] + 1 } else { continue } } else if (tempSplit[0] == 'MLPCC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[2]) { countMax[4] = countMax[4] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[2]) { countMin[4] = countMin[4] + 1 } else { continue } } else if (tempSplit[0] == 'MLPCM') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[2]) { countMax[5] = countMax[5] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[2]) { countMin[5] = countMin[5] + 1 } else { continue } } else if (tempSplit[0] == 'RFCC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[3]) { countMax[6] = countMax[6] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[3]) { countMin[6] = countMin[6] + 1 } } else if (tempSplit[0] == 'RFCM') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[3]) { countMax[7] = countMax[7] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[3]) { countMin[7] = countMin[7] + 1 } else { continue } } else if (tempSplit[0] == 'GradBCC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[4]) { countMax[8] = countMax[8] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[4]) { countMin[8] = countMin[8] + 1 } else { continue } } else if (tempSplit[0] == 'GradBCM') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[4]) { countMax[9] = countMax[9] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[4]) { countMin[9] = countMin[9] + 1 } else { continue } } else if (tempSplit[0] == 'KNNMC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[5]) { countMax[10] = countMax[10] + 1 - } else if (this.PerFCM[i] < min) { + } else if (this.PerFCM[i] < min[5]) { countMin[10] = countMin[10] + 1 } else { continue } } else if (tempSplit[0] == 'KNNMM') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[5]) { countMax[11] = countMax[11] + 1 - } else if (this.PerFCM[i] < min) { + } else if (this.PerFCM[i] < min[5]) { countMin[11] = countMin[11] + 1 } else { continue } } else if (tempSplit[0] == 'LRMC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[6]) { countMax[12] = countMax[12] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[6]) { countMin[12] = countMin[12] + 1 } else { continue } } else if (tempSplit[0] == 'LRMM') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[6]) { countMax[13] = countMax[13] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[6]) { countMin[13] = countMin[13] + 1 } else { continue } } else if (tempSplit[0] == 'MLPMC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[7]) { countMax[14] = countMax[14] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[7]) { countMin[14] = countMin[14] + 1 } else { continue } } else if (tempSplit[0] == 'MLPMM') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[7]) { countMax[15] = 
countMax[15] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[7]) { countMin[15] = countMin[15] + 1 } else { continue } } else if (tempSplit[0] == 'RFMC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[8]) { countMax[16] = countMax[16] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[8]) { countMin[16] = countMin[16] + 1 } } else if (tempSplit[0] == 'RFMM') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[8]) { countMax[17] = countMax[17] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[8]) { countMin[17] = countMin[17] + 1 } else { continue } } else if (tempSplit[0] == 'GradBMC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[9]) { countMax[18] = countMax[18] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[9]) { countMin[18] = countMin[18] + 1 } else { continue } } else { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[9]) { countMax[19] = countMax[19] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[9]) { countMin[19] = countMin[19] + 1 } else { continue } } } - + console.log(countMax) + console.log(countMin) // var percentage = [] // for (let j = 0; j < countMax.length; j++) { // if (j >= 15) { @@ -279,8 +350,7 @@ export default { // } // } //CORRECT -console.log(countMax) -console.log(countMin) + var percentage = [] for (let j = 0; j < countMax.length; j++) { if (j >= 15) { @@ -314,7 +384,7 @@ console.log(countMin) }, SankeyViewStage3 () { var valuesLoc = this.valuesStage2 - console.log(valuesLoc) + var localStep = 2 var numberofModels = 6 var units = "Models"; @@ -440,9 +510,9 @@ console.log(countMin) var colorDiff colorDiff = d3v5.scaleSequential(d3v5.interpolatePRGn).domain([-100, 100]) var percentage = this.percentageOverall - console.log(percentage) + var previousPercentage = this.storePreviousPercentage - console.log(previousPercentage) + // add in the links var link = svg.append("g").selectAll(".link") .data(graph.links) @@ -630,19 +700,60 @@ console.log(countMin) }, computePerformanceDiff () { - var colorsforScatterPlot = this.PerF - var mergedStoreEnsembleLoc = [].concat.apply([], this.storedEnsem) - var mergedStoreEnsembleLocFormatted = [] - for (let i = 0; i < mergedStoreEnsembleLoc.length; i++) { - mergedStoreEnsembleLocFormatted.push(parseInt(mergedStoreEnsembleLoc[i].replace(/\D/g,''))) + var max = [0, 0, 0, 0, 0] + var min = [0, 0, 0, 0, 0] + var tempDataKNN = [] + var tempDataLR = [] + var tempDataMLP = [] + var tempDataRF = [] + var tempDataGradB = [] + var splitData = [] + + for (let i = 0; i < this.previouslyIDs.length; i++) { + let tempSplit = this.previouslyIDs[i].split(/([0-9]+)/) + if (tempSplit[0] == 'KNN') { + tempDataKNN.push(this.previouslyIDs[i]) + } + else if (tempSplit[0] == 'LR') { + tempDataLR.push(this.previouslyIDs[i]) + } + else if (tempSplit[0] == 'MLP') { + tempDataMLP.push(this.previouslyIDs[i]) + } + else if (tempSplit[0] == 'RF') { + tempDataRF.push(this.previouslyIDs[i]) + } + else if (tempSplit[0] == 'GradB') { + tempDataGradB.push(this.previouslyIDs[i]) + } + else { + } } - - colorsforScatterPlot = mergedStoreEnsembleLocFormatted.map((item) => colorsforScatterPlot[item]) + splitData.push(tempDataKNN) + splitData.push(tempDataLR) + splitData.push(tempDataMLP) + splitData.push(tempDataRF) + splitData.push(tempDataGradB) + + for (let i = 0; i < splitData.length; i++) { + var colorsforScatterPlot = this.PerF + if (splitData[i].length != 0) { + var mergedStoreEnsembleLoc = 
[].concat.apply([], splitData[i]) + var mergedStoreEnsembleLocFormatted = [] + for (let j = 0; j < mergedStoreEnsembleLoc.length; j++) { + mergedStoreEnsembleLocFormatted.push(parseInt(mergedStoreEnsembleLoc[j].replace(/\D/g,''))) + } - var max = Math.max.apply(Math, colorsforScatterPlot) - var min = Math.min.apply(Math, colorsforScatterPlot) + colorsforScatterPlot = mergedStoreEnsembleLocFormatted.map((item) => colorsforScatterPlot[item]) + max[i] = Math.max.apply(Math, colorsforScatterPlot) + min[i] = Math.min.apply(Math, colorsforScatterPlot) + } + + } + console.log(max) + console.log(min) var countMax = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0] var countMin = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0] @@ -650,104 +761,104 @@ console.log(countMin) let tempSplit = this.storedCM[i].split(/([0-9]+)/) if (tempSplit[0] == 'KNNC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[0]) { countMax[0] = countMax[0] + 1 - } else if (this.PerFCM[i] < min) { + } else if (this.PerFCM[i] < min[0]) { countMin[0] = countMin[0] + 1 } else { continue } } else if (tempSplit[0] == 'KNNM') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[0]) { countMax[1] = countMax[1] + 1 - } else if (this.PerFCM[i] < min) { + } else if (this.PerFCM[i] < min[0]) { countMin[1] = countMin[1] + 1 } else { continue } } else if (tempSplit[0] == 'LRC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[1]) { countMax[2] = countMax[2] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[1]) { countMin[2] = countMin[2] + 1 } else { continue } } else if (tempSplit[0] == 'LRM') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[1]) { countMax[3] = countMax[3] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[1]) { countMin[3] = countMin[3] + 1 } else { continue } } else if (tempSplit[0] == 'MLPC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[2]) { countMax[4] = countMax[4] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[2]) { countMin[4] = countMin[4] + 1 } else { continue } } else if (tempSplit[0] == 'MLPM') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[2]) { countMax[5] = countMax[5] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[2]) { countMin[5] = countMin[5] + 1 } else { continue } } else if (tempSplit[0] == 'RFC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[3]) { countMax[6] = countMax[6] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[3]) { countMin[6] = countMin[6] + 1 } } else if (tempSplit[0] == 'RFM') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[3]) { countMax[7] = countMax[7] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[3]) { countMin[7] = countMin[7] + 1 } else { continue } } else if (tempSplit[0] == 'GradBC') { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[4]) { countMax[8] = countMax[8] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[4]) { countMin[8] = countMin[8] + 1 } else { continue } } else { - if (this.PerFCM[i] > max) { + if (this.PerFCM[i] > max[4]) { countMax[9] = countMax[9] + 1 } - else if (this.PerFCM[i] < min) { + else if (this.PerFCM[i] < min[4]) { countMin[9] = countMin[9] + 1 } else { continue } } } -console.log(countMax) -console.log(countMin) + console.log(countMax) + console.log(countMin) // var percentage = [] // for (let j = 0; j < countMax.length; j++) { // if (j >= 5) { @@ -1357,6 +1468,10 @@ console.log(countMin) mounted() { 
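The two computePerformanceDiff* rewrites above replace one global max/min over all previously stored models with a max/min pair per base algorithm: the IDs in previouslyIDs are split on their numeric suffix, grouped by prefix (KNN, LR, MLP, RF, GradB, plus the crossover/mutation variants), mapped to their performance values through PerF, and the crossover/mutation results in PerFCM are then counted only against the extrema of their own algorithm. A compact sketch of that grouping step, written in Python with hypothetical inputs rather than the component's actual state:

# Hedged sketch of the per-algorithm max/min grouping introduced above.
# previously_ids holds IDs such as 'KNN12' or 'GradB87'; perf maps a model's
# numeric index to its performance, mirroring this.PerF.
import re
from collections import defaultdict

def per_algorithm_extrema(previously_ids, perf):
    groups = defaultdict(list)
    for model_id in previously_ids:
        prefix, number = re.match(r'([A-Za-z]+)([0-9]+)', model_id).groups()
        groups[prefix].append(perf[int(number)])
    return {algo: (max(vals), min(vals)) for algo, vals in groups.items()}

# per_algorithm_extrema(['KNN3', 'KNN7', 'RF40'], {3: 81, 7: 84, 40: 90})
# -> {'KNN': (84, 81), 'RF': (90, 90)}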
//EventBus.$on('emittedEventCallingSankeyLegend', this.LegendStable) + EventBus.$on('updateRandomS', data => { this.RandomSearLoc = data }) + EventBus.$on('updateStage1', data => { this.values = data }) + EventBus.$on('updateStage2', data => { this.valuesStage2 = data }) + EventBus.$on('emittedEventCallingSankeyStage2', this.SankeyViewStage2) EventBus.$on('emittedEventCallingSankeyStage3', this.SankeyViewStage3) @@ -1375,6 +1490,8 @@ console.log(countMin) EventBus.$on('ResponsiveandChange', data => { this.WH = data}) + EventBus.$on('SendModelsAll', data => { this.previouslyIDs = data }) + EventBus.$on('SendPerformance', data => { this.PerF = data}) EventBus.$on('SendPerformanceCM', data => { diff --git a/frontend/src/components/Main.vue b/frontend/src/components/Main.vue index 63a666c..3a0916b 100755 --- a/frontend/src/components/Main.vue +++ b/frontend/src/components/Main.vue @@ -40,7 +40,7 @@ - Solution Space of Hyper-Parameters + Hyper-Parameters' Space [Sel: {{OverSelLength}} / All: {{OverAllLength}}]Projection1 @@ -88,7 +88,7 @@ - Predictive Results for Majority-Voting EnsembleActive2 + Performance for Majority-Voting EnsembleActive2 @@ -311,12 +311,13 @@ export default Vue.extend({ this.firstTimeExec = false EventBus.$emit('callAlgorithhms') this.Status = " (S) Stage 1" - } else { + } else { + var IDsPreviously = JSON.parse(this.OverviewResults[16]) var Performance = JSON.parse(this.OverviewResults[1]) - console.log(this.storeEnsemblePermanently) + EventBus.$emit('SendModelsAll', IDsPreviously) + EventBus.$emit('SendPerformance', Performance) EventBus.$emit('SendStoredEnsembleHist', this.storeEnsemblePermanently) EventBus.$emit('SendStoredEnsemble', this.storeEnsemblePermanently) - EventBus.$emit('SendPerformance', Performance) EventBus.$emit('emittedEventCallingCrossoverMutation', this.OverviewResults) this.PredictSelEnsem = [] this.storeBothEnsCM[1] = this.OverviewResults @@ -706,8 +707,11 @@ export default Vue.extend({ axios.post(path, postData, axiosConfig) .then(response => { console.log('File name was sent successfully!') - this.CMNumberofModelsOFFICIAL = [0,0,0,0,0,0,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,0,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,0], - this.CMNumberofModelsOFFICIALS2 = [0,0,0,0,0,0,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,0,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,0,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,0,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,0,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,0,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,0], + this.CMNumberofModelsOFFICIAL = [0,0,0,0,0,0,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,0,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,0] + this.CMNumberofModelsOFFICIALS2 = 
[0,0,0,0,0,0,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,0,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,this.RandomSear/2,0,Math.floor(this.RandomSear/4),this.RandomSear/4,Math.floor(this.RandomSear/4),Math.floor(this.RandomSear/4),Math.floor(this.RandomSear/4),0,Math.floor(this.RandomSear/4),Math.floor(this.RandomSear/4),Math.floor(this.RandomSear/4),Math.floor(this.RandomSear/4),Math.floor(this.RandomSear/4),0,Math.floor(this.RandomSear/4),Math.floor(this.RandomSear/4),Math.floor(this.RandomSear/4),Math.floor(this.RandomSear/4),Math.floor(this.RandomSear/4),0,Math.floor(this.RandomSear/4),Math.floor(this.RandomSear/4),Math.floor(this.RandomSear/4),Math.floor(this.RandomSear/4),Math.floor(this.RandomSear/4),0] + EventBus.$emit('updateRandomS', this.RandomSear) + EventBus.$emit('updateStage1', this.CMNumberofModelsOFFICIAL) + EventBus.$emit('updateStage2', this.CMNumberofModelsOFFICIALS2) this.SendAlgorithmsToServer() }) .catch(error => { @@ -923,7 +927,7 @@ export default Vue.extend({ this.storeEnsemblePermanently.push(this.storeEnsemble[i]) } var mergedStoreEnsembleLoc = [].concat.apply([], this.storeEnsemblePermanently) - console.log(mergedStoreEnsembleLoc) + if (this.CurrentStage == 1) { var postData = { RemainingPoints: this.unselectedRemainingPoints, diff --git a/frontend/src/components/Predictions.vue b/frontend/src/components/Predictions.vue index 8337e2f..09b30dd 100644 --- a/frontend/src/components/Predictions.vue +++ b/frontend/src/components/Predictions.vue @@ -64,7 +64,6 @@ export default { } getIndices.push(clTemp) } - } else { var tempFirst = [] @@ -78,12 +77,10 @@ export default { getIndices.push(tempFirst) getIndices.push(tempLast) } - if (this.RetrieveValueFi == "heartC") { getIndices.reverse() } - var predictions = JSON.parse(this.GetResultsAll[12]) var KNNPred = predictions[0] var LRPred = predictions[1] @@ -227,8 +224,13 @@ export default { var cellSpacing = 2; var cellSize = 4 + if (!this.flag) { + var lengthOverall = classStore.length + } else { + var lengthOverall = 2028 + } // === First call === // - databind(classStore, size, sqrtSize); // ...then update the databind function + databind(classStore, size, sqrtSize, lengthOverall); // ...then update the databind function var t = d3.timer(function(elapsed) { draw(); @@ -238,7 +240,7 @@ export default { // === Bind and draw functions === // - function databind(data, size, sqrtSize) { + function databind(data, size, sqrtSize, lengthOverallLocal) { colourScale = d3.scaleSequential(d3.interpolateGreens).domain([0, 100]) @@ -253,7 +255,7 @@ export default { return groupSpacing * x0 + (cellSpacing + cellSize) * (x1 + x0 * 10); }) .attr('y', function(d, i) { - var y0 = Math.floor(i / 2028), y1 = Math.floor(i % size / sqrtSize); + var y0 = Math.floor(i / lengthOverallLocal), y1 = Math.floor(i % size / sqrtSize); return groupSpacing * y0 + (cellSpacing + cellSize) * (y1 + y0 * 10); }) .attr('width', 0) @@ -503,8 +505,14 @@ export default { var cellSpacing = 2; var cellSize = 4 + if (!this.flag) { + var lengthOverall = classStore.length + } else { + var lengthOverall = 2028 + } + // === First call === // - databind(classStore, size, sqrtSize); // ...then update the databind function + databind(classStore, size, sqrtSize, lengthOverall); // ...then update the databind function var t = d3.timer(function(elapsed) { draw(); @@ -514,7 +522,7 @@ export default { // === Bind and draw functions === // - function databind(data, size, sqrtSize) { + function 
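The Predictions.vue change above, repeated for the second grid right after it, stops hard-coding 2028 cells per row group: when the flag is not set, the y offset is derived from the actual number of bound data points (lengthOverall). Restated in Python for clarity, with groupSpacing assumed (it is not visible in the hunk) and cellSpacing=2 / cellSize=4 taken from the component:

# Hedged restatement of the y-position rule changed in databind() above;
# group_spacing is an assumed value, the rest mirrors the component.
import math

def cell_y(i, size, sqrt_size, length_overall,
           group_spacing=4, cell_spacing=2, cell_size=4):
    y0 = math.floor(i / length_overall)          # was: i / 2028
    y1 = math.floor((i % size) / sqrt_size)
    return group_spacing * y0 + (cell_spacing + cell_size) * (y1 + y0 * 10)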
databind(data, size, sqrtSize, lengthOverallLocal) { colourScale = d3.scaleSequential(d3.interpolatePRGn).domain([-100, 100]) @@ -529,7 +537,7 @@ export default { return groupSpacing * x0 + (cellSpacing + cellSize) * (x1 + x0 * 10); }) .attr('y', function(d, i) { - var y0 = Math.floor(i / 2028), y1 = Math.floor(i % size / sqrtSize); + var y0 = Math.floor(i / lengthOverallLocal), y1 = Math.floor(i % size / sqrtSize); return groupSpacing * y0 + (cellSpacing + cellSize) * (y1 + y0 * 10); }) .attr('width', 0) diff --git a/frontend/src/components/ValidationController.vue b/frontend/src/components/ValidationController.vue index c61ff42..04fdd2a 100644 --- a/frontend/src/components/ValidationController.vue +++ b/frontend/src/components/ValidationController.vue @@ -104,6 +104,7 @@ export default { activeLines.push('meanSelection') } } else { + var valid = JSON.parse(this.ResultsValid[3]) var mergedStoreEnsembleLoc = [].concat.apply([], this.storedEnsemble) @@ -139,8 +140,8 @@ export default { } if (this.selectedEnsem.length != 0) { if (this.selectedEnsem.includes(mergedStoreEnsembleLoc[i])) { - sumGlobalSel[j] = sumGlobalSel[j] + tempValid[i] - countValuesSel[j] = countValuesSel[j] + 1 + sumGlobalSel[j-measure] = sumGlobalSel[j-measure] + tempValid[i] + countValuesSel[j-measure] = countValuesSel[j-measure] + 1 } } } diff --git a/frontend/src/components/VotingResults.vue b/frontend/src/components/VotingResults.vue index cd558f0..c7357b5 100644 --- a/frontend/src/components/VotingResults.vue +++ b/frontend/src/components/VotingResults.vue @@ -172,8 +172,8 @@ export default { .attr('class', 'score') .text(function(d){return d[rCol];}); - chart.append("text").attr("x",width/3).attr("y", 20).attr("class","title").text(info[0]); - chart.append("text").attr("x",width/3+rightOffset).attr("y", 20).attr("class","title").text(info[1]); + chart.append("text").attr("x",width/3).attr("y", 20).attr("class","title").text(info[0]+' (%)'); + chart.append("text").attr("x",width/3+rightOffset).attr("y", 20).attr("class","title").text(info[1]+' (%)'); chart.append("text").attr("x",width+labelArea/3).attr("y", 20).attr("class","title").text("Metrics"); }, legendColFinal () { diff --git a/insertMongo.py b/insertMongo.py index d681e76..a379e62 100644 --- a/insertMongo.py +++ b/insertMongo.py @@ -10,7 +10,7 @@ def import_content(filepath): mng_client = pymongo.MongoClient('localhost', 27017) mng_db = mng_client['mydb'] #collection_name = 'StanceCTest' - collection_name = 'biodegC' + collection_name = 'biodegCTest' db_cm = mng_db[collection_name] cdir = os.path.dirname(__file__) file_res = os.path.join(cdir, filepath) @@ -21,5 +21,5 @@ def import_content(filepath): db_cm.insert(data_json) if __name__ == "__main__": - filepath = '/Users/anchaa/Documents/Research/HyperSearVis_code/new_data_sets/biodeg.csv' + filepath = '/Users/anchaa/Documents/Research/HyperSearVis_code/new_data_sets/biodegtest.csv' import_content(filepath) \ No newline at end of file diff --git a/new_data_sets/biodegtest.csv b/new_data_sets/biodegtest.csv index 45ad5e4..3bac0cd 100644 --- a/new_data_sets/biodegtest.csv +++ b/new_data_sets/biodegtest.csv @@ -1,1057 +1,219 @@ -SpMax_L,J_Dz(e),nHM,F01[N-N],F04[C-N],NssssC,nCb-,C%,nCp,nO,F03[C-N],SdssC,HyWi_B(m),LOC,SM6_L,F03[C-O],Me,Mi,nN-N,nArNO2,nCRX3,SpPosA_B(p),nCIR,B01[C-Br],B03[C-Cl],N-073,SpMax_A,Psi_i_1d,B04[C-Br],SdO,TI2_L,nCrt,C-026,F02[C-N],nHDon,SpMax_B(m),Psi_i_A,nN,SM6_B(m),nArCOOR,nX,Class*,,, 
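The insertMongo.py hunk above only retargets the import (collection biodegCTest, file biodegtest.csv); the surrounding logic is unchanged. For orientation, a minimal sketch of that CSV-to-MongoDB import pattern: host, port, database and collection names come from the diff, while the pandas read step and insert_many are assumptions (the visible context still uses the older db_cm.insert call).

# Hedged sketch of the CSV-to-MongoDB import performed by insertMongo.py.
import json
import pandas as pd
import pymongo

def import_content(filepath):
    client = pymongo.MongoClient('localhost', 27017)
    collection = client['mydb']['biodegCTest']
    records = json.loads(pd.read_csv(filepath).to_json(orient='records'))
    collection.insert_many(records)  # the script itself uses insert()

# import_content('new_data_sets/biodegtest.csv')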
-3.802,2.4475,0,0,0,0,0,30.4,2,0,0,0,2.661,1.95,8.394,0,0.96,1.144,0,0,0,1.195,0,0,0,0,1.848,0.108,0,0,2.885,0,0,0,0,2.881,1.643,0,6.741,0,0,0,,, -3,2.5664,0,0,0,0,0,28.6,0,1,0,0,2.022,0,7.286,0,1.014,1.149,0,0,0,0.946,1,0,0,0,2,-0.541,0,0,0.444,0,0,0,0,3.119,2.167,0,6.827,0,0,0,,, -4.414,3.0215,0,0,0,0,1,46.7,1,0,0,0,2.853,0.802,9.183,0,0.969,1.111,0,0,0,1.323,1,0,0,0,2.101,-0.074,0,0,0.975,0,0,0,0,3.549,1.952,0,7.773,0,0,0,,, -3.879,2.7312,0,0,2,0,0,28.6,2,0,2,0,2.922,2.281,8.743,0,0.966,1.151,0,0,0,1.179,0,0,0,0,1.902,0.038,0,0,3.685,0,0,2,1,2.957,1.722,1,7.078,0,0,0,,, -5.175,3.753,0,0,0,1,0,34.5,3,1,0,1.296,3.211,1.859,10.165,2,0.975,1.136,0,0,0,1.224,0,0,0,0,2.175,0.013,0,0,3.341,0,0,0,1,3.401,2.265,0,7.963,0,0,0,,, -3.919,3.1183,0,0,0,0,0,27.6,2,3,0,0,3.178,2.55,9.002,1,0.998,1.151,0,0,0,1.116,0,0,0,0,1.932,0.014,0,0,4.489,0,0,0,1,3.056,2.318,0,7.456,0,0,0,,, -3.618,2.4524,0,0,0,0,0,30.8,1,1,0,0,2.452,1.522,7.853,1,0.989,1.144,0,0,0,1.189,0,0,0,0,1.732,0.363,0,9.405,2.094,0,0,0,0,3.058,2.8,0,6.889,0,0,0,,, -5.125,3.6783,0,0,0,1,0,30,3,2,0,0.025,2.925,1.299,10.01,2,0.998,1.146,0,0,0,1.177,0,0,0,0,2.149,-0.078,0,10.299,2.097,0,0,0,1,3.422,2.927,0,7.748,0,0,0,,, -4.404,3.5332,0,0,0,0,0,30.4,0,4,0,-0.595,3.272,2.187,9.589,6,1.027,1.145,0,0,0,1.128,0,0,0,0,2.053,0.015,0,21.015,3.667,0,0,0,0,3.402,2.985,0,8.054,0,0,0,,, -4.56,4.4123,0,0,0,0,0,33.3,4,4,0,-0.945,3.979,3.022,10.432,10,0.987,1.139,0,0,0,1.183,0,0,0,0,2.156,0,0,23.321,6.935,0,0,0,0,3.542,2.25,0,8.631,0,0,0,,, -4.383,3.1398,0,0,0,0,0,31.6,1,3,0,-0.678,3.809,3.249,10.063,4,0.98,1.142,0,0,0,1.173,0,0,0,0,2.054,0,0,10.339,7.892,0,0,0,2,3.329,2.214,0,8.27,0,0,0,,, -3.975,2.9106,0,0,1,0,0,33.3,1,0,1,0,3.74,3.322,9.707,0,0.965,1.14,0,0,0,1.228,0,0,0,0,1.978,0,0,0,8.122,0,0,1,0,3.24,1.8,1,8.146,0,0,0,,, -4.732,2.9545,2,0,0,0,1,46.7,1,0,0,0,3.439,1.187,9.642,0,1.012,1.103,0,0,0,1.336,1,0,1,0,2.194,-0.025,0,0,1.659,0,0,0,0,3.981,2.358,0,8.774,0,2,0,,, -4.783,3.3094,0,0,0,0,2,41.2,0,2,0,0,3.118,1.187,9.668,4,1.011,1.122,0,0,0,1.216,1,0,0,0,2.222,-0.026,0,0,1.358,0,2,0,1,3.674,2.537,0,8.062,0,0,0,,, -4.499,2.9058,0,0,0,0,1,43.8,1,1,0,0,2.99,1.125,9.311,2,0.991,1.117,0,0,0,1.243,1,0,0,0,2.136,0.044,0,0,1.481,0,0,0,1,3.556,2.396,0,7.845,0,0,0,,, -4.896,3.4023,0,0,0,0,3,40.9,0,3,0,0,3.404,1.299,10.027,6,1.018,1.123,0,0,0,1.219,1,0,0,0,2.301,0.004,0,10.378,1.541,0,2,0,0,3.744,2.583,0,8.369,0,0,0,,, -4.77,3.0868,0,0,0,0,2,44.4,1,2,0,-0.875,3.239,1.185,9.882,4,1.011,1.116,0,0,0,1.248,1,0,0,0,2.236,0.014,0,10.312,1.74,0,0,0,1,3.704,2.8,0,8.277,0,0,0,,, -4.796,3.1573,0,0,0,0,2,44.4,1,2,0,-0.872,3.239,1.185,9.882,4,1.011,1.116,0,0,0,1.248,1,0,0,0,2.243,0.014,0,10.353,1.686,0,0,0,1,3.71,2.8,0,8.277,0,0,0,,, -4.882,2.872,0,0,0,0,2,37.5,2,6,0,-1.14,3.8,2.123,10.361,10,1.036,1.13,0,0,0,1.161,1,0,0,0,2.305,0,0,22.748,3.779,0,0,0,2,3.755,2.981,0,8.711,2,0,0,,, -4.607,3.1387,0,0,2,0,2,50,0,1,2,0,3.168,1.187,9.631,2,1.014,1.11,0,0,0,1.295,1,0,0,0,2.194,-0.027,0,0,1.544,0,1,1,1,3.692,2.87,1,8.242,0,0,0,,, -5.313,3.4001,1,0,0,0,2,36.8,1,3,0,0,3.493,1.16,10.388,6,1.031,1.116,0,0,0,1.257,1,0,0,0,2.311,-0.008,0,20.999,1.833,0,1,0,1,4.735,3.114,0,9.506,0,0,0,,, -4.641,3.0853,0,0,0,0,2,44.4,0,2,0,0,3.233,1.261,9.714,4,1.011,1.116,0,0,0,1.249,1,0,0,0,2.214,0.014,0,10.165,1.783,0,1,0,0,3.664,2.583,0,8.192,0,0,0,,, -4.928,2.4843,0,0,0,0,0,34.2,1,3,0,0.171,3.606,2.125,10.295,6,0.992,1.137,0,0,0,1.199,1,0,0,0,2.36,0,0,22.913,3.3,2,0,0,0,3.538,2.375,0,8.342,0,0,0,,, 
-3.618,3.4601,0,0,0,0,0,33.3,0,0,0,0,2.591,0,8.412,0,1.01,1.157,0,0,0,1.306,1,0,0,1,2,-0.238,0,0,0.579,0,0,2,1,3.52,2.3,2,7.59,0,0,0,,, -4.414,3.2788,0,0,0,0,0,41.7,0,1,1,0,2.899,0.802,9.183,1,1.016,1.128,0,0,0,1.239,1,0,0,0,2.101,-0.083,0,0,0.975,0,1,2,1,3.589,2.667,1,7.854,0,0,0,,, -3.732,2.4884,0,0,0,0,0,33.3,1,0,0,0,2.57,1.585,8.16,0,0.961,1.138,0,0,0,1.247,0,0,0,0,1.802,-0.197,0,0,2.488,0,0,0,0,2.991,1.917,0,6.88,0,0,0,,, -4,2.4305,1,0,0,0,0,27.3,2,0,0,0,3.494,0.811,8.318,0,0.979,1.137,0,0,0,1.386,0,1,0,0,1.732,-0.48,0,0,1,0,0,0,0,6.874,2.021,0,11.569,0,1,0,,, -3.618,2.2408,0,0,1,0,0,25,2,0,1,0,2.359,1.522,7.853,0,0.97,1.161,0,0,0,1.133,0,0,0,0,1.732,0.323,0,0,2.094,0,0,1,2,2.811,2.1,1,6.385,0,0,0,,, -3.966,2.856,0,0,0,0,0,31.4,2,1,0,0,3.553,3.146,9.522,1,0.968,1.143,0,0,0,1.187,0,0,0,0,1.97,0,0,0,6.909,0,0,0,1,2.988,1.794,0,7.753,0,0,0,,, -3.618,2.5915,0,0,0,0,0,23.1,1,2,0,0,2.435,1.522,7.853,0,1.015,1.16,0,0,0,1.065,0,0,0,0,1.732,0.402,0,0,2.094,0,0,0,1,2.919,2.9,0,6.634,0,0,0,,, -4.228,3.2893,0,0,0,0,0,29.4,2,2,0,-0.193,2.793,1.557,8.891,2,1.004,1.147,0,0,0,1.14,0,0,0,0,1.932,0.142,0,9.98,2.535,0,0,0,0,3.346,2.738,0,7.465,0,0,0,,, -4.236,3.0679,0,0,1,0,0,32.8,1,1,1,-0.156,3.929,3.554,10.042,1,0.97,1.141,0,0,0,1.209,0,0,0,0,1.995,0,0,10.599,9.701,0,0,1,2,3.309,1.903,1,8.382,0,0,0,,, -4.278,3.167,0,0,0,0,0,30,0,4,0,-1.74,3.192,1.922,9.464,4,1.036,1.146,0,0,0,1.114,0,0,0,0,2,-0.027,0,19.793,3.677,0,0,0,2,3.359,3.533,0,7.994,0,0,0,,, -4.346,3.6387,0,0,0,0,0,31.6,0,3,0,-0.41,3.074,1.975,9.16,3,1.021,1.142,0,0,0,1.153,0,0,0,0,2,0.05,0,10.291,3.165,0,0,0,0,3.424,2.852,0,7.785,0,0,0,,, -4.655,4.0026,0,0,0,0,0,31.8,2,3,0,-0.11,3.176,1.761,9.723,6,1.013,1.142,0,0,0,1.161,0,0,0,0,2.107,-0.027,0,10.614,2.989,0,0,0,1,3.531,2.967,0,8.001,0,0,0,,, -4.233,3.6203,0,0,0,0,0,37.5,1,2,0,-0.914,2.983,1.906,9.011,2,1.012,1.13,0,0,0,1.207,0,0,0,0,1.95,-0.086,0,9.753,2.999,0,0,0,1,3.444,3.083,0,7.828,0,0,0,,, -4.691,4.0085,0,0,0,0,0,33.3,0,4,0,-2.748,3.146,1.436,9.666,6,1.064,1.139,0,0,0,1.141,0,0,0,0,2.117,0.053,0,19.662,2.428,0,0,0,2,3.564,3.944,0,8.128,0,0,0,,, -4.495,3.6519,0,0,0,0,0,32,2,3,0,-0.558,3.25,2.187,9.562,5,1.007,1.141,0,0,0,1.172,0,0,0,0,2.074,0.014,0,21.071,3.568,0,0,0,0,3.461,2.803,0,8.013,0,0,0,,, -4.414,3.0837,0,0,1,0,1,42.9,0,0,2,0,2.868,0.802,9.183,0,0.982,1.125,0,0,0,1.282,1,0,0,0,2.101,-0.077,0,0,0.975,0,1,2,2,3.557,2.238,1,7.79,0,0,0,,, -4.675,2.6951,0,0,0,0,1,44,1,2,0,0.083,3.474,1.911,10.014,7,0.998,1.116,0,0,0,1.264,1,0,0,0,2.189,-0.002,0,10.994,3.45,0,0,0,0,3.6,2.462,0,8.445,0,0,0,,, -4.517,2.6642,0,0,0,0,1,42.1,1,1,0,0,3.086,1.409,9.391,1,0.987,1.12,0,0,0,1.256,1,0,0,0,2.149,-0.024,0,0,2.036,0,0,0,1,3.553,2.296,0,7.898,0,0,0,,, -4.732,2.3414,0,0,0,0,1,47.1,0,1,0,0,3.124,0,9.688,2,0.992,1.11,0,0,0,1.243,2,0,0,0,2.348,0.013,0,0,1.659,0,0,0,0,3.639,2,0,8.158,0,0,0,,, -4.788,2.7203,0,0,2,0,1,40.9,0,3,2,-1.424,3.499,1.911,10.014,3,1.028,1.127,0,0,0,1.201,1,0,0,0,2.23,-0.002,0,21.295,3.219,0,0,2,2,3.664,3,1,8.485,0,0,0,,, -4.953,3.2896,0,0,1,0,2,40,0,2,3,-0.907,3.324,1.273,9.975,4,1.015,1.129,0,0,0,1.214,1,0,0,0,2.288,-0.008,0,10.541,1.602,0,1,2,2,3.736,2.773,1,8.324,0,0,0,,, -5.313,2.5728,0,0,0,1,0,34.5,3,1,0,1.477,3.215,1.16,10.388,2,0.975,1.136,0,0,0,1.211,1,0,0,0,2.311,-0.007,0,0,1.833,1,0,0,1,3.478,2.068,0,7.975,0,0,0,,, -4.774,2.8791,0,0,0,0,1,45,1,1,0,0.209,3.211,1.461,9.724,3,0.987,1.114,0,0,0,1.264,1,0,0,0,2.214,0.014,0,11.013,1.964,0,0,0,0,3.652,2.383,0,8.162,0,0,0,,, 
-5.096,3.1703,0,0,0,0,0,40,2,4,0,-1.5,3.433,1.124,10.278,10,1.042,1.125,0,0,0,1.223,1,0,0,0,2.358,0.004,0,32.786,1.482,1,0,0,0,3.742,3.208,0,8.487,0,0,0,,, -4.876,3.9788,0,0,0,0,0,22.7,2,5,0,0,3.16,1.761,9.801,6,1.043,1.161,0,0,0,1.008,0,0,0,0,2.189,-0.027,0,0,2.313,0,0,0,5,3.341,3.7,0,7.66,0,0,0,,, -5.035,2.2675,0,0,0,0,2,50,0,2,0,-0.271,3.703,0.625,10.385,6,0.998,1.104,0,0,0,1.277,2,0,0,0,2.328,0,0,11.895,3.098,0,0,0,1,3.73,2.458,0,8.718,0,0,0,,, -4.214,3.0457,0,0,0,0,0,23.1,1,2,1,-0.836,2.68,1.459,8.755,2,1.031,1.166,0,0,0,1.08,0,0,0,0,1.902,-0.255,0,9.519,2.052,0,0,1,3,3.328,3.611,1,7.365,0,0,0,,, -3.879,2.6225,1,0,0,0,0,30.8,2,0,0,0,3.142,2.281,8.743,0,0.972,1.142,0,0,0,1.223,0,0,1,0,1.902,0.037,0,0,3.685,0,0,0,0,3.542,1.846,0,7.944,0,1,0,,, -3.732,2.7566,0,0,0,0,0,25,2,2,0,0,2.597,1.585,8.16,0,1.005,1.156,0,0,0,1.077,0,0,0,0,1.802,-0.237,0,0,2.488,0,0,0,1,2.961,2.667,0,6.802,0,0,0,,, -4.236,3.0785,0,0,0,0,0,33.3,1,2,0,-0.664,3.764,3.284,9.867,2,0.975,1.138,0,0,0,1.199,0,0,0,0,1.993,0,0,10.324,8.066,0,0,0,1,3.329,2.083,0,8.257,0,0,0,,, -4.347,3.0077,0,0,0,0,0,31.7,1,2,0,-0.071,3.477,2.79,9.619,3,0.979,1.142,0,0,0,1.178,0,0,0,0,2.017,0.001,0,10.797,5.864,0,0,0,0,3.357,2.078,0,7.954,0,0,0,,, -4.343,3.3851,0,0,0,0,0,28.6,0,4,0,-2.153,3.013,1.5,9.309,4,1.069,1.149,0,0,0,1.075,0,0,0,0,2,-0.092,0,19.281,2.682,0,0,0,2,3.408,4.042,0,7.893,0,0,0,,, -4.343,3.6524,0,0,0,0,0,31.3,1,3,0,-0.501,2.977,1.906,9.058,3,1.032,1.143,0,0,0,1.126,0,0,0,0,1.989,-0.089,0,10.119,2.682,0,0,0,1,3.423,3.271,0,7.697,0,0,0,,, -5.421,4.9033,0,0,0,1,0,28.6,0,7,0,-5.019,3.504,1.669,10.487,10,1.087,1.149,0,0,0,1.084,0,0,0,0,2.327,0.005,0,30.485,2.211,0,0,0,4,3.629,4.173,0,8.471,0,0,0,,, -4.947,4.4275,0,0,0,0,0,25,0,6,0,-3.537,3.249,1.371,9.956,6,1.101,1.156,0,0,0,1.036,0,0,0,0,2.214,-0.03,0,19.544,2.232,0,0,0,4,3.565,4.4,0,8.152,0,0,0,,, -4.17,2.6683,0,0,0,0,0,26.7,2,1,0,0,2.4,0.971,8.597,1,0.983,1.152,0,0,0,1.103,0,0,0,0,1.848,0.343,0,0,1.542,0,0,0,1,3.022,2.567,0,6.702,0,0,0,,, -4.842,3.1241,0,0,0,0,2,44.4,0,4,0,-2.126,3.451,1.258,10.136,8,1.053,1.116,0,0,0,1.194,1,0,0,0,2.278,0.005,0,20.669,2.113,0,0,0,2,3.753,3.389,0,8.529,0,0,0,,, -4.517,2.7667,0,0,2,0,1,50,0,0,1,0,3.137,1.409,9.391,0,0.985,1.109,0,0,0,1.339,1,0,0,0,2.149,-0.025,0,0,2.036,0,0,1,0,3.578,2.407,1,8.128,0,0,0,,, -4.77,2.4954,0,0,0,0,0,38.5,2,0,0,2.904,3.133,1.185,9.882,0,0.964,1.128,0,0,0,1.272,1,0,0,0,2.236,0.012,0,0,1.74,1,0,0,0,3.454,1.817,0,7.92,0,0,0,,, -4.739,2.5436,0,0,0,0,0,35.7,1,2,0,0.266,3.336,1.695,9.972,7,0.99,1.134,0,0,0,1.205,1,0,0,0,2.212,0.004,0,11.089,2.815,0,0,0,0,3.547,2.306,0,8.108,0,0,0,,, -3.618,2.445,0,0,0,0,0,21.4,1,1,0,0,2.41,1.522,7.853,0,0.997,1.169,0,0,0,1.079,0,0,0,0,1.732,0.375,0,0,2.094,0,0,1,2,2.872,2.7,1,6.534,0,0,0,,, -4.303,3.1827,0,0,0,0,0,25,2,2,2,0,2.996,1.837,9.389,0,0.998,1.159,0,0,0,1.112,0,0,0,0,2,0.044,0,0,3.193,0,0,2,3,3.139,2.685,1,7.43,0,0,0,,, -3.919,3.2139,0,0,0,0,0,25.9,1,4,0,0,3.201,2.55,9.002,0,1.014,1.154,0,0,0,1.091,0,0,0,0,1.932,0.015,0,0,4.489,0,0,0,1,3.069,2.5,0,7.529,0,0,0,,, -4.17,3.2439,1,0,0,0,0,25,1,2,0,-0.98,2.842,0.971,8.597,0,1.093,1.149,0,0,0,1.11,0,0,0,0,1.848,0.442,0,9.236,1.542,0,0,0,1,3.644,4.056,0,8.039,0,1,0,,, -5.072,2.524,0,0,0,0,2,36.4,2,4,0,-0.888,4.154,3.02,10.632,12,0.986,1.132,0,0,0,1.212,1,0,0,0,2.351,0,0,24.704,6.968,0,0,0,0,3.794,2.167,0,8.904,2,0,0,,, -5.089,1.8308,0,0,0,0,2,40,0,4,0,-0.809,4.049,0.49,10.803,14,0.996,1.125,0,0,0,1.213,3,0,0,0,2.373,0,0,25.001,4.39,0,0,0,0,3.8,2.222,0,8.91,2,0,0,,, 
-4.91,2.8789,0,0,0,0,3,52.6,0,1,0,0,3.357,0.629,10.06,2,0.993,1.098,0,0,0,1.319,3,0,0,0,2.333,0.004,0,0,1.207,0,1,0,1,3.86,2.273,0,8.487,0,0,0,,, -4,3.1669,0,0,0,0,0,45.5,0,0,1,0,2.737,0,8.623,0,0.988,1.121,0,0,0,1.287,1,0,0,0,2,0.137,0,0,0.667,0,0,2,0,3.482,2.167,1,7.627,0,0,0,,, -4.977,2.0833,0,0,0,0,0,31,1,6,0,-0.343,3.988,3.337,10.606,9,1,1.143,0,0,0,1.142,1,0,0,0,2.361,0,0,11.629,7.693,0,0,0,3,3.497,2.458,0,8.579,0,0,0,,, -6.496,4.6405,0,0,0,8,0,33.3,2,0,0,0,4.224,0.88,12.028,0,1.303,1.365,0,0,2,0.898,1,0,0,0,2.798,0,0,0,1.166,2,0,0,0,4.055,5.75,0,9.257,0,16,1,,, -5.998,1.8789,6,0,0,3,0,44.4,0,1,0,0.642,4.308,0.749,11.637,3,1.054,1.103,0,0,0,1.332,15,0,1,0,2.859,0,0,0,1.117,6,0,0,0,4.405,2.355,0,9.843,0,6,1,,, -5.437,2.538,5,0,0,1,4,50,1,0,0,0,4.239,0.954,10.89,0,1.029,1.094,0,0,1,1.375,2,0,1,0,2.424,0,0,0,2.444,0,2,0,0,4.252,2.411,0,9.708,0,5,1,,, -5.462,2.4473,0,2,11,2,6,40.8,6,1,12,0,4.083,1.008,11.343,3,0.987,1.128,0,0,0,1.289,4,0,0,1,2.506,0,0,0,2.666,0,2,10,1,4.02,2.104,3,9.254,0,0,1,,, -5.462,2.4518,1,2,11,2,7,40.8,6,1,12,0,4.195,1.003,11.392,3,0.993,1.127,0,0,0,1.298,4,0,1,1,2.51,0,0,0,2.826,0,3,10,1,4.068,2.171,3,9.454,0,1,1,,, -5.994,2.4353,7,0,0,3,0,45.5,0,0,0,0.425,4.296,0.768,11.48,0,1.071,1.095,0,0,0,1.384,6,0,1,0,2.799,0,0,0,0.955,4,0,0,0,4.407,2.58,0,9.865,0,7,1,,, -5.236,2.9421,6,0,0,0,0,33.3,0,0,0,0,4.006,0.799,10.434,0,1.069,1.12,0,0,0,1.347,1,0,1,0,2.414,0.004,0,0,0.873,0,0,0,0,4.085,2.722,0,9.504,0,6,1,,, -5.262,5.0351,3,0,0,1,0,14.3,0,2,0,0,3.47,0.863,10.026,0,1.23,1.166,0,0,0,1.252,0,0,0,0,2.175,0.166,0,18.861,1.435,0,0,0,0,4.309,4.226,1,9.115,0,3,1,,, -4.263,3.3475,0,0,2,0,0,25.8,0,1,0,0,3.166,2.187,9.533,0,0.983,1.159,0,0,0,1.163,0,0,0,0,2,0.013,0,0,4.143,0,0,2,0,3.15,1.955,2,7.645,0,0,1,,, -4.303,3.4665,0,0,0,0,0,21.4,0,1,2,-0.157,2.66,1.459,8.815,2,1.013,1.175,0,0,0,1.081,0,0,0,0,1.932,-0.251,0,9.958,1.745,0,0,0,2,3.346,2.944,2,7.353,0,0,1,,, -5.323,4.4887,2,0,0,1,0,30,0,1,2,-0.75,4.234,1.299,10.107,1,1.087,1.13,0,0,0,1.359,0,1,0,0,2.222,-0.087,0,10.158,1.569,0,0,2,2,7.078,3.49,2,12.275,0,2,1,,, -4.895,3.5219,1,0,2,0,3,37.5,0,2,4,0,3.52,1.16,10.099,4,1.059,1.137,0,1,0,1.252,1,0,1,0,2.292,-0.008,0,20.341,1.696,0,3,4,2,3.977,3.192,2,8.892,0,1,1,,, -4.922,3.5578,0,0,3,0,3,35,0,3,4,0,3.45,1.258,10.152,6,1.042,1.144,0,1,0,1.195,1,0,0,0,2.309,0.005,0,20.579,1.768,0,3,4,2,3.865,3.042,2,8.639,0,0,1,,, -4.86,3.3466,1,0,0,0,3,40,0,2,0,0,3.453,1.273,9.967,4,1.02,1.122,0,0,0,1.25,1,0,1,0,2.282,-0.008,0,0,1.634,0,3,0,0,3.932,2.374,0,8.641,0,1,1,,, -4.845,3.1806,1,0,0,0,3,40,1,1,0,0,3.336,1.185,9.902,2,0.991,1.11,0,0,0,1.306,1,0,0,0,2.267,0.013,0,0,1.393,0,2,0,1,3.911,2.283,0,8.592,0,0,1,,, -4.922,3.5441,0,0,5,0,3,41.2,0,2,7,0,3.486,1.258,10.152,4,1.05,1.137,0,1,0,1.254,1,0,0,0,2.309,0.005,0,20.471,1.768,0,2,5,2,3.88,3.292,3,8.746,0,0,1,,, -4.732,3.3893,0,0,0,0,2,50,0,0,0,0,3.072,0.875,9.579,0,1.056,1.16,0,0,0,1.196,1,0,0,0,2.194,0.049,0,0,0.854,0,2,0,0,3.698,3.417,0,8.071,0,2,1,,, -4.919,1.1387,0,0,2,0,4,41,0,4,8,0,4.322,0,11.129,0,0.998,1.125,0,0,0,1.204,6,0,0,0,2.399,0,0,0,5.538,0,2,8,0,3.731,1.919,2,9.366,0,0,1,,, -5.299,1.7433,0,0,6,0,0,33.3,0,6,12,-1.781,3.969,0.61,10.903,12,1.044,1.146,0,0,0,1.126,4,0,0,0,2.505,0,0,37.083,1.91,0,0,3,0,3.801,2.643,3,9.054,0,0,1,,, -5.22,2.6213,0,0,0,0,5,50,0,4,0,-0.406,3.753,0.875,10.66,9,1.035,1.104,0,0,0,1.281,6,0,0,0,2.492,0,0,11.173,1.341,0,2,0,0,4.015,2.469,0,8.955,0,0,1,,, 
-5.016,2.3633,0,0,0,0,0,40.9,1,3,0,0.508,3.413,0.799,10.366,8,1.018,1.123,0,0,0,1.215,3,0,0,0,2.441,-0.002,0,22.148,1.197,2,0,0,0,3.707,2.681,0,8.396,0,0,1,,, -6.294,7.4499,0,0,0,9,0,30.6,2,1,0,0,4.487,1.805,12.164,1,1.255,1.333,0,0,0,0.906,0,0,0,0,2.676,0,0,0,4.437,0,0,0,1,3.97,5.628,0,9.463,0,20,1,,, -5.218,4.3575,1,0,0,0,0,29.1,4,3,0,0,3.851,2.646,10.565,7,0.979,1.142,0,0,0,1.2,0,0,0,0,2.264,0,0,12.174,4.797,0,0,0,1,4.412,2.149,0,9.174,0,0,1,,, -4.377,2.8894,1,0,0,0,0,28.6,2,1,0,0,4.312,2.246,9.253,2,0.968,1.129,0,0,0,1.45,0,0,0,0,2.029,-0.02,0,11.241,3.316,0,0,0,0,10.355,2.055,0,14.025,0,0,1,,, -4.732,3.1657,0,0,0,0,3,42.9,3,0,0,0,3.075,0.881,9.803,0,0.967,1.119,0,0,0,1.331,1,0,0,0,2.236,-0.023,0,0,0.95,0,0,0,0,3.69,1.889,0,8.082,0,0,1,,, -4.517,2.6567,1,0,0,0,1,44.4,1,0,0,0,3.265,1.409,9.391,0,0.986,1.112,0,0,0,1.311,1,0,1,0,2.149,-0.024,0,0,2.036,0,0,0,0,3.622,2.086,0,8.37,0,1,1,,, -4.732,3.2158,1,0,0,0,2,46.7,1,0,0,0,3.192,0.875,9.579,0,0.991,1.107,0,0,0,1.349,1,0,1,0,2.194,0.043,0,0,0.854,0,1,0,0,3.899,2.181,0,8.457,0,1,1,,, -4.562,3.1061,1,0,0,0,2,46.7,1,0,0,0,3.192,0.875,9.54,0,0.991,1.107,0,0,0,1.346,1,0,1,0,2.17,0.043,0,0,1.14,0,1,0,0,3.869,2.181,0,8.454,0,1,1,,, -4.77,3.0499,1,0,0,0,2,47.4,1,0,0,1.066,3.375,1.185,9.882,0,0.986,1.106,0,0,0,1.348,1,0,1,0,2.236,0.013,0,0,1.74,0,1,0,0,3.881,2.211,0,8.62,0,1,1,,, -4.818,2.9372,0,0,1,0,1,38.5,2,0,2,0,3.249,1.523,9.801,0,0.973,1.131,0,0,0,1.251,1,0,0,0,2.236,-0.007,0,0,2.002,0,1,4,0,3.603,1.879,1,8.063,0,0,1,,, -4.807,3.3506,2,0,1,0,3,42.9,0,0,2,0,3.453,0.881,9.833,0,1.029,1.117,0,0,0,1.332,1,0,1,0,2.246,-0.025,0,0,1.06,0,3,2,2,4.049,2.58,1,8.863,0,2,1,,, -4.97,3.5327,3,0,1,0,4,42.9,0,0,2,0,3.653,0.861,10.06,0,1.052,1.113,0,0,0,1.348,1,0,1,0,2.307,0.014,0,0,0.962,0,4,2,2,4.103,2.7,1,9.148,0,3,1,,, -5,2.595,0,1,6,0,2,48,0,1,8,0,3.647,0.781,10.242,2,1.003,1.115,0,0,0,1.29,2,0,0,0,2.327,0,0,10.782,2.451,0,2,6,0,3.728,2.356,2,8.668,0,0,1,,, -4.993,2.2683,0,0,0,0,3,35.8,2,1,0,0,3.995,2.885,10.458,3,0.968,1.133,0,0,0,1.235,1,0,0,0,2.325,0,0,0,6.393,0,1,0,1,3.725,1.8,0,8.621,0,0,1,,, -5.006,3.7881,0,0,2,0,3,35.3,0,5,4,0,3.591,1.23,10.309,10,1.101,1.145,0,2,0,1.185,1,0,0,0,2.338,-0.003,0,40.816,1.873,0,3,4,1,3.941,3.769,2,8.927,0,0,1,,, -4.807,3.3157,1,0,0,0,3,43.8,1,1,0,0,3.297,0.881,9.833,3,1.012,1.113,0,0,0,1.295,1,0,1,0,2.246,-0.025,0,0,1.06,0,2,0,1,3.903,2.568,0,8.562,0,1,1,,, -4.935,3.5332,3,0,0,0,4,46.2,0,1,0,0,3.66,0.861,10.06,2,1.073,1.099,0,0,0,1.333,1,0,1,0,2.303,0.015,0,0,1.047,0,4,0,1,4.119,2.9,0,9.154,0,3,1,,, -4.935,3.5136,3,0,0,0,4,46.2,0,1,0,0,4.069,0.861,10.06,2,1.066,1.091,0,0,0,1.379,1,1,1,0,2.303,0.014,1,0,1.047,0,4,0,1,6.88,2.764,0,11.635,0,3,1,,, -4.96,2.2721,2,0,0,0,2,41.7,1,2,0,0,4.413,0.945,10.234,4,1.017,1.108,0,0,0,1.308,2,1,0,0,2.327,-0.001,1,0,2.814,0,1,0,0,7.065,2.107,0,12.279,0,2,1,,, -4.804,1.7705,0,0,6,0,2,50,0,4,6,-0.224,4.203,0.465,10.828,12,1.02,1.108,0,0,0,1.269,3,0,0,0,2.283,0,0,23.61,6.12,0,0,6,0,3.741,2.5,2,9.265,0,0,1,,, -5.427,2.3997,0,0,0,2,2,42.9,4,2,0,0,3.875,0.68,11.012,10,0.985,1.119,0,0,0,1.261,2,0,0,0,2.381,0,0,0,4.151,0,0,0,0,3.702,2.042,0,8.847,0,0,1,,, -4.499,2.3714,0,0,0,0,0,40,0,0,0,0,2.924,1.125,9.311,0,0.965,1.125,0,0,0,1.266,1,0,0,0,2.136,0.039,0,0,1.481,1,0,0,0,3.304,1.854,0,7.542,0,0,1,,, -4.958,2.4699,0,0,0,0,0,32.3,3,1,0,0,3.186,1.16,10.099,3,0.973,1.141,0,0,0,1.202,1,0,0,0,2.303,-0.006,0,0,1.602,2,0,0,1,3.371,1.985,0,7.729,0,0,1,,, -5.338,2.2854,1,0,0,0,9,42.9,6,4,0,0,4.322,0.662,11.258,15,0.994,1.114,0,0,0,1.311,3,0,0,0,2.407,0,0,13.723,2.977,0,3,0,0,4.443,2.183,0,9.741,0,0,1,,, 
-5.077,2.4109,0,0,0,0,4,47.1,3,0,0,0,3.66,0.703,10.481,0,0.969,1.11,0,0,0,1.324,2,0,0,0,2.366,0,0,0,2.693,0,0,0,0,3.76,1.875,0,8.68,0,0,1,,, -5.51,2.4441,0,0,0,1,3,46.9,2,1,0,0,3.674,0.703,10.717,2,0.981,1.11,0,0,0,1.291,2,0,0,0,2.412,0,0,0,2.589,0,1,0,1,3.767,2.141,0,8.7,0,0,1,,, -5.003,2.9595,1,0,0,0,3,55.6,0,0,0,0,3.496,0.629,10.084,0,0.992,1.089,0,0,0,1.369,3,0,1,0,2.351,0.004,0,0,1.075,0,1,0,0,4.009,2.101,0,8.805,0,1,1,,, -5.026,2.9255,0,0,0,0,3,50,0,1,0,0,3.425,0.896,10.138,3,0.988,1.104,0,0,0,1.315,3,0,0,0,2.365,-0.002,0,0,1.339,0,1,0,0,3.88,2.042,0,8.525,0,0,1,,, -5.382,2.2507,0,0,0,1,5,50,3,2,0,-0.114,3.928,0.932,11.101,8,0.992,1.104,0,0,0,1.303,6,0,0,0,2.522,0,0,25.043,2.006,0,0,0,0,3.962,2.346,0,9.069,0,0,1,,, -5.318,2.3664,1,0,0,0,5,58.3,0,2,0,-0.26,3.898,0.682,10.772,8,1.021,1.084,0,0,0,1.325,6,0,1,0,2.508,0,0,24.364,1.393,0,1,0,0,4.006,2.575,0,9.176,0,1,1,,, -4.91,2.843,1,0,0,0,3,52.6,0,0,0,0,3.468,0.629,10.06,0,0.98,1.083,0,0,0,1.409,3,0,0,0,2.333,0.004,0,0,1.207,0,1,0,0,3.921,2.02,0,8.704,0,0,1,,, -5.014,2.6947,0,0,2,0,4,54.5,0,0,4,0,3.548,0,10.384,0,0.983,1.098,0,0,0,1.364,6,0,0,1,2.469,-0.001,0,0,1.305,0,0,4,1,4.048,1.936,1,8.807,0,0,1,,, -5.006,2.3877,1,0,0,0,2,46.9,1,1,0,0,3.828,1.202,10.306,4,0.991,1.109,0,0,0,1.295,2,0,0,0,2.334,0,0,0,2.163,0,0,0,0,3.696,2.075,0,8.888,0,1,1,,, -4.953,2.6126,3,0,0,0,6,50,0,2,0,0,4.037,0.732,10.578,6,1.043,1.097,0,0,0,1.322,2,0,1,0,2.346,0,0,0,3.061,0,6,0,1,4.022,2.578,0,9.422,0,3,1,,, -4.523,3.7332,2,0,0,0,0,28.6,4,1,0,0,3.362,1.837,9.454,0,1.008,1.143,0,0,0,1.208,0,0,1,0,2.074,0.043,0,0,2.57,0,0,0,0,3.618,2.377,0,8.51,0,2,1,,, -4.77,3.3541,0,0,2,0,2,37.5,0,2,4,0,3.288,1.185,9.882,4,1.039,1.14,0,1,0,1.224,1,0,0,0,2.236,0.015,0,20.154,1.74,0,2,4,2,3.832,3.133,2,8.506,0,0,1,,, -4.971,3.0393,0,0,2,0,2,38.7,3,1,2,0.124,3.509,1.733,10.236,4,0.984,1.13,0,0,0,1.251,1,0,0,0,2.305,0.001,0,11.885,2.585,0,0,3,0,3.725,2.214,1,8.44,0,0,1,,, -5.379,2.2903,0,0,0,0,6,60.7,0,1,0,0.134,3.865,0.463,10.86,4,0.991,1.082,0,0,0,1.355,12,0,0,0,2.551,0,0,12.494,1.154,0,0,0,0,4.038,2.148,0,9.074,0,0,1,,, -4.382,2.2073,0,0,2,0,0,32.4,0,1,2,0.254,3.426,0.544,9.692,2,0.977,1.143,0,0,0,1.195,1,0,0,0,2.066,0.001,0,11.274,1.489,0,0,2,1,3.34,1.976,1,7.923,0,0,1,,, -4.802,3.0585,1,0,1,0,2,50,0,0,2,0,3.325,0,9.805,0,0.996,1.089,0,0,0,1.408,3,0,0,0,2.323,0.013,0,0,0.95,0,0,2,0,4.226,2.019,1,8.879,0,0,1,,, -4.17,2.7612,2,0,0,0,0,27.3,2,0,0,0,3.002,0.971,8.597,0,1.017,1.141,0,0,0,1.28,0,0,1,0,1.848,0.354,0,0,1.542,0,0,0,0,3.755,2.611,0,8.35,0,2,1,,, -6.272,7.1546,0,0,0,8,0,30.3,2,1,0,0,4.347,1.77,12.012,1,1.235,1.32,0,0,1,0.911,0,0,0,0,2.66,0,0,0,3.939,0,0,0,1,3.957,5.536,0,9.319,0,17,1,,, -5.541,3.7298,4,0,4,1,1,31,0,2,5,0,4.127,1.698,10.987,6,1.051,1.137,0,0,0,1.28,1,0,0,0,2.424,0,0,24.111,2.051,0,1,3,0,4.747,2.994,2,9.951,0,3,1,,, -5.119,2.6427,0,0,0,0,4,60,0,0,0,0,3.457,0,10.355,0,0.977,1.083,0,0,0,1.391,7,0,0,0,2.471,0.001,0,0,0.751,0,0,0,0,3.976,1.889,0,8.661,0,0,1,,, -5.665,2.2521,1,0,0,1,3,54.3,0,0,0,0,4,0.432,10.89,0,0.983,1.093,0,0,0,1.345,3,0,1,0,2.49,0,0,0,1.839,0,0,0,0,4,2.018,0,9.166,0,1,1,,, -4.481,3.6846,0,0,0,0,0,24,3,3,0,0,3.109,1.895,9.292,0,1.011,1.161,0,0,0,1.083,0,0,0,0,2.074,-0.026,0,0,2.02,0,0,3,3,3.171,2.9,1,7.444,0,0,1,,, -4.214,3.4743,0,0,1,0,0,23.1,1,2,1,-0.711,2.68,1.459,8.755,1,1.031,1.166,0,0,0,1.083,0,0,0,0,1.902,-0.262,0,9.599,2.052,0,0,0,2,3.36,3.278,1,7.41,0,0,1,,, -5.248,4.2601,3,0,0,0,0,20.8,0,3,0,-0.08,3.68,1.888,10.249,5,1.021,1.133,0,0,0,1.287,0,0,0,0,2.229,-0.008,0,10.819,3.06,0,0,1,1,4.819,2.664,1,9.618,0,0,1,,, 
-4.953,1.9231,0,0,4,0,4,38.9,2,0,6,0,4.176,2.296,10.707,0,0.967,1.128,0,0,0,1.265,2,0,0,1,2.343,0,0,0,6.238,0,2,4,1,3.732,1.73,1,8.931,0,0,1,,, -4.753,1.6977,0,0,2,0,4,38.9,2,0,4,0,4.176,2.296,10.68,0,0.967,1.128,0,0,0,1.262,2,0,0,1,2.295,0,0,0,7.067,0,2,4,1,3.702,1.73,1,8.928,0,0,1,,, -5.366,3.5678,1,0,3,0,2,35,1,2,2,0,3.484,1.16,10.405,4,1.018,1.125,0,0,0,1.27,1,0,0,0,2.342,-0.008,0,21.663,1.661,0,1,1,2,4.729,2.932,1,9.496,0,0,1,,, -5.333,2.0991,1,0,0,0,6,41.5,6,4,0,0,4.4,0.956,11.313,15,0.989,1.117,0,0,0,1.284,3,0,0,0,2.394,0,0,13.711,3.349,0,3,0,0,4.441,2.135,0,9.741,0,0,1,,, -5.31,3.4316,1,0,2,0,1,35.3,0,2,2,0,3.415,1.185,10.25,4,1.028,1.123,0,0,0,1.261,1,0,0,0,2.288,0.014,0,21.221,1.726,0,1,1,2,4.725,3.058,1,9.463,0,0,1,,, -5.187,2.1504,0,0,0,0,0,43.5,1,3,0,0.666,3.507,0.773,10.68,8,1.017,1.118,0,0,0,1.256,6,0,0,0,2.597,-0.001,0,22.622,1.14,4,0,0,0,3.799,2.564,0,8.551,0,0,1,,, -5.394,2.8307,3,1,3,0,4,37,1,4,5,0.311,4.137,0.978,11.021,8,1.066,1.122,0,0,0,1.274,2,0,1,0,2.465,0,0,33.791,2.542,0,4,8,1,4.748,3.086,2,9.947,0,2,1,,, -4.303,2.6572,1,0,0,0,0,25,0,0,0,0.787,2.909,0.862,9.067,0,1.004,1.146,0,0,0,1.3,1,0,0,0,2.115,0.131,0,0,0.956,0,0,2,2,3.86,2.222,2,8.216,0,0,1,,, -4.842,3.2355,0,0,0,0,0,41.2,0,4,2,-2.3,3.459,1.258,10.136,6,1.069,1.127,0,0,0,1.18,1,0,0,0,2.278,0.005,0,20.567,2.113,0,0,3,2,3.771,3.472,1,8.551,0,0,1,,, -4,2.2975,0,0,0,0,0,26.7,0,1,0,0,2.626,0,8.623,0,0.998,1.158,0,0,0,1.095,1,0,0,0,2,0.128,0,0,0.667,0,0,2,1,3.093,2,1,6.951,0,0,1,,, -5.286,3.4146,0,0,6,0,0,27.3,3,6,9,-2.66,3.792,1.622,10.617,12,1.048,1.159,0,0,0,1.111,1,0,0,0,2.458,0,0,35.324,1.615,0,0,3,3,3.783,3.278,3,8.66,0,0,1,,, -5.406,2.1252,1,0,7,0,5,43.9,0,4,10,0,4.279,0.777,11.11,10,1.024,1.116,0,1,0,1.28,3,0,0,1,2.428,0,0,47.783,3.924,0,5,9,2,4.711,2.702,3,9.916,0,0,1,,, -4.868,3.0252,0,0,1,0,0,34.8,3,1,6,1.466,3.292,1.16,10.076,2,0.998,1.143,0,0,0,1.235,1,0,0,0,2.288,-0.008,0,10.884,1.634,0,0,5,1,3.697,2.439,2,8.261,0,0,1,,, -3.618,2.8543,2,0,0,0,0,33.3,1,0,0,0,3.029,1.522,7.853,0,1.033,1.126,0,0,0,1.299,0,0,1,0,1.732,0.386,0,0,2.094,0,0,0,0,3.71,2.744,0,8.384,0,2,1,,, -3.732,2.5972,0,0,0,0,0,23.5,1,1,1,0,2.575,1.585,8.16,1,0.991,1.164,0,0,0,1.105,0,0,0,0,1.802,-0.226,0,0,2.488,0,0,1,2,2.93,2.333,1,6.762,0,0,1,,, -4.629,3.9373,0,0,0,0,0,31.3,0,1,1,-0.056,2.83,1.379,9.312,3,0.998,1.148,0,0,0,1.211,0,0,0,0,2.053,0.143,0,10.319,1.776,0,0,1,0,3.475,2.81,1,7.673,0,0,1,,, -3.879,2.9588,0,0,0,0,0,33.3,0,0,4,0,3.091,2.281,8.743,0,0.998,1.152,0,0,0,1.251,0,0,0,0,1.902,0.047,0,0,3.685,0,0,4,1,3.271,2.833,3,7.905,0,0,1,,, -4.709,4.1448,4,0,2,0,0,25,0,0,0,1.674,3.751,1.792,10.014,0,0.997,1.115,0,0,0,1.399,0,0,0,0,2.152,-0.007,0,0,3.444,0,0,0,0,4.33,2.194,2,9.389,0,0,1,,, -5.328,5.8726,7,0,0,0,0,25.7,6,4,0,0,4.34,2.088,10.678,9,1.056,1.137,0,0,0,1.225,0,0,1,0,2.34,0,0,12.579,2.507,0,0,0,0,4.429,2.799,0,9.905,0,6,1,,, -5.086,2.4664,1,0,0,1,0,30.8,2,1,0,0,2.929,1.39,9.812,0,1.014,1.14,0,0,0,1.169,1,0,1,0,2.38,0.126,0,0,1.372,1,0,0,0,3.699,2.31,0,8.194,0,1,1,,, -4.691,4.7731,0,0,4,0,0,26.9,4,4,4,0,3.786,2.122,10.467,0,0.996,1.155,0,0,0,1.142,0,0,0,0,2.211,0,0,0,3.997,0,0,6,4,3.321,2.517,2,8.334,0,0,1,,, -4.499,2.9639,0,0,1,0,1,41.2,0,0,2,0,2.976,1.125,9.311,0,0.979,1.127,0,0,0,1.266,1,0,0,0,2.136,0.043,0,0,1.481,0,1,2,1,3.561,2.021,1,7.846,0,0,1,,, -4.9,3.0875,0,0,1,0,1,40.9,1,1,2,0.052,3.304,1.455,9.955,2,0.993,1.127,0,0,0,1.258,1,0,0,0,2.251,-0.008,0,10.87,2.15,0,1,3,0,3.655,2.394,1,8.286,0,0,1,,, 
-4.757,1.8029,0,0,4,0,4,50,0,0,8,0,3.915,0,10.525,0,0.983,1.108,0,0,0,1.313,3,0,0,2,2.295,0,0,0,4.15,0,4,8,2,3.712,1.983,2,8.934,0,0,1,,, -4.593,2.7059,0,0,2,0,1,39.1,0,0,2,0,3.168,1.461,9.665,0,0.974,1.13,0,0,0,1.283,1,0,0,0,2.175,0.013,0,0,2.298,0,0,1,0,3.566,1.917,1,8.02,0,0,1,,, -5.563,4.6158,0,0,9,1,6,33.3,5,6,12,0,4.018,1.153,11.151,12,1.044,1.146,0,3,0,1.214,1,0,0,0,2.546,0,0,67.469,1.406,0,3,6,0,4.092,3.298,3,9.375,0,0,1,,, -5.371,3.236,0,0,0,1,3,39.3,4,1,0,0,3.351,1.124,10.524,3,0.978,1.126,0,0,0,1.273,1,0,0,0,2.364,0.004,0,0,1.718,0,1,0,1,3.765,2.188,0,8.354,0,0,1,,, -4.901,2.5104,2,0,1,0,5,48,0,3,2,0,4.028,0.946,10.601,8,1.051,1.107,0,1,0,1.295,2,0,1,0,2.329,0,0,20.982,3.43,0,5,2,0,4.016,2.781,1,9.395,0,2,1,,, -4.813,3.4399,0,0,1,0,2,38.9,0,3,2,0,3.37,1.273,9.947,6,1.041,1.132,0,1,0,1.209,1,0,0,0,2.259,-0.008,0,20.438,1.86,0,2,2,0,3.839,2.985,1,8.549,0,0,1,,, -4.973,3.602,0,0,2,0,3,35,0,3,4,0,3.45,1.258,10.152,6,1.042,1.144,0,1,0,1.193,1,0,0,0,2.317,0.005,0,20.72,1.706,0,3,4,2,3.883,3.042,2,8.639,0,0,1,,, -4.562,3.1971,1,0,0,0,2,46.2,0,1,0,0,3.214,0.875,9.54,2,1.023,1.108,0,0,0,1.291,1,0,1,0,2.17,0.046,0,0,1.14,0,2,0,1,3.872,2.681,0,8.473,0,1,1,,, -5.42,3.2399,0,0,0,4,2,34.5,10,4,0,0,4.009,1.912,11.411,16,0.988,1.136,0,0,0,1.206,1,0,0,0,2.409,0,0,0,4.536,0,0,0,0,3.753,2.097,0,8.931,0,0,1,,, -5.777,2.3194,0,0,0,2,0,33.3,3,2,0,0,3.323,1.126,10.946,6,0.987,1.139,0,0,0,1.182,3,0,0,0,2.607,-0.002,0,0,1.274,3,0,0,1,3.675,2.097,0,8.164,0,0,1,,, -5.256,2.6157,0,0,0,1,0,34.6,3,1,0,0.448,3.133,0.861,10.335,2,0.977,1.136,0,0,0,1.252,1,0,0,0,2.327,0.012,0,11.089,1.037,1,0,0,0,3.497,2.175,0,7.914,0,0,1,,, -4.928,1.9515,0,0,0,0,4,56.3,0,0,0,0,3.82,0,10.506,0,0.975,1.091,0,0,0,1.345,3,0,0,0,2.348,0,0,0,3.091,0,0,0,0,3.788,1.926,0,8.882,0,0,1,,, -5.003,2.9132,0,0,0,0,3,52.4,1,0,0,0,3.338,0.629,10.084,0,0.972,1.099,0,0,0,1.349,3,0,0,0,2.351,0.004,0,0,1.075,0,0,0,0,3.873,1.909,0,8.471,0,0,1,,, -4.732,2.1796,0,0,0,0,4,48.1,0,2,0,0,3.633,0.648,10.3,4,0.998,1.108,0,0,0,1.276,2,0,0,0,2.275,0,0,0,3.095,0,2,0,2,3.7,2.411,0,8.654,0,0,1,,, -5.174,2.5182,0,0,1,0,5,42.9,2,4,2,-1.681,4.049,1.282,10.904,11,1.009,1.121,0,0,0,1.236,2,0,0,0,2.42,0,0,23.818,3.572,0,2,4,2,3.874,2.638,1,9.097,0,0,1,,, -4.924,2.4008,0,0,0,0,3,45.2,2,1,0,0,3.608,1.429,10.32,4,0.981,1.114,0,0,0,1.299,3,0,0,0,2.348,0,0,0,2.904,0,1,0,0,3.861,1.956,0,8.632,0,0,1,,, -5.509,3.254,2,0,2,0,5,35.7,0,6,2,0,4.086,1.096,11.165,12,1.063,1.116,0,0,0,1.251,3,0,0,0,2.511,0,0,44.995,1.996,0,3,2,4,4.77,3.377,1,10.203,0,0,1,,, -4.861,1.9254,0,0,0,0,0,35.7,0,0,0,0,3.092,0,9.863,0,0.963,1.133,0,0,0,1.222,3,0,0,0,2.303,-0.006,0,0,1.047,2,0,0,0,3.309,1.467,0,7.533,0,0,1,,, -5.735,1.9915,0,0,0,2,0,35.3,2,2,0,0,3.48,1.074,11.021,4,0.985,1.134,0,0,0,1.18,6,0,0,0,2.641,0,0,0,1.239,2,0,0,2,3.656,2.083,0,8.176,0,0,1,,, -5.047,2.5237,0,0,0,0,4,58.3,0,0,0,0,3.606,0,10.4,0,0.976,1.087,0,0,0,1.38,6,0,0,0,2.414,0,0,0,1.443,0,0,0,0,3.966,1.905,0,8.812,0,0,1,,, -5.318,2.361,0,0,2,0,5,53.8,0,2,2,-0.252,3.801,0.682,10.772,8,1.011,1.099,0,0,0,1.296,6,0,0,0,2.508,0,0,24.37,1.393,0,1,2,2,3.955,2.569,1,8.963,0,0,1,,, -5.408,2.3533,0,0,6,0,6,43.5,4,2,6,-0.202,4.087,1.289,11.084,8,0.993,1.121,0,0,0,1.276,6,0,0,0,2.553,0,0,26.148,1.71,0,2,8,2,3.991,2.292,2,9.158,0,0,1,,, -4.802,3.136,0,2,4,0,2,42.9,0,0,6,0,3.196,0,9.805,0,1.014,1.136,0,0,0,1.31,3,0,0,1,2.323,0.014,0,0,0.95,0,0,6,1,3.901,2.204,3,8.414,0,0,1,,, -5.69,2.3056,0,0,8,1,2,48.4,0,2,6,-0.826,3.889,0.565,10.94,5,1.009,1.113,0,0,0,1.277,3,0,0,0,2.55,0,0,23.886,1.935,1,0,4,2,3.857,2.469,2,8.961,0,0,1,,, 
-4,2.9848,1,0,0,0,0,22.2,1,0,0,0.5,2.594,0.811,8.318,0,0.994,1.139,0,0,0,1.344,0,0,0,0,1.732,-0.59,0,0,1,0,0,1,2,3.84,2.833,1,8.085,0,0,1,,, -4.562,3.2789,4,0,0,0,0,25,2,0,0,0,4.705,0.918,9.184,0,1.071,1.077,0,0,0,1.641,0,1,0,0,2,-0.207,0,0,1.521,0,0,0,0,7.129,2.278,0,12.952,0,4,1,,, -5.262,4.2718,2,0,0,1,0,25,2,0,0,0,3.317,0.863,10.026,0,1.23,1.269,0,0,1,1.042,0,0,0,0,2.175,0.172,0,0,1.435,0,0,0,0,3.997,4.972,0,8.663,0,5,1,,, -4.303,3.16,2,0,0,0,0,33.3,1,0,0,0,3.13,1.459,8.815,0,1.015,1.129,0,0,0,1.308,0,0,1,0,1.932,-0.227,0,0,1.745,0,0,0,0,3.772,2.676,0,8.437,0,2,1,,, -6.242,9.1775,0,0,3,12,0,30,3,0,3,0,4.727,1.637,12.421,0,1.311,1.377,0,0,3,0.883,0,0,0,0,2.694,0,0,0,2.922,0,0,3,0,3.991,5.825,1,9.745,0,27,1,,, -3.802,2.8705,2,0,0,0,0,26.7,2,1,0,0,3.2,1.95,8.394,0,1.026,1.145,0,0,0,1.197,0,0,0,0,1.848,0.139,0,0,2.885,0,0,0,0,3.57,2.532,0,8.378,0,2,1,,, -4.228,3.1334,0,0,0,0,0,26.3,3,2,0,0,2.746,1.557,8.891,0,0.998,1.153,0,0,0,1.103,0,0,0,0,1.932,0.136,0,0,2.535,0,0,0,1,3.105,2.548,0,7.107,0,0,1,,, -5.13,3.5878,0,0,0,1,0,32.1,4,1,0,0,3.095,1.761,10.106,1,0.974,1.141,0,0,0,1.237,0,0,0,0,2.175,-0.022,0,10.101,2.729,0,0,0,0,3.309,2.258,0,7.751,0,0,1,,, -5.086,3.9599,3,0,0,1,0,28.6,1,1,0,0,3.347,0.918,9.771,0,1.152,1.125,0,0,1,1.32,0,0,0,0,2.074,-0.263,0,9.431,1.372,0,0,0,0,4.238,3.764,0,8.925,0,3,1,,, -4.236,2.9709,0,0,1,0,0,31.9,1,1,1,-0.151,3.915,3.554,10.042,1,0.969,1.143,0,0,0,1.198,0,0,0,0,1.995,0,0,10.604,9.701,0,0,1,2,3.309,1.861,1,8.305,0,0,1,,, -4.303,3.6845,1,0,0,0,0,15.4,0,3,0,0,2.917,1.459,8.815,4,1.038,1.155,0,0,0,1.172,0,0,0,0,1.932,-0.264,0,9.921,1.745,0,0,0,0,4.19,3.154,0,8.634,0,0,1,,, -5.262,4.5221,1,0,0,0,0,28.3,3,4,0,0,3.856,2.733,10.387,12,0.987,1.144,0,0,0,1.187,0,0,0,0,2.257,0,0,12.436,3.441,0,0,0,0,4.415,2.115,0,9.152,0,0,1,,, -6.253,7.1279,0,0,0,7,0,30.8,1,2,0,-4.279,4.252,1.544,11.905,2,1.285,1.34,0,0,1,0.908,0,0,0,0,2.648,0,0,9.723,3.516,0,0,0,1,3.954,5.737,0,9.274,0,15,1,,, -4.807,3.1717,0,0,0,0,3,42.9,3,0,0,0,3.075,0.881,9.833,0,0.967,1.119,0,0,0,1.321,1,0,0,0,2.246,-0.023,0,0,1.06,0,0,0,0,3.698,1.889,0,8.086,0,0,1,,, -5.236,3.778,5,0,0,0,6,46.7,1,0,0,0,5.013,0.799,10.434,0,1.045,1.058,0,0,0,1.58,1,1,0,0,2.414,0.004,1,0,0.873,0,5,0,0,6.982,2.146,0,13.184,0,5,1,,, -4.562,3.1297,0,0,2,0,2,37.5,0,0,4,0,2.997,0.875,9.54,0,0.991,1.14,0,0,0,1.242,1,0,0,0,2.17,0.045,0,0,1.14,0,2,4,4,3.633,2.417,2,7.969,0,0,1,,, -4.658,2.9363,0,0,2,0,2,38.1,1,1,4,-0.074,3.314,1.455,9.9,1,1.003,1.137,0,0,0,1.239,1,0,0,0,2.22,-0.008,0,10.556,2.349,0,2,5,3,3.658,2.591,2,8.311,0,0,1,,, -5.388,3.3114,0,0,0,1,3,39.3,4,1,0,0,3.351,1.124,10.539,4,0.978,1.126,0,0,0,1.27,1,0,0,0,2.377,0.004,0,0,1.69,0,1,0,1,3.78,2.188,0,8.357,0,0,1,,, -4.97,3.3797,0,0,0,0,4,40.9,3,1,0,0,3.193,0.861,10.06,4,0.983,1.123,0,0,0,1.272,1,0,0,0,2.307,0.013,0,0,0.962,0,1,0,1,3.774,2.267,0,8.239,0,0,1,,, -5.399,3.4164,0,0,0,1,4,38.7,5,1,0,0,3.424,1.084,10.644,4,0.977,1.127,0,0,0,1.271,1,0,0,0,2.403,-0.002,0,0,1.65,0,1,0,1,3.818,2.147,0,8.453,0,0,1,,, -4.807,3.3179,1,0,0,0,3,43.8,1,1,0,0,3.297,0.881,9.833,2,1.012,1.113,0,0,0,1.298,1,0,1,0,2.246,-0.025,0,0,1.06,0,2,0,1,3.926,2.568,0,8.562,0,1,1,,, -4.607,3.0008,1,0,0,0,2,41.2,0,1,0,0,3.256,1.187,9.631,2,0.996,1.105,0,0,0,1.302,1,0,0,0,2.194,-0.025,0,0,1.544,0,2,0,1,3.865,2.352,0,8.506,0,0,1,,, -5.313,2.7782,0,0,0,1,2,40,4,1,0,0,3.564,1.697,10.592,2,0.976,1.125,0,0,0,1.274,1,0,0,0,2.329,-0.001,0,10.535,2.986,0,0,0,0,3.692,2.161,0,8.482,0,0,1,,, 
-5.103,3.9184,0,0,4,0,4,35,1,5,4,0,3.649,1.197,10.456,11,1.08,1.144,0,2,0,1.195,1,0,0,0,2.38,0.001,0,41.425,1.74,0,3,4,1,3.967,3.619,2,8.984,0,0,1,,, -5.265,3.3444,2,0,6,0,1,35.3,2,3,3,0.134,3.998,2.353,10.632,6,1.012,1.123,0,0,0,1.296,1,0,0,0,2.3,0,0,0,3.88,0,0,3,0,4.698,2.463,2,9.666,0,0,1,,, -5.029,2.5966,0,0,0,0,4,46.7,0,4,0,-0.761,3.837,1.418,10.634,10,1.02,1.111,0,0,0,1.254,3,0,0,0,2.403,0,0,22.765,2.586,0,0,0,0,3.919,2.611,0,8.958,2,0,1,,, -5.431,2.8955,0,0,0,2,0,32.1,4,1,1,0.374,3.233,0.832,10.681,2,0.982,1.144,0,0,0,1.232,1,0,0,0,2.394,-0.007,0,11.254,1.055,2,0,6,1,3.573,2.242,1,8.088,0,0,1,,, -5.287,3.3732,0,0,9,0,0,35.3,0,9,9,-5.256,4.319,2.346,11.029,21,1.043,1.14,0,0,0,1.178,1,0,0,0,2.462,0,0,71.167,3.396,0,0,3,0,3.787,3.083,3,9.278,0,0,1,,, -4.869,1.767,0,1,9,0,5,44.4,0,4,14,-0.391,4.435,1.073,11.072,9,1.016,1.123,0,1,0,1.261,3,0,0,0,2.314,0,0,33.54,6.465,0,4,13,0,3.848,2.576,5,9.537,1,0,1,,, -5.158,1.6914,2,0,36,0,9,56.1,0,0,44,0,4.902,0.257,11.817,0,1.007,1.093,0,0,0,1.41,147,0,1,2,2.622,0,0,0,1.535,0,1,16,0,5.808,2.055,8,11.055,0,1,1,,, -5.076,2.6588,2,0,0,0,4,54.5,0,0,0,0,3.792,0.673,10.327,0,1.003,1.089,0,0,0,1.364,2,0,1,0,2.363,-0.001,0,0,2.106,0,2,0,0,4.009,2.206,0,9.13,0,2,1,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
[... remaining removed lines of new_data_sets/biodegtest.csv omitted: each is an empty CSV record containing only comma separators ...]
-,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -,,,,{'SpMax_L'};,{'J_Dz(e)'};,{'nHM'};,{'F01[N-N]'};,{'F04[C-N]'};,{'NssssC'};,{'nCb-'};,{'C%'};,{'nCp'};,{'nO'};,{'F03[C-N]'};,{'SdssC'};,{'HyWi_B(m)'};,{'LOC'};,{'SM6_L'};,{'F03[C-O]'};,{'Me'};,{'Mi'};,{'nN-N'};,{'nArNO2'};,{'nCRX3'};,{'SpPosA_B(p)'};,{'nCIR'};,{'B01[C-Br]'};,{'B03[C-Cl]'};,{'N-073'};,{'SpMax_A'};,{'Psi_i_1d'};,{'B04[C-Br]'};,{'SdO'};,{'TI2_L'};,{'nCrt'};,{'C-026'};,{'F02[C-N]'};,{'nHDon'};,{'SpMax_B(m)'};,{'Psi_i_A'};,{'nN'};,{'SM6_B(m)'};,{'nArCOOR'};,{'nX'}; +SpMax_L,J_Dz(e),nHM,F01[N-N],F04[C-N],NssssC,nCb-,C%,nCp,nO,F03[C-N],SdssC,HyWi_B(m),LOC,SM6_L,F03[C-O],Me,Mi,nN-N,nArNO2,nCRX3,SpPosA_B(p),nCIR,B01[C-Br],B03[C-Cl],N-073,SpMax_A,Psi_i_1d,B04[C-Br],SdO,TI2_L,nCrt,C-026,F02[C-N],nHDon,SpMax_B(m),Psi_i_A,nN,SM6_B(m),nArCOOR,nX,Class* +3.802,2.4475,0,0,0,0,0,30.4,2,0,0,0,2.661,1.95,8.394,0,0.96,1.144,0,0,0,1.195,0,0,0,0,1.848,0.108,0,0,2.885,0,0,0,0,2.881,1.643,0,6.741,0,0,0 +3,2.5664,0,0,0,0,0,28.6,0,1,0,0,2.022,0,7.286,0,1.014,1.149,0,0,0,0.946,1,0,0,0,2,-0.541,0,0,0.444,0,0,0,0,3.119,2.167,0,6.827,0,0,0 +4.414,3.0215,0,0,0,0,1,46.7,1,0,0,0,2.853,0.802,9.183,0,0.969,1.111,0,0,0,1.323,1,0,0,0,2.101,-0.074,0,0,0.975,0,0,0,0,3.549,1.952,0,7.773,0,0,0 +3.879,2.7312,0,0,2,0,0,28.6,2,0,2,0,2.922,2.281,8.743,0,0.966,1.151,0,0,0,1.179,0,0,0,0,1.902,0.038,0,0,3.685,0,0,2,1,2.957,1.722,1,7.078,0,0,0 +5.175,3.753,0,0,0,1,0,34.5,3,1,0,1.296,3.211,1.859,10.165,2,0.975,1.136,0,0,0,1.224,0,0,0,0,2.175,0.013,0,0,3.341,0,0,0,1,3.401,2.265,0,7.963,0,0,0 +3.919,3.1183,0,0,0,0,0,27.6,2,3,0,0,3.178,2.55,9.002,1,0.998,1.151,0,0,0,1.116,0,0,0,0,1.932,0.014,0,0,4.489,0,0,0,1,3.056,2.318,0,7.456,0,0,0 +3.618,2.4524,0,0,0,0,0,30.8,1,1,0,0,2.452,1.522,7.853,1,0.989,1.144,0,0,0,1.189,0,0,0,0,1.732,0.363,0,9.405,2.094,0,0,0,0,3.058,2.8,0,6.889,0,0,0 +5.125,3.6783,0,0,0,1,0,30,3,2,0,0.025,2.925,1.299,10.01,2,0.998,1.146,0,0,0,1.177,0,0,0,0,2.149,-0.078,0,10.299,2.097,0,0,0,1,3.422,2.927,0,7.748,0,0,0 +4.404,3.5332,0,0,0,0,0,30.4,0,4,0,-0.595,3.272,2.187,9.589,6,1.027,1.145,0,0,0,1.128,0,0,0,0,2.053,0.015,0,21.015,3.667,0,0,0,0,3.402,2.985,0,8.054,0,0,0 +4.56,4.4123,0,0,0,0,0,33.3,4,4,0,-0.945,3.979,3.022,10.432,10,0.987,1.139,0,0,0,1.183,0,0,0,0,2.156,0,0,23.321,6.935,0,0,0,0,3.542,2.25,0,8.631,0,0,0 +4.383,3.1398,0,0,0,0,0,31.6,1,3,0,-0.678,3.809,3.249,10.063,4,0.98,1.142,0,0,0,1.173,0,0,0,0,2.054,0,0,10.339,7.892,0,0,0,2,3.329,2.214,0,8.27,0,0,0 +3.975,2.9106,0,0,1,0,0,33.3,1,0,1,0,3.74,3.322,9.707,0,0.965,1.14,0,0,0,1.228,0,0,0,0,1.978,0,0,0,8.122,0,0,1,0,3.24,1.8,1,8.146,0,0,0 +4.732,2.9545,2,0,0,0,1,46.7,1,0,0,0,3.439,1.187,9.642,0,1.012,1.103,0,0,0,1.336,1,0,1,0,2.194,-0.025,0,0,1.659,0,0,0,0,3.981,2.358,0,8.774,0,2,0 
+4.783,3.3094,0,0,0,0,2,41.2,0,2,0,0,3.118,1.187,9.668,4,1.011,1.122,0,0,0,1.216,1,0,0,0,2.222,-0.026,0,0,1.358,0,2,0,1,3.674,2.537,0,8.062,0,0,0 +4.499,2.9058,0,0,0,0,1,43.8,1,1,0,0,2.99,1.125,9.311,2,0.991,1.117,0,0,0,1.243,1,0,0,0,2.136,0.044,0,0,1.481,0,0,0,1,3.556,2.396,0,7.845,0,0,0 +4.896,3.4023,0,0,0,0,3,40.9,0,3,0,0,3.404,1.299,10.027,6,1.018,1.123,0,0,0,1.219,1,0,0,0,2.301,0.004,0,10.378,1.541,0,2,0,0,3.744,2.583,0,8.369,0,0,0 +4.77,3.0868,0,0,0,0,2,44.4,1,2,0,-0.875,3.239,1.185,9.882,4,1.011,1.116,0,0,0,1.248,1,0,0,0,2.236,0.014,0,10.312,1.74,0,0,0,1,3.704,2.8,0,8.277,0,0,0 +4.796,3.1573,0,0,0,0,2,44.4,1,2,0,-0.872,3.239,1.185,9.882,4,1.011,1.116,0,0,0,1.248,1,0,0,0,2.243,0.014,0,10.353,1.686,0,0,0,1,3.71,2.8,0,8.277,0,0,0 +4.882,2.872,0,0,0,0,2,37.5,2,6,0,-1.14,3.8,2.123,10.361,10,1.036,1.13,0,0,0,1.161,1,0,0,0,2.305,0,0,22.748,3.779,0,0,0,2,3.755,2.981,0,8.711,2,0,0 +4.607,3.1387,0,0,2,0,2,50,0,1,2,0,3.168,1.187,9.631,2,1.014,1.11,0,0,0,1.295,1,0,0,0,2.194,-0.027,0,0,1.544,0,1,1,1,3.692,2.87,1,8.242,0,0,0 +5.313,3.4001,1,0,0,0,2,36.8,1,3,0,0,3.493,1.16,10.388,6,1.031,1.116,0,0,0,1.257,1,0,0,0,2.311,-0.008,0,20.999,1.833,0,1,0,1,4.735,3.114,0,9.506,0,0,0 +4.641,3.0853,0,0,0,0,2,44.4,0,2,0,0,3.233,1.261,9.714,4,1.011,1.116,0,0,0,1.249,1,0,0,0,2.214,0.014,0,10.165,1.783,0,1,0,0,3.664,2.583,0,8.192,0,0,0 +4.928,2.4843,0,0,0,0,0,34.2,1,3,0,0.171,3.606,2.125,10.295,6,0.992,1.137,0,0,0,1.199,1,0,0,0,2.36,0,0,22.913,3.3,2,0,0,0,3.538,2.375,0,8.342,0,0,0 +3.618,3.4601,0,0,0,0,0,33.3,0,0,0,0,2.591,0,8.412,0,1.01,1.157,0,0,0,1.306,1,0,0,1,2,-0.238,0,0,0.579,0,0,2,1,3.52,2.3,2,7.59,0,0,0 +4.414,3.2788,0,0,0,0,0,41.7,0,1,1,0,2.899,0.802,9.183,1,1.016,1.128,0,0,0,1.239,1,0,0,0,2.101,-0.083,0,0,0.975,0,1,2,1,3.589,2.667,1,7.854,0,0,0 +3.732,2.4884,0,0,0,0,0,33.3,1,0,0,0,2.57,1.585,8.16,0,0.961,1.138,0,0,0,1.247,0,0,0,0,1.802,-0.197,0,0,2.488,0,0,0,0,2.991,1.917,0,6.88,0,0,0 +4,2.4305,1,0,0,0,0,27.3,2,0,0,0,3.494,0.811,8.318,0,0.979,1.137,0,0,0,1.386,0,1,0,0,1.732,-0.48,0,0,1,0,0,0,0,6.874,2.021,0,11.569,0,1,0 +3.618,2.2408,0,0,1,0,0,25,2,0,1,0,2.359,1.522,7.853,0,0.97,1.161,0,0,0,1.133,0,0,0,0,1.732,0.323,0,0,2.094,0,0,1,2,2.811,2.1,1,6.385,0,0,0 +3.966,2.856,0,0,0,0,0,31.4,2,1,0,0,3.553,3.146,9.522,1,0.968,1.143,0,0,0,1.187,0,0,0,0,1.97,0,0,0,6.909,0,0,0,1,2.988,1.794,0,7.753,0,0,0 +3.618,2.5915,0,0,0,0,0,23.1,1,2,0,0,2.435,1.522,7.853,0,1.015,1.16,0,0,0,1.065,0,0,0,0,1.732,0.402,0,0,2.094,0,0,0,1,2.919,2.9,0,6.634,0,0,0 +4.228,3.2893,0,0,0,0,0,29.4,2,2,0,-0.193,2.793,1.557,8.891,2,1.004,1.147,0,0,0,1.14,0,0,0,0,1.932,0.142,0,9.98,2.535,0,0,0,0,3.346,2.738,0,7.465,0,0,0 +4.236,3.0679,0,0,1,0,0,32.8,1,1,1,-0.156,3.929,3.554,10.042,1,0.97,1.141,0,0,0,1.209,0,0,0,0,1.995,0,0,10.599,9.701,0,0,1,2,3.309,1.903,1,8.382,0,0,0 +4.278,3.167,0,0,0,0,0,30,0,4,0,-1.74,3.192,1.922,9.464,4,1.036,1.146,0,0,0,1.114,0,0,0,0,2,-0.027,0,19.793,3.677,0,0,0,2,3.359,3.533,0,7.994,0,0,0 +4.346,3.6387,0,0,0,0,0,31.6,0,3,0,-0.41,3.074,1.975,9.16,3,1.021,1.142,0,0,0,1.153,0,0,0,0,2,0.05,0,10.291,3.165,0,0,0,0,3.424,2.852,0,7.785,0,0,0 +4.655,4.0026,0,0,0,0,0,31.8,2,3,0,-0.11,3.176,1.761,9.723,6,1.013,1.142,0,0,0,1.161,0,0,0,0,2.107,-0.027,0,10.614,2.989,0,0,0,1,3.531,2.967,0,8.001,0,0,0 +4.233,3.6203,0,0,0,0,0,37.5,1,2,0,-0.914,2.983,1.906,9.011,2,1.012,1.13,0,0,0,1.207,0,0,0,0,1.95,-0.086,0,9.753,2.999,0,0,0,1,3.444,3.083,0,7.828,0,0,0 +4.691,4.0085,0,0,0,0,0,33.3,0,4,0,-2.748,3.146,1.436,9.666,6,1.064,1.139,0,0,0,1.141,0,0,0,0,2.117,0.053,0,19.662,2.428,0,0,0,2,3.564,3.944,0,8.128,0,0,0 
+4.495,3.6519,0,0,0,0,0,32,2,3,0,-0.558,3.25,2.187,9.562,5,1.007,1.141,0,0,0,1.172,0,0,0,0,2.074,0.014,0,21.071,3.568,0,0,0,0,3.461,2.803,0,8.013,0,0,0 +4.414,3.0837,0,0,1,0,1,42.9,0,0,2,0,2.868,0.802,9.183,0,0.982,1.125,0,0,0,1.282,1,0,0,0,2.101,-0.077,0,0,0.975,0,1,2,2,3.557,2.238,1,7.79,0,0,0 +4.675,2.6951,0,0,0,0,1,44,1,2,0,0.083,3.474,1.911,10.014,7,0.998,1.116,0,0,0,1.264,1,0,0,0,2.189,-0.002,0,10.994,3.45,0,0,0,0,3.6,2.462,0,8.445,0,0,0 +4.517,2.6642,0,0,0,0,1,42.1,1,1,0,0,3.086,1.409,9.391,1,0.987,1.12,0,0,0,1.256,1,0,0,0,2.149,-0.024,0,0,2.036,0,0,0,1,3.553,2.296,0,7.898,0,0,0 +4.732,2.3414,0,0,0,0,1,47.1,0,1,0,0,3.124,0,9.688,2,0.992,1.11,0,0,0,1.243,2,0,0,0,2.348,0.013,0,0,1.659,0,0,0,0,3.639,2,0,8.158,0,0,0 +4.788,2.7203,0,0,2,0,1,40.9,0,3,2,-1.424,3.499,1.911,10.014,3,1.028,1.127,0,0,0,1.201,1,0,0,0,2.23,-0.002,0,21.295,3.219,0,0,2,2,3.664,3,1,8.485,0,0,0 +4.953,3.2896,0,0,1,0,2,40,0,2,3,-0.907,3.324,1.273,9.975,4,1.015,1.129,0,0,0,1.214,1,0,0,0,2.288,-0.008,0,10.541,1.602,0,1,2,2,3.736,2.773,1,8.324,0,0,0 +5.313,2.5728,0,0,0,1,0,34.5,3,1,0,1.477,3.215,1.16,10.388,2,0.975,1.136,0,0,0,1.211,1,0,0,0,2.311,-0.007,0,0,1.833,1,0,0,1,3.478,2.068,0,7.975,0,0,0 +4.774,2.8791,0,0,0,0,1,45,1,1,0,0.209,3.211,1.461,9.724,3,0.987,1.114,0,0,0,1.264,1,0,0,0,2.214,0.014,0,11.013,1.964,0,0,0,0,3.652,2.383,0,8.162,0,0,0 +5.096,3.1703,0,0,0,0,0,40,2,4,0,-1.5,3.433,1.124,10.278,10,1.042,1.125,0,0,0,1.223,1,0,0,0,2.358,0.004,0,32.786,1.482,1,0,0,0,3.742,3.208,0,8.487,0,0,0 +4.876,3.9788,0,0,0,0,0,22.7,2,5,0,0,3.16,1.761,9.801,6,1.043,1.161,0,0,0,1.008,0,0,0,0,2.189,-0.027,0,0,2.313,0,0,0,5,3.341,3.7,0,7.66,0,0,0 +5.035,2.2675,0,0,0,0,2,50,0,2,0,-0.271,3.703,0.625,10.385,6,0.998,1.104,0,0,0,1.277,2,0,0,0,2.328,0,0,11.895,3.098,0,0,0,1,3.73,2.458,0,8.718,0,0,0 +4.214,3.0457,0,0,0,0,0,23.1,1,2,1,-0.836,2.68,1.459,8.755,2,1.031,1.166,0,0,0,1.08,0,0,0,0,1.902,-0.255,0,9.519,2.052,0,0,1,3,3.328,3.611,1,7.365,0,0,0 +3.879,2.6225,1,0,0,0,0,30.8,2,0,0,0,3.142,2.281,8.743,0,0.972,1.142,0,0,0,1.223,0,0,1,0,1.902,0.037,0,0,3.685,0,0,0,0,3.542,1.846,0,7.944,0,1,0 +3.732,2.7566,0,0,0,0,0,25,2,2,0,0,2.597,1.585,8.16,0,1.005,1.156,0,0,0,1.077,0,0,0,0,1.802,-0.237,0,0,2.488,0,0,0,1,2.961,2.667,0,6.802,0,0,0 +4.236,3.0785,0,0,0,0,0,33.3,1,2,0,-0.664,3.764,3.284,9.867,2,0.975,1.138,0,0,0,1.199,0,0,0,0,1.993,0,0,10.324,8.066,0,0,0,1,3.329,2.083,0,8.257,0,0,0 +4.347,3.0077,0,0,0,0,0,31.7,1,2,0,-0.071,3.477,2.79,9.619,3,0.979,1.142,0,0,0,1.178,0,0,0,0,2.017,0.001,0,10.797,5.864,0,0,0,0,3.357,2.078,0,7.954,0,0,0 +4.343,3.3851,0,0,0,0,0,28.6,0,4,0,-2.153,3.013,1.5,9.309,4,1.069,1.149,0,0,0,1.075,0,0,0,0,2,-0.092,0,19.281,2.682,0,0,0,2,3.408,4.042,0,7.893,0,0,0 +4.343,3.6524,0,0,0,0,0,31.3,1,3,0,-0.501,2.977,1.906,9.058,3,1.032,1.143,0,0,0,1.126,0,0,0,0,1.989,-0.089,0,10.119,2.682,0,0,0,1,3.423,3.271,0,7.697,0,0,0 +5.421,4.9033,0,0,0,1,0,28.6,0,7,0,-5.019,3.504,1.669,10.487,10,1.087,1.149,0,0,0,1.084,0,0,0,0,2.327,0.005,0,30.485,2.211,0,0,0,4,3.629,4.173,0,8.471,0,0,0 +4.947,4.4275,0,0,0,0,0,25,0,6,0,-3.537,3.249,1.371,9.956,6,1.101,1.156,0,0,0,1.036,0,0,0,0,2.214,-0.03,0,19.544,2.232,0,0,0,4,3.565,4.4,0,8.152,0,0,0 +4.17,2.6683,0,0,0,0,0,26.7,2,1,0,0,2.4,0.971,8.597,1,0.983,1.152,0,0,0,1.103,0,0,0,0,1.848,0.343,0,0,1.542,0,0,0,1,3.022,2.567,0,6.702,0,0,0 +4.842,3.1241,0,0,0,0,2,44.4,0,4,0,-2.126,3.451,1.258,10.136,8,1.053,1.116,0,0,0,1.194,1,0,0,0,2.278,0.005,0,20.669,2.113,0,0,0,2,3.753,3.389,0,8.529,0,0,0 
+4.517,2.7667,0,0,2,0,1,50,0,0,1,0,3.137,1.409,9.391,0,0.985,1.109,0,0,0,1.339,1,0,0,0,2.149,-0.025,0,0,2.036,0,0,1,0,3.578,2.407,1,8.128,0,0,0 +4.77,2.4954,0,0,0,0,0,38.5,2,0,0,2.904,3.133,1.185,9.882,0,0.964,1.128,0,0,0,1.272,1,0,0,0,2.236,0.012,0,0,1.74,1,0,0,0,3.454,1.817,0,7.92,0,0,0 +4.739,2.5436,0,0,0,0,0,35.7,1,2,0,0.266,3.336,1.695,9.972,7,0.99,1.134,0,0,0,1.205,1,0,0,0,2.212,0.004,0,11.089,2.815,0,0,0,0,3.547,2.306,0,8.108,0,0,0 +3.618,2.445,0,0,0,0,0,21.4,1,1,0,0,2.41,1.522,7.853,0,0.997,1.169,0,0,0,1.079,0,0,0,0,1.732,0.375,0,0,2.094,0,0,1,2,2.872,2.7,1,6.534,0,0,0 +4.303,3.1827,0,0,0,0,0,25,2,2,2,0,2.996,1.837,9.389,0,0.998,1.159,0,0,0,1.112,0,0,0,0,2,0.044,0,0,3.193,0,0,2,3,3.139,2.685,1,7.43,0,0,0 +3.919,3.2139,0,0,0,0,0,25.9,1,4,0,0,3.201,2.55,9.002,0,1.014,1.154,0,0,0,1.091,0,0,0,0,1.932,0.015,0,0,4.489,0,0,0,1,3.069,2.5,0,7.529,0,0,0 +4.17,3.2439,1,0,0,0,0,25,1,2,0,-0.98,2.842,0.971,8.597,0,1.093,1.149,0,0,0,1.11,0,0,0,0,1.848,0.442,0,9.236,1.542,0,0,0,1,3.644,4.056,0,8.039,0,1,0 +5.072,2.524,0,0,0,0,2,36.4,2,4,0,-0.888,4.154,3.02,10.632,12,0.986,1.132,0,0,0,1.212,1,0,0,0,2.351,0,0,24.704,6.968,0,0,0,0,3.794,2.167,0,8.904,2,0,0 +5.089,1.8308,0,0,0,0,2,40,0,4,0,-0.809,4.049,0.49,10.803,14,0.996,1.125,0,0,0,1.213,3,0,0,0,2.373,0,0,25.001,4.39,0,0,0,0,3.8,2.222,0,8.91,2,0,0 +4.91,2.8789,0,0,0,0,3,52.6,0,1,0,0,3.357,0.629,10.06,2,0.993,1.098,0,0,0,1.319,3,0,0,0,2.333,0.004,0,0,1.207,0,1,0,1,3.86,2.273,0,8.487,0,0,0 +4,3.1669,0,0,0,0,0,45.5,0,0,1,0,2.737,0,8.623,0,0.988,1.121,0,0,0,1.287,1,0,0,0,2,0.137,0,0,0.667,0,0,2,0,3.482,2.167,1,7.627,0,0,0 +4.977,2.0833,0,0,0,0,0,31,1,6,0,-0.343,3.988,3.337,10.606,9,1,1.143,0,0,0,1.142,1,0,0,0,2.361,0,0,11.629,7.693,0,0,0,3,3.497,2.458,0,8.579,0,0,0 +6.496,4.6405,0,0,0,8,0,33.3,2,0,0,0,4.224,0.88,12.028,0,1.303,1.365,0,0,2,0.898,1,0,0,0,2.798,0,0,0,1.166,2,0,0,0,4.055,5.75,0,9.257,0,16,1 +5.998,1.8789,6,0,0,3,0,44.4,0,1,0,0.642,4.308,0.749,11.637,3,1.054,1.103,0,0,0,1.332,15,0,1,0,2.859,0,0,0,1.117,6,0,0,0,4.405,2.355,0,9.843,0,6,1 +5.437,2.538,5,0,0,1,4,50,1,0,0,0,4.239,0.954,10.89,0,1.029,1.094,0,0,1,1.375,2,0,1,0,2.424,0,0,0,2.444,0,2,0,0,4.252,2.411,0,9.708,0,5,1 +5.462,2.4473,0,2,11,2,6,40.8,6,1,12,0,4.083,1.008,11.343,3,0.987,1.128,0,0,0,1.289,4,0,0,1,2.506,0,0,0,2.666,0,2,10,1,4.02,2.104,3,9.254,0,0,1 +5.462,2.4518,1,2,11,2,7,40.8,6,1,12,0,4.195,1.003,11.392,3,0.993,1.127,0,0,0,1.298,4,0,1,1,2.51,0,0,0,2.826,0,3,10,1,4.068,2.171,3,9.454,0,1,1 +5.994,2.4353,7,0,0,3,0,45.5,0,0,0,0.425,4.296,0.768,11.48,0,1.071,1.095,0,0,0,1.384,6,0,1,0,2.799,0,0,0,0.955,4,0,0,0,4.407,2.58,0,9.865,0,7,1 +5.236,2.9421,6,0,0,0,0,33.3,0,0,0,0,4.006,0.799,10.434,0,1.069,1.12,0,0,0,1.347,1,0,1,0,2.414,0.004,0,0,0.873,0,0,0,0,4.085,2.722,0,9.504,0,6,1 +5.262,5.0351,3,0,0,1,0,14.3,0,2,0,0,3.47,0.863,10.026,0,1.23,1.166,0,0,0,1.252,0,0,0,0,2.175,0.166,0,18.861,1.435,0,0,0,0,4.309,4.226,1,9.115,0,3,1 +4.263,3.3475,0,0,2,0,0,25.8,0,1,0,0,3.166,2.187,9.533,0,0.983,1.159,0,0,0,1.163,0,0,0,0,2,0.013,0,0,4.143,0,0,2,0,3.15,1.955,2,7.645,0,0,1 +4.303,3.4665,0,0,0,0,0,21.4,0,1,2,-0.157,2.66,1.459,8.815,2,1.013,1.175,0,0,0,1.081,0,0,0,0,1.932,-0.251,0,9.958,1.745,0,0,0,2,3.346,2.944,2,7.353,0,0,1 +5.323,4.4887,2,0,0,1,0,30,0,1,2,-0.75,4.234,1.299,10.107,1,1.087,1.13,0,0,0,1.359,0,1,0,0,2.222,-0.087,0,10.158,1.569,0,0,2,2,7.078,3.49,2,12.275,0,2,1 +4.895,3.5219,1,0,2,0,3,37.5,0,2,4,0,3.52,1.16,10.099,4,1.059,1.137,0,1,0,1.252,1,0,1,0,2.292,-0.008,0,20.341,1.696,0,3,4,2,3.977,3.192,2,8.892,0,1,1 
+4.922,3.5578,0,0,3,0,3,35,0,3,4,0,3.45,1.258,10.152,6,1.042,1.144,0,1,0,1.195,1,0,0,0,2.309,0.005,0,20.579,1.768,0,3,4,2,3.865,3.042,2,8.639,0,0,1 +4.86,3.3466,1,0,0,0,3,40,0,2,0,0,3.453,1.273,9.967,4,1.02,1.122,0,0,0,1.25,1,0,1,0,2.282,-0.008,0,0,1.634,0,3,0,0,3.932,2.374,0,8.641,0,1,1 +4.845,3.1806,1,0,0,0,3,40,1,1,0,0,3.336,1.185,9.902,2,0.991,1.11,0,0,0,1.306,1,0,0,0,2.267,0.013,0,0,1.393,0,2,0,1,3.911,2.283,0,8.592,0,0,1 +4.922,3.5441,0,0,5,0,3,41.2,0,2,7,0,3.486,1.258,10.152,4,1.05,1.137,0,1,0,1.254,1,0,0,0,2.309,0.005,0,20.471,1.768,0,2,5,2,3.88,3.292,3,8.746,0,0,1 +4.732,3.3893,0,0,0,0,2,50,0,0,0,0,3.072,0.875,9.579,0,1.056,1.16,0,0,0,1.196,1,0,0,0,2.194,0.049,0,0,0.854,0,2,0,0,3.698,3.417,0,8.071,0,2,1 +4.919,1.1387,0,0,2,0,4,41,0,4,8,0,4.322,0,11.129,0,0.998,1.125,0,0,0,1.204,6,0,0,0,2.399,0,0,0,5.538,0,2,8,0,3.731,1.919,2,9.366,0,0,1 +5.299,1.7433,0,0,6,0,0,33.3,0,6,12,-1.781,3.969,0.61,10.903,12,1.044,1.146,0,0,0,1.126,4,0,0,0,2.505,0,0,37.083,1.91,0,0,3,0,3.801,2.643,3,9.054,0,0,1 +5.22,2.6213,0,0,0,0,5,50,0,4,0,-0.406,3.753,0.875,10.66,9,1.035,1.104,0,0,0,1.281,6,0,0,0,2.492,0,0,11.173,1.341,0,2,0,0,4.015,2.469,0,8.955,0,0,1 +5.016,2.3633,0,0,0,0,0,40.9,1,3,0,0.508,3.413,0.799,10.366,8,1.018,1.123,0,0,0,1.215,3,0,0,0,2.441,-0.002,0,22.148,1.197,2,0,0,0,3.707,2.681,0,8.396,0,0,1 +6.294,7.4499,0,0,0,9,0,30.6,2,1,0,0,4.487,1.805,12.164,1,1.255,1.333,0,0,0,0.906,0,0,0,0,2.676,0,0,0,4.437,0,0,0,1,3.97,5.628,0,9.463,0,20,1 +5.218,4.3575,1,0,0,0,0,29.1,4,3,0,0,3.851,2.646,10.565,7,0.979,1.142,0,0,0,1.2,0,0,0,0,2.264,0,0,12.174,4.797,0,0,0,1,4.412,2.149,0,9.174,0,0,1 +4.377,2.8894,1,0,0,0,0,28.6,2,1,0,0,4.312,2.246,9.253,2,0.968,1.129,0,0,0,1.45,0,0,0,0,2.029,-0.02,0,11.241,3.316,0,0,0,0,10.355,2.055,0,14.025,0,0,1 +4.732,3.1657,0,0,0,0,3,42.9,3,0,0,0,3.075,0.881,9.803,0,0.967,1.119,0,0,0,1.331,1,0,0,0,2.236,-0.023,0,0,0.95,0,0,0,0,3.69,1.889,0,8.082,0,0,1 +4.517,2.6567,1,0,0,0,1,44.4,1,0,0,0,3.265,1.409,9.391,0,0.986,1.112,0,0,0,1.311,1,0,1,0,2.149,-0.024,0,0,2.036,0,0,0,0,3.622,2.086,0,8.37,0,1,1 +4.732,3.2158,1,0,0,0,2,46.7,1,0,0,0,3.192,0.875,9.579,0,0.991,1.107,0,0,0,1.349,1,0,1,0,2.194,0.043,0,0,0.854,0,1,0,0,3.899,2.181,0,8.457,0,1,1 +4.562,3.1061,1,0,0,0,2,46.7,1,0,0,0,3.192,0.875,9.54,0,0.991,1.107,0,0,0,1.346,1,0,1,0,2.17,0.043,0,0,1.14,0,1,0,0,3.869,2.181,0,8.454,0,1,1 +4.77,3.0499,1,0,0,0,2,47.4,1,0,0,1.066,3.375,1.185,9.882,0,0.986,1.106,0,0,0,1.348,1,0,1,0,2.236,0.013,0,0,1.74,0,1,0,0,3.881,2.211,0,8.62,0,1,1 +4.818,2.9372,0,0,1,0,1,38.5,2,0,2,0,3.249,1.523,9.801,0,0.973,1.131,0,0,0,1.251,1,0,0,0,2.236,-0.007,0,0,2.002,0,1,4,0,3.603,1.879,1,8.063,0,0,1 +4.807,3.3506,2,0,1,0,3,42.9,0,0,2,0,3.453,0.881,9.833,0,1.029,1.117,0,0,0,1.332,1,0,1,0,2.246,-0.025,0,0,1.06,0,3,2,2,4.049,2.58,1,8.863,0,2,1 +4.97,3.5327,3,0,1,0,4,42.9,0,0,2,0,3.653,0.861,10.06,0,1.052,1.113,0,0,0,1.348,1,0,1,0,2.307,0.014,0,0,0.962,0,4,2,2,4.103,2.7,1,9.148,0,3,1 +5,2.595,0,1,6,0,2,48,0,1,8,0,3.647,0.781,10.242,2,1.003,1.115,0,0,0,1.29,2,0,0,0,2.327,0,0,10.782,2.451,0,2,6,0,3.728,2.356,2,8.668,0,0,1 +4.993,2.2683,0,0,0,0,3,35.8,2,1,0,0,3.995,2.885,10.458,3,0.968,1.133,0,0,0,1.235,1,0,0,0,2.325,0,0,0,6.393,0,1,0,1,3.725,1.8,0,8.621,0,0,1 +5.006,3.7881,0,0,2,0,3,35.3,0,5,4,0,3.591,1.23,10.309,10,1.101,1.145,0,2,0,1.185,1,0,0,0,2.338,-0.003,0,40.816,1.873,0,3,4,1,3.941,3.769,2,8.927,0,0,1 +4.807,3.3157,1,0,0,0,3,43.8,1,1,0,0,3.297,0.881,9.833,3,1.012,1.113,0,0,0,1.295,1,0,1,0,2.246,-0.025,0,0,1.06,0,2,0,1,3.903,2.568,0,8.562,0,1,1 
+4.935,3.5332,3,0,0,0,4,46.2,0,1,0,0,3.66,0.861,10.06,2,1.073,1.099,0,0,0,1.333,1,0,1,0,2.303,0.015,0,0,1.047,0,4,0,1,4.119,2.9,0,9.154,0,3,1 +4.935,3.5136,3,0,0,0,4,46.2,0,1,0,0,4.069,0.861,10.06,2,1.066,1.091,0,0,0,1.379,1,1,1,0,2.303,0.014,1,0,1.047,0,4,0,1,6.88,2.764,0,11.635,0,3,1 +4.96,2.2721,2,0,0,0,2,41.7,1,2,0,0,4.413,0.945,10.234,4,1.017,1.108,0,0,0,1.308,2,1,0,0,2.327,-0.001,1,0,2.814,0,1,0,0,7.065,2.107,0,12.279,0,2,1 +4.804,1.7705,0,0,6,0,2,50,0,4,6,-0.224,4.203,0.465,10.828,12,1.02,1.108,0,0,0,1.269,3,0,0,0,2.283,0,0,23.61,6.12,0,0,6,0,3.741,2.5,2,9.265,0,0,1 +5.427,2.3997,0,0,0,2,2,42.9,4,2,0,0,3.875,0.68,11.012,10,0.985,1.119,0,0,0,1.261,2,0,0,0,2.381,0,0,0,4.151,0,0,0,0,3.702,2.042,0,8.847,0,0,1 +4.499,2.3714,0,0,0,0,0,40,0,0,0,0,2.924,1.125,9.311,0,0.965,1.125,0,0,0,1.266,1,0,0,0,2.136,0.039,0,0,1.481,1,0,0,0,3.304,1.854,0,7.542,0,0,1 +4.958,2.4699,0,0,0,0,0,32.3,3,1,0,0,3.186,1.16,10.099,3,0.973,1.141,0,0,0,1.202,1,0,0,0,2.303,-0.006,0,0,1.602,2,0,0,1,3.371,1.985,0,7.729,0,0,1 +5.338,2.2854,1,0,0,0,9,42.9,6,4,0,0,4.322,0.662,11.258,15,0.994,1.114,0,0,0,1.311,3,0,0,0,2.407,0,0,13.723,2.977,0,3,0,0,4.443,2.183,0,9.741,0,0,1 +5.077,2.4109,0,0,0,0,4,47.1,3,0,0,0,3.66,0.703,10.481,0,0.969,1.11,0,0,0,1.324,2,0,0,0,2.366,0,0,0,2.693,0,0,0,0,3.76,1.875,0,8.68,0,0,1 +5.51,2.4441,0,0,0,1,3,46.9,2,1,0,0,3.674,0.703,10.717,2,0.981,1.11,0,0,0,1.291,2,0,0,0,2.412,0,0,0,2.589,0,1,0,1,3.767,2.141,0,8.7,0,0,1 +5.003,2.9595,1,0,0,0,3,55.6,0,0,0,0,3.496,0.629,10.084,0,0.992,1.089,0,0,0,1.369,3,0,1,0,2.351,0.004,0,0,1.075,0,1,0,0,4.009,2.101,0,8.805,0,1,1 +5.026,2.9255,0,0,0,0,3,50,0,1,0,0,3.425,0.896,10.138,3,0.988,1.104,0,0,0,1.315,3,0,0,0,2.365,-0.002,0,0,1.339,0,1,0,0,3.88,2.042,0,8.525,0,0,1 +5.382,2.2507,0,0,0,1,5,50,3,2,0,-0.114,3.928,0.932,11.101,8,0.992,1.104,0,0,0,1.303,6,0,0,0,2.522,0,0,25.043,2.006,0,0,0,0,3.962,2.346,0,9.069,0,0,1 +5.318,2.3664,1,0,0,0,5,58.3,0,2,0,-0.26,3.898,0.682,10.772,8,1.021,1.084,0,0,0,1.325,6,0,1,0,2.508,0,0,24.364,1.393,0,1,0,0,4.006,2.575,0,9.176,0,1,1 +4.91,2.843,1,0,0,0,3,52.6,0,0,0,0,3.468,0.629,10.06,0,0.98,1.083,0,0,0,1.409,3,0,0,0,2.333,0.004,0,0,1.207,0,1,0,0,3.921,2.02,0,8.704,0,0,1 +5.014,2.6947,0,0,2,0,4,54.5,0,0,4,0,3.548,0,10.384,0,0.983,1.098,0,0,0,1.364,6,0,0,1,2.469,-0.001,0,0,1.305,0,0,4,1,4.048,1.936,1,8.807,0,0,1 +5.006,2.3877,1,0,0,0,2,46.9,1,1,0,0,3.828,1.202,10.306,4,0.991,1.109,0,0,0,1.295,2,0,0,0,2.334,0,0,0,2.163,0,0,0,0,3.696,2.075,0,8.888,0,1,1 +4.953,2.6126,3,0,0,0,6,50,0,2,0,0,4.037,0.732,10.578,6,1.043,1.097,0,0,0,1.322,2,0,1,0,2.346,0,0,0,3.061,0,6,0,1,4.022,2.578,0,9.422,0,3,1 +4.523,3.7332,2,0,0,0,0,28.6,4,1,0,0,3.362,1.837,9.454,0,1.008,1.143,0,0,0,1.208,0,0,1,0,2.074,0.043,0,0,2.57,0,0,0,0,3.618,2.377,0,8.51,0,2,1 +4.77,3.3541,0,0,2,0,2,37.5,0,2,4,0,3.288,1.185,9.882,4,1.039,1.14,0,1,0,1.224,1,0,0,0,2.236,0.015,0,20.154,1.74,0,2,4,2,3.832,3.133,2,8.506,0,0,1 +4.971,3.0393,0,0,2,0,2,38.7,3,1,2,0.124,3.509,1.733,10.236,4,0.984,1.13,0,0,0,1.251,1,0,0,0,2.305,0.001,0,11.885,2.585,0,0,3,0,3.725,2.214,1,8.44,0,0,1 +5.379,2.2903,0,0,0,0,6,60.7,0,1,0,0.134,3.865,0.463,10.86,4,0.991,1.082,0,0,0,1.355,12,0,0,0,2.551,0,0,12.494,1.154,0,0,0,0,4.038,2.148,0,9.074,0,0,1 +4.382,2.2073,0,0,2,0,0,32.4,0,1,2,0.254,3.426,0.544,9.692,2,0.977,1.143,0,0,0,1.195,1,0,0,0,2.066,0.001,0,11.274,1.489,0,0,2,1,3.34,1.976,1,7.923,0,0,1 +4.802,3.0585,1,0,1,0,2,50,0,0,2,0,3.325,0,9.805,0,0.996,1.089,0,0,0,1.408,3,0,0,0,2.323,0.013,0,0,0.95,0,0,2,0,4.226,2.019,1,8.879,0,0,1 
+4.17,2.7612,2,0,0,0,0,27.3,2,0,0,0,3.002,0.971,8.597,0,1.017,1.141,0,0,0,1.28,0,0,1,0,1.848,0.354,0,0,1.542,0,0,0,0,3.755,2.611,0,8.35,0,2,1 +6.272,7.1546,0,0,0,8,0,30.3,2,1,0,0,4.347,1.77,12.012,1,1.235,1.32,0,0,1,0.911,0,0,0,0,2.66,0,0,0,3.939,0,0,0,1,3.957,5.536,0,9.319,0,17,1 +5.541,3.7298,4,0,4,1,1,31,0,2,5,0,4.127,1.698,10.987,6,1.051,1.137,0,0,0,1.28,1,0,0,0,2.424,0,0,24.111,2.051,0,1,3,0,4.747,2.994,2,9.951,0,3,1 +5.119,2.6427,0,0,0,0,4,60,0,0,0,0,3.457,0,10.355,0,0.977,1.083,0,0,0,1.391,7,0,0,0,2.471,0.001,0,0,0.751,0,0,0,0,3.976,1.889,0,8.661,0,0,1 +5.665,2.2521,1,0,0,1,3,54.3,0,0,0,0,4,0.432,10.89,0,0.983,1.093,0,0,0,1.345,3,0,1,0,2.49,0,0,0,1.839,0,0,0,0,4,2.018,0,9.166,0,1,1 +4.481,3.6846,0,0,0,0,0,24,3,3,0,0,3.109,1.895,9.292,0,1.011,1.161,0,0,0,1.083,0,0,0,0,2.074,-0.026,0,0,2.02,0,0,3,3,3.171,2.9,1,7.444,0,0,1 +4.214,3.4743,0,0,1,0,0,23.1,1,2,1,-0.711,2.68,1.459,8.755,1,1.031,1.166,0,0,0,1.083,0,0,0,0,1.902,-0.262,0,9.599,2.052,0,0,0,2,3.36,3.278,1,7.41,0,0,1 +5.248,4.2601,3,0,0,0,0,20.8,0,3,0,-0.08,3.68,1.888,10.249,5,1.021,1.133,0,0,0,1.287,0,0,0,0,2.229,-0.008,0,10.819,3.06,0,0,1,1,4.819,2.664,1,9.618,0,0,1 +4.953,1.9231,0,0,4,0,4,38.9,2,0,6,0,4.176,2.296,10.707,0,0.967,1.128,0,0,0,1.265,2,0,0,1,2.343,0,0,0,6.238,0,2,4,1,3.732,1.73,1,8.931,0,0,1 +4.753,1.6977,0,0,2,0,4,38.9,2,0,4,0,4.176,2.296,10.68,0,0.967,1.128,0,0,0,1.262,2,0,0,1,2.295,0,0,0,7.067,0,2,4,1,3.702,1.73,1,8.928,0,0,1 +5.366,3.5678,1,0,3,0,2,35,1,2,2,0,3.484,1.16,10.405,4,1.018,1.125,0,0,0,1.27,1,0,0,0,2.342,-0.008,0,21.663,1.661,0,1,1,2,4.729,2.932,1,9.496,0,0,1 +5.333,2.0991,1,0,0,0,6,41.5,6,4,0,0,4.4,0.956,11.313,15,0.989,1.117,0,0,0,1.284,3,0,0,0,2.394,0,0,13.711,3.349,0,3,0,0,4.441,2.135,0,9.741,0,0,1 +5.31,3.4316,1,0,2,0,1,35.3,0,2,2,0,3.415,1.185,10.25,4,1.028,1.123,0,0,0,1.261,1,0,0,0,2.288,0.014,0,21.221,1.726,0,1,1,2,4.725,3.058,1,9.463,0,0,1 +5.187,2.1504,0,0,0,0,0,43.5,1,3,0,0.666,3.507,0.773,10.68,8,1.017,1.118,0,0,0,1.256,6,0,0,0,2.597,-0.001,0,22.622,1.14,4,0,0,0,3.799,2.564,0,8.551,0,0,1 +5.394,2.8307,3,1,3,0,4,37,1,4,5,0.311,4.137,0.978,11.021,8,1.066,1.122,0,0,0,1.274,2,0,1,0,2.465,0,0,33.791,2.542,0,4,8,1,4.748,3.086,2,9.947,0,2,1 +4.303,2.6572,1,0,0,0,0,25,0,0,0,0.787,2.909,0.862,9.067,0,1.004,1.146,0,0,0,1.3,1,0,0,0,2.115,0.131,0,0,0.956,0,0,2,2,3.86,2.222,2,8.216,0,0,1 +4.842,3.2355,0,0,0,0,0,41.2,0,4,2,-2.3,3.459,1.258,10.136,6,1.069,1.127,0,0,0,1.18,1,0,0,0,2.278,0.005,0,20.567,2.113,0,0,3,2,3.771,3.472,1,8.551,0,0,1 +4,2.2975,0,0,0,0,0,26.7,0,1,0,0,2.626,0,8.623,0,0.998,1.158,0,0,0,1.095,1,0,0,0,2,0.128,0,0,0.667,0,0,2,1,3.093,2,1,6.951,0,0,1 +5.286,3.4146,0,0,6,0,0,27.3,3,6,9,-2.66,3.792,1.622,10.617,12,1.048,1.159,0,0,0,1.111,1,0,0,0,2.458,0,0,35.324,1.615,0,0,3,3,3.783,3.278,3,8.66,0,0,1 +5.406,2.1252,1,0,7,0,5,43.9,0,4,10,0,4.279,0.777,11.11,10,1.024,1.116,0,1,0,1.28,3,0,0,1,2.428,0,0,47.783,3.924,0,5,9,2,4.711,2.702,3,9.916,0,0,1 +4.868,3.0252,0,0,1,0,0,34.8,3,1,6,1.466,3.292,1.16,10.076,2,0.998,1.143,0,0,0,1.235,1,0,0,0,2.288,-0.008,0,10.884,1.634,0,0,5,1,3.697,2.439,2,8.261,0,0,1 +3.618,2.8543,2,0,0,0,0,33.3,1,0,0,0,3.029,1.522,7.853,0,1.033,1.126,0,0,0,1.299,0,0,1,0,1.732,0.386,0,0,2.094,0,0,0,0,3.71,2.744,0,8.384,0,2,1 +3.732,2.5972,0,0,0,0,0,23.5,1,1,1,0,2.575,1.585,8.16,1,0.991,1.164,0,0,0,1.105,0,0,0,0,1.802,-0.226,0,0,2.488,0,0,1,2,2.93,2.333,1,6.762,0,0,1 +4.629,3.9373,0,0,0,0,0,31.3,0,1,1,-0.056,2.83,1.379,9.312,3,0.998,1.148,0,0,0,1.211,0,0,0,0,2.053,0.143,0,10.319,1.776,0,0,1,0,3.475,2.81,1,7.673,0,0,1 
+3.879,2.9588,0,0,0,0,0,33.3,0,0,4,0,3.091,2.281,8.743,0,0.998,1.152,0,0,0,1.251,0,0,0,0,1.902,0.047,0,0,3.685,0,0,4,1,3.271,2.833,3,7.905,0,0,1 +4.709,4.1448,4,0,2,0,0,25,0,0,0,1.674,3.751,1.792,10.014,0,0.997,1.115,0,0,0,1.399,0,0,0,0,2.152,-0.007,0,0,3.444,0,0,0,0,4.33,2.194,2,9.389,0,0,1 +5.328,5.8726,7,0,0,0,0,25.7,6,4,0,0,4.34,2.088,10.678,9,1.056,1.137,0,0,0,1.225,0,0,1,0,2.34,0,0,12.579,2.507,0,0,0,0,4.429,2.799,0,9.905,0,6,1 +5.086,2.4664,1,0,0,1,0,30.8,2,1,0,0,2.929,1.39,9.812,0,1.014,1.14,0,0,0,1.169,1,0,1,0,2.38,0.126,0,0,1.372,1,0,0,0,3.699,2.31,0,8.194,0,1,1 +4.691,4.7731,0,0,4,0,0,26.9,4,4,4,0,3.786,2.122,10.467,0,0.996,1.155,0,0,0,1.142,0,0,0,0,2.211,0,0,0,3.997,0,0,6,4,3.321,2.517,2,8.334,0,0,1 +4.499,2.9639,0,0,1,0,1,41.2,0,0,2,0,2.976,1.125,9.311,0,0.979,1.127,0,0,0,1.266,1,0,0,0,2.136,0.043,0,0,1.481,0,1,2,1,3.561,2.021,1,7.846,0,0,1 +4.9,3.0875,0,0,1,0,1,40.9,1,1,2,0.052,3.304,1.455,9.955,2,0.993,1.127,0,0,0,1.258,1,0,0,0,2.251,-0.008,0,10.87,2.15,0,1,3,0,3.655,2.394,1,8.286,0,0,1 +4.757,1.8029,0,0,4,0,4,50,0,0,8,0,3.915,0,10.525,0,0.983,1.108,0,0,0,1.313,3,0,0,2,2.295,0,0,0,4.15,0,4,8,2,3.712,1.983,2,8.934,0,0,1 +4.593,2.7059,0,0,2,0,1,39.1,0,0,2,0,3.168,1.461,9.665,0,0.974,1.13,0,0,0,1.283,1,0,0,0,2.175,0.013,0,0,2.298,0,0,1,0,3.566,1.917,1,8.02,0,0,1 +5.563,4.6158,0,0,9,1,6,33.3,5,6,12,0,4.018,1.153,11.151,12,1.044,1.146,0,3,0,1.214,1,0,0,0,2.546,0,0,67.469,1.406,0,3,6,0,4.092,3.298,3,9.375,0,0,1 +5.371,3.236,0,0,0,1,3,39.3,4,1,0,0,3.351,1.124,10.524,3,0.978,1.126,0,0,0,1.273,1,0,0,0,2.364,0.004,0,0,1.718,0,1,0,1,3.765,2.188,0,8.354,0,0,1 +4.901,2.5104,2,0,1,0,5,48,0,3,2,0,4.028,0.946,10.601,8,1.051,1.107,0,1,0,1.295,2,0,1,0,2.329,0,0,20.982,3.43,0,5,2,0,4.016,2.781,1,9.395,0,2,1 +4.813,3.4399,0,0,1,0,2,38.9,0,3,2,0,3.37,1.273,9.947,6,1.041,1.132,0,1,0,1.209,1,0,0,0,2.259,-0.008,0,20.438,1.86,0,2,2,0,3.839,2.985,1,8.549,0,0,1 +4.973,3.602,0,0,2,0,3,35,0,3,4,0,3.45,1.258,10.152,6,1.042,1.144,0,1,0,1.193,1,0,0,0,2.317,0.005,0,20.72,1.706,0,3,4,2,3.883,3.042,2,8.639,0,0,1 +4.562,3.1971,1,0,0,0,2,46.2,0,1,0,0,3.214,0.875,9.54,2,1.023,1.108,0,0,0,1.291,1,0,1,0,2.17,0.046,0,0,1.14,0,2,0,1,3.872,2.681,0,8.473,0,1,1 +5.42,3.2399,0,0,0,4,2,34.5,10,4,0,0,4.009,1.912,11.411,16,0.988,1.136,0,0,0,1.206,1,0,0,0,2.409,0,0,0,4.536,0,0,0,0,3.753,2.097,0,8.931,0,0,1 +5.777,2.3194,0,0,0,2,0,33.3,3,2,0,0,3.323,1.126,10.946,6,0.987,1.139,0,0,0,1.182,3,0,0,0,2.607,-0.002,0,0,1.274,3,0,0,1,3.675,2.097,0,8.164,0,0,1 +5.256,2.6157,0,0,0,1,0,34.6,3,1,0,0.448,3.133,0.861,10.335,2,0.977,1.136,0,0,0,1.252,1,0,0,0,2.327,0.012,0,11.089,1.037,1,0,0,0,3.497,2.175,0,7.914,0,0,1 +4.928,1.9515,0,0,0,0,4,56.3,0,0,0,0,3.82,0,10.506,0,0.975,1.091,0,0,0,1.345,3,0,0,0,2.348,0,0,0,3.091,0,0,0,0,3.788,1.926,0,8.882,0,0,1 +5.003,2.9132,0,0,0,0,3,52.4,1,0,0,0,3.338,0.629,10.084,0,0.972,1.099,0,0,0,1.349,3,0,0,0,2.351,0.004,0,0,1.075,0,0,0,0,3.873,1.909,0,8.471,0,0,1 +4.732,2.1796,0,0,0,0,4,48.1,0,2,0,0,3.633,0.648,10.3,4,0.998,1.108,0,0,0,1.276,2,0,0,0,2.275,0,0,0,3.095,0,2,0,2,3.7,2.411,0,8.654,0,0,1 +5.174,2.5182,0,0,1,0,5,42.9,2,4,2,-1.681,4.049,1.282,10.904,11,1.009,1.121,0,0,0,1.236,2,0,0,0,2.42,0,0,23.818,3.572,0,2,4,2,3.874,2.638,1,9.097,0,0,1 +4.924,2.4008,0,0,0,0,3,45.2,2,1,0,0,3.608,1.429,10.32,4,0.981,1.114,0,0,0,1.299,3,0,0,0,2.348,0,0,0,2.904,0,1,0,0,3.861,1.956,0,8.632,0,0,1 +5.509,3.254,2,0,2,0,5,35.7,0,6,2,0,4.086,1.096,11.165,12,1.063,1.116,0,0,0,1.251,3,0,0,0,2.511,0,0,44.995,1.996,0,3,2,4,4.77,3.377,1,10.203,0,0,1 
+4.861,1.9254,0,0,0,0,0,35.7,0,0,0,0,3.092,0,9.863,0,0.963,1.133,0,0,0,1.222,3,0,0,0,2.303,-0.006,0,0,1.047,2,0,0,0,3.309,1.467,0,7.533,0,0,1 +5.735,1.9915,0,0,0,2,0,35.3,2,2,0,0,3.48,1.074,11.021,4,0.985,1.134,0,0,0,1.18,6,0,0,0,2.641,0,0,0,1.239,2,0,0,2,3.656,2.083,0,8.176,0,0,1 +5.047,2.5237,0,0,0,0,4,58.3,0,0,0,0,3.606,0,10.4,0,0.976,1.087,0,0,0,1.38,6,0,0,0,2.414,0,0,0,1.443,0,0,0,0,3.966,1.905,0,8.812,0,0,1 +5.318,2.361,0,0,2,0,5,53.8,0,2,2,-0.252,3.801,0.682,10.772,8,1.011,1.099,0,0,0,1.296,6,0,0,0,2.508,0,0,24.37,1.393,0,1,2,2,3.955,2.569,1,8.963,0,0,1 +5.408,2.3533,0,0,6,0,6,43.5,4,2,6,-0.202,4.087,1.289,11.084,8,0.993,1.121,0,0,0,1.276,6,0,0,0,2.553,0,0,26.148,1.71,0,2,8,2,3.991,2.292,2,9.158,0,0,1 +4.802,3.136,0,2,4,0,2,42.9,0,0,6,0,3.196,0,9.805,0,1.014,1.136,0,0,0,1.31,3,0,0,1,2.323,0.014,0,0,0.95,0,0,6,1,3.901,2.204,3,8.414,0,0,1 +5.69,2.3056,0,0,8,1,2,48.4,0,2,6,-0.826,3.889,0.565,10.94,5,1.009,1.113,0,0,0,1.277,3,0,0,0,2.55,0,0,23.886,1.935,1,0,4,2,3.857,2.469,2,8.961,0,0,1 +4,2.9848,1,0,0,0,0,22.2,1,0,0,0.5,2.594,0.811,8.318,0,0.994,1.139,0,0,0,1.344,0,0,0,0,1.732,-0.59,0,0,1,0,0,1,2,3.84,2.833,1,8.085,0,0,1 +4.562,3.2789,4,0,0,0,0,25,2,0,0,0,4.705,0.918,9.184,0,1.071,1.077,0,0,0,1.641,0,1,0,0,2,-0.207,0,0,1.521,0,0,0,0,7.129,2.278,0,12.952,0,4,1 +5.262,4.2718,2,0,0,1,0,25,2,0,0,0,3.317,0.863,10.026,0,1.23,1.269,0,0,1,1.042,0,0,0,0,2.175,0.172,0,0,1.435,0,0,0,0,3.997,4.972,0,8.663,0,5,1 +4.303,3.16,2,0,0,0,0,33.3,1,0,0,0,3.13,1.459,8.815,0,1.015,1.129,0,0,0,1.308,0,0,1,0,1.932,-0.227,0,0,1.745,0,0,0,0,3.772,2.676,0,8.437,0,2,1 +6.242,9.1775,0,0,3,12,0,30,3,0,3,0,4.727,1.637,12.421,0,1.311,1.377,0,0,3,0.883,0,0,0,0,2.694,0,0,0,2.922,0,0,3,0,3.991,5.825,1,9.745,0,27,1 +3.802,2.8705,2,0,0,0,0,26.7,2,1,0,0,3.2,1.95,8.394,0,1.026,1.145,0,0,0,1.197,0,0,0,0,1.848,0.139,0,0,2.885,0,0,0,0,3.57,2.532,0,8.378,0,2,1 +4.228,3.1334,0,0,0,0,0,26.3,3,2,0,0,2.746,1.557,8.891,0,0.998,1.153,0,0,0,1.103,0,0,0,0,1.932,0.136,0,0,2.535,0,0,0,1,3.105,2.548,0,7.107,0,0,1 +5.13,3.5878,0,0,0,1,0,32.1,4,1,0,0,3.095,1.761,10.106,1,0.974,1.141,0,0,0,1.237,0,0,0,0,2.175,-0.022,0,10.101,2.729,0,0,0,0,3.309,2.258,0,7.751,0,0,1 +5.086,3.9599,3,0,0,1,0,28.6,1,1,0,0,3.347,0.918,9.771,0,1.152,1.125,0,0,1,1.32,0,0,0,0,2.074,-0.263,0,9.431,1.372,0,0,0,0,4.238,3.764,0,8.925,0,3,1 +4.236,2.9709,0,0,1,0,0,31.9,1,1,1,-0.151,3.915,3.554,10.042,1,0.969,1.143,0,0,0,1.198,0,0,0,0,1.995,0,0,10.604,9.701,0,0,1,2,3.309,1.861,1,8.305,0,0,1 +4.303,3.6845,1,0,0,0,0,15.4,0,3,0,0,2.917,1.459,8.815,4,1.038,1.155,0,0,0,1.172,0,0,0,0,1.932,-0.264,0,9.921,1.745,0,0,0,0,4.19,3.154,0,8.634,0,0,1 +5.262,4.5221,1,0,0,0,0,28.3,3,4,0,0,3.856,2.733,10.387,12,0.987,1.144,0,0,0,1.187,0,0,0,0,2.257,0,0,12.436,3.441,0,0,0,0,4.415,2.115,0,9.152,0,0,1 +6.253,7.1279,0,0,0,7,0,30.8,1,2,0,-4.279,4.252,1.544,11.905,2,1.285,1.34,0,0,1,0.908,0,0,0,0,2.648,0,0,9.723,3.516,0,0,0,1,3.954,5.737,0,9.274,0,15,1 +4.807,3.1717,0,0,0,0,3,42.9,3,0,0,0,3.075,0.881,9.833,0,0.967,1.119,0,0,0,1.321,1,0,0,0,2.246,-0.023,0,0,1.06,0,0,0,0,3.698,1.889,0,8.086,0,0,1 +5.236,3.778,5,0,0,0,6,46.7,1,0,0,0,5.013,0.799,10.434,0,1.045,1.058,0,0,0,1.58,1,1,0,0,2.414,0.004,1,0,0.873,0,5,0,0,6.982,2.146,0,13.184,0,5,1 +4.562,3.1297,0,0,2,0,2,37.5,0,0,4,0,2.997,0.875,9.54,0,0.991,1.14,0,0,0,1.242,1,0,0,0,2.17,0.045,0,0,1.14,0,2,4,4,3.633,2.417,2,7.969,0,0,1 +4.658,2.9363,0,0,2,0,2,38.1,1,1,4,-0.074,3.314,1.455,9.9,1,1.003,1.137,0,0,0,1.239,1,0,0,0,2.22,-0.008,0,10.556,2.349,0,2,5,3,3.658,2.591,2,8.311,0,0,1 
+5.388,3.3114,0,0,0,1,3,39.3,4,1,0,0,3.351,1.124,10.539,4,0.978,1.126,0,0,0,1.27,1,0,0,0,2.377,0.004,0,0,1.69,0,1,0,1,3.78,2.188,0,8.357,0,0,1 +4.97,3.3797,0,0,0,0,4,40.9,3,1,0,0,3.193,0.861,10.06,4,0.983,1.123,0,0,0,1.272,1,0,0,0,2.307,0.013,0,0,0.962,0,1,0,1,3.774,2.267,0,8.239,0,0,1 +5.399,3.4164,0,0,0,1,4,38.7,5,1,0,0,3.424,1.084,10.644,4,0.977,1.127,0,0,0,1.271,1,0,0,0,2.403,-0.002,0,0,1.65,0,1,0,1,3.818,2.147,0,8.453,0,0,1 +4.807,3.3179,1,0,0,0,3,43.8,1,1,0,0,3.297,0.881,9.833,2,1.012,1.113,0,0,0,1.298,1,0,1,0,2.246,-0.025,0,0,1.06,0,2,0,1,3.926,2.568,0,8.562,0,1,1 +4.607,3.0008,1,0,0,0,2,41.2,0,1,0,0,3.256,1.187,9.631,2,0.996,1.105,0,0,0,1.302,1,0,0,0,2.194,-0.025,0,0,1.544,0,2,0,1,3.865,2.352,0,8.506,0,0,1 +5.313,2.7782,0,0,0,1,2,40,4,1,0,0,3.564,1.697,10.592,2,0.976,1.125,0,0,0,1.274,1,0,0,0,2.329,-0.001,0,10.535,2.986,0,0,0,0,3.692,2.161,0,8.482,0,0,1 +5.103,3.9184,0,0,4,0,4,35,1,5,4,0,3.649,1.197,10.456,11,1.08,1.144,0,2,0,1.195,1,0,0,0,2.38,0.001,0,41.425,1.74,0,3,4,1,3.967,3.619,2,8.984,0,0,1 +5.265,3.3444,2,0,6,0,1,35.3,2,3,3,0.134,3.998,2.353,10.632,6,1.012,1.123,0,0,0,1.296,1,0,0,0,2.3,0,0,0,3.88,0,0,3,0,4.698,2.463,2,9.666,0,0,1 +5.029,2.5966,0,0,0,0,4,46.7,0,4,0,-0.761,3.837,1.418,10.634,10,1.02,1.111,0,0,0,1.254,3,0,0,0,2.403,0,0,22.765,2.586,0,0,0,0,3.919,2.611,0,8.958,2,0,1 +5.431,2.8955,0,0,0,2,0,32.1,4,1,1,0.374,3.233,0.832,10.681,2,0.982,1.144,0,0,0,1.232,1,0,0,0,2.394,-0.007,0,11.254,1.055,2,0,6,1,3.573,2.242,1,8.088,0,0,1 +5.287,3.3732,0,0,9,0,0,35.3,0,9,9,-5.256,4.319,2.346,11.029,21,1.043,1.14,0,0,0,1.178,1,0,0,0,2.462,0,0,71.167,3.396,0,0,3,0,3.787,3.083,3,9.278,0,0,1 +4.869,1.767,0,1,9,0,5,44.4,0,4,14,-0.391,4.435,1.073,11.072,9,1.016,1.123,0,1,0,1.261,3,0,0,0,2.314,0,0,33.54,6.465,0,4,13,0,3.848,2.576,5,9.537,1,0,1 +5.158,1.6914,2,0,36,0,9,56.1,0,0,44,0,4.902,0.257,11.817,0,1.007,1.093,0,0,0,1.41,147,0,1,2,2.622,0,0,0,1.535,0,1,16,0,5.808,2.055,8,11.055,0,1,1 +5.076,2.6588,2,0,0,0,4,54.5,0,0,0,0,3.792,0.673,10.327,0,1.003,1.089,0,0,0,1.364,2,0,1,0,2.363,-0.001,0,0,2.106,0,2,0,0,4.009,2.206,0,9.13,0,2,1 diff --git a/run.py b/run.py index 4da2010..3f9beb1 100644 --- a/run.py +++ b/run.py @@ -333,6 +333,18 @@ def retrieveFileName(): global addGradB addGradB = addRF+randomSearchVar + global KNNModelsCount + global LRModelsCount + global MLPModelsCount + global RFModelsCount + global GradBModelsCount + + KNNModelsCount = 0 + LRModelsCount = KNNModelsCount+randomSearchVar + MLPModelsCount = LRModelsCount+randomSearchVar + RFModelsCount = MLPModelsCount+randomSearchVar + GradBModelsCount = RFModelsCount+randomSearchVar + # Initializing models global RetrieveModelsList @@ -398,6 +410,7 @@ def retrieveFileName(): global fileInput fileInput = data['fileName'] + DataRawLength = -1 DataRawLengthTest = -1 print(data['fileName']) @@ -983,7 +996,6 @@ def PreprocessingPred(): predictionsRF = ResultsGatheredFirst[4] + ResultsGatheredLast[4] predictionsGradB = ResultsGatheredFirst[5] + ResultsGatheredLast[5] yDataSorted = yDataSortedFirst + yDataSortedLast - return [predictionsKNN, predictionsLR, predictionsMLP, predictionsRF, predictionsGradB, predictions] def computeClusters(dataLocal,one,two,three,four,five,flagLocal): @@ -1813,12 +1825,10 @@ def EnsembleModel (Models, keyRetrieved): sclf.fit(XData, yData) y_pred = sclf.predict(XDataTest) print('Test data set') - print(accuracy_score(yDataTest, y_pred)) print(classification_report(yDataTest, y_pred)) y_pred = sclf.predict(XDataExternal) print('External data set') - print(accuracy_score(yDataExternal, y_pred)) 
     print(classification_report(yDataExternal, y_pred))

     return 'Okay'
@@ -1867,6 +1877,8 @@ def returnResults(ModelSpaceMDS,ModelSpaceTSNE,ModelSpaceUMAP,parametersGen,sumP
     XDataJSONEntireSet = XData.to_json(orient='records')
     XDataColumns = XData.columns.tolist()

+    ModelsIDsPreviously = PreprocessingIDs()
+
     Results.append(json.dumps(ModelsIDs))
     Results.append(json.dumps(sumPerClassifier))
     Results.append(json.dumps(parametersGenPD))
@@ -1883,6 +1895,7 @@ def returnResults(ModelSpaceMDS,ModelSpaceTSNE,ModelSpaceUMAP,parametersGen,sumP
     Results.append(json.dumps(names_labels))
     Results.append(json.dumps(yDataSorted))
     Results.append(json.dumps(mode))
+    Results.append(json.dumps(ModelsIDsPreviously))
     return Results
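Note on the retrieveFileName() hunk above: the new counters (KNNModelsCount, LRModelsCount, MLPModelsCount, RFModelsCount, GradBModelsCount) mark where each classifier family's block of model IDs begins, with each family occupying a contiguous run of randomSearchVar IDs in the order KNN, LR, MLP, RF, GradB. The sketch below is a minimal standalone illustration of that offset scheme under stated assumptions, not code from run.py; the randomSearchVar value and the helper classifier_for_model_id are made up for the example.

# Minimal standalone sketch (not taken from run.py) of the ID-offset
# bookkeeping added in retrieveFileName(): each classifier family owns a
# contiguous block of randomSearchVar model IDs.

randomSearchVar = 100  # assumed example value, purely for illustration

KNNModelsCount = 0
LRModelsCount = KNNModelsCount + randomSearchVar      # block starts at 100
MLPModelsCount = LRModelsCount + randomSearchVar      # block starts at 200
RFModelsCount = MLPModelsCount + randomSearchVar      # block starts at 300
GradBModelsCount = RFModelsCount + randomSearchVar    # block starts at 400

def classifier_for_model_id(model_id):
    # Hypothetical helper (not in the repository): map a global model ID
    # back to the classifier family whose block contains it.
    blocks = [
        ("KNN", KNNModelsCount),
        ("LR", LRModelsCount),
        ("MLP", MLPModelsCount),
        ("RF", RFModelsCount),
        ("GradB", GradBModelsCount),
    ]
    family_name = None
    for family, block_start in blocks:
        if model_id >= block_start:
            family_name = family
    return family_name

print(classifier_for_model_id(93))   # -> KNN
print(classifier_for_model_id(250))  # -> MLP
print(classifier_for_model_id(420))  # -> GradB

Under these assumptions, IDs 0-99 map to KNN, 100-199 to LR, and so on, which is presumably how downstream code can recover a model's family from its global ID.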