From e11ad323734e8d2d0feb96226b63c8be9a27445f Mon Sep 17 00:00:00 2001 From: Angelos Chatzimparmpas Date: Thu, 5 Nov 2020 10:36:21 +0100 Subject: [PATCH] paper-version Former-commit-id: 81af0c596384a8b10c4de18b6a7246564655cde1 --- data/heart.csv | 304 ++++++++++++++++++ data/iris.csv | 152 +++++++++ .../src/components/DataSetExecController.vue | 3 +- insertMongo.py | 5 +- requirements.txt | 7 + run.py | 73 ++--- 6 files changed, 496 insertions(+), 48 deletions(-) create mode 100644 data/heart.csv create mode 100755 data/iris.csv create mode 100644 requirements.txt diff --git a/data/heart.csv b/data/heart.csv new file mode 100644 index 000000000..9e9a1f9a6 --- /dev/null +++ b/data/heart.csv @@ -0,0 +1,304 @@ +Age,Sex,Cp,Trestbps,Chol,Fbs,Restecg,Thalach,Exang,Oldpeak,Slope,Ca,Thal,Outcome* +63,1,3,145,233,1,0,150,0,2.3,0,0,1,1 +37,1,2,130,250,0,1,187,0,3.5,0,0,2,1 +41,0,1,130,204,0,0,172,0,1.4,2,0,2,1 +56,1,1,120,236,0,1,178,0,0.8,2,0,2,1 +57,0,0,120,354,0,1,163,1,0.6,2,0,2,1 +57,1,0,140,192,0,1,148,0,0.4,1,0,1,1 +56,0,1,140,294,0,0,153,0,1.3,1,0,2,1 +44,1,1,120,263,0,1,173,0,0,2,0,3,1 +52,1,2,172,199,1,1,162,0,0.5,2,0,3,1 +57,1,2,150,168,0,1,174,0,1.6,2,0,2,1 +54,1,0,140,239,0,1,160,0,1.2,2,0,2,1 +48,0,2,130,275,0,1,139,0,0.2,2,0,2,1 +49,1,1,130,266,0,1,171,0,0.6,2,0,2,1 +64,1,3,110,211,0,0,144,1,1.8,1,0,2,1 +58,0,3,150,283,1,0,162,0,1,2,0,2,1 +50,0,2,120,219,0,1,158,0,1.6,1,0,2,1 +58,0,2,120,340,0,1,172,0,0,2,0,2,1 +66,0,3,150,226,0,1,114,0,2.6,0,0,2,1 +43,1,0,150,247,0,1,171,0,1.5,2,0,2,1 +69,0,3,140,239,0,1,151,0,1.8,2,2,2,1 +59,1,0,135,234,0,1,161,0,0.5,1,0,3,1 +44,1,2,130,233,0,1,179,1,0.4,2,0,2,1 +42,1,0,140,226,0,1,178,0,0,2,0,2,1 +61,1,2,150,243,1,1,137,1,1,1,0,2,1 +40,1,3,140,199,0,1,178,1,1.4,2,0,3,1 +71,0,1,160,302,0,1,162,0,0.4,2,2,2,1 +59,1,2,150,212,1,1,157,0,1.6,2,0,2,1 +51,1,2,110,175,0,1,123,0,0.6,2,0,2,1 +65,0,2,140,417,1,0,157,0,0.8,2,1,2,1 +53,1,2,130,197,1,0,152,0,1.2,0,0,2,1 +41,0,1,105,198,0,1,168,0,0,2,1,2,1 +65,1,0,120,177,0,1,140,0,0.4,2,0,3,1 +44,1,1,130,219,0,0,188,0,0,2,0,2,1 +54,1,2,125,273,0,0,152,0,0.5,0,1,2,1 +51,1,3,125,213,0,0,125,1,1.4,2,1,2,1 +46,0,2,142,177,0,0,160,1,1.4,0,0,2,1 +54,0,2,135,304,1,1,170,0,0,2,0,2,1 +54,1,2,150,232,0,0,165,0,1.6,2,0,3,1 +65,0,2,155,269,0,1,148,0,0.8,2,0,2,1 +65,0,2,160,360,0,0,151,0,0.8,2,0,2,1 +51,0,2,140,308,0,0,142,0,1.5,2,1,2,1 +48,1,1,130,245,0,0,180,0,0.2,1,0,2,1 +45,1,0,104,208,0,0,148,1,3,1,0,2,1 +53,0,0,130,264,0,0,143,0,0.4,1,0,2,1 +39,1,2,140,321,0,0,182,0,0,2,0,2,1 +52,1,1,120,325,0,1,172,0,0.2,2,0,2,1 +44,1,2,140,235,0,0,180,0,0,2,0,2,1 +47,1,2,138,257,0,0,156,0,0,2,0,2,1 +53,0,2,128,216,0,0,115,0,0,2,0,0,1 +53,0,0,138,234,0,0,160,0,0,2,0,2,1 +51,0,2,130,256,0,0,149,0,0.5,2,0,2,1 +66,1,0,120,302,0,0,151,0,0.4,1,0,2,1 +62,1,2,130,231,0,1,146,0,1.8,1,3,3,1 +44,0,2,108,141,0,1,175,0,0.6,1,0,2,1 +63,0,2,135,252,0,0,172,0,0,2,0,2,1 +52,1,1,134,201,0,1,158,0,0.8,2,1,2,1 +48,1,0,122,222,0,0,186,0,0,2,0,2,1 +45,1,0,115,260,0,0,185,0,0,2,0,2,1 +34,1,3,118,182,0,0,174,0,0,2,0,2,1 +57,0,0,128,303,0,0,159,0,0,2,1,2,1 +71,0,2,110,265,1,0,130,0,0,2,1,2,1 +54,1,1,108,309,0,1,156,0,0,2,0,3,1 +52,1,3,118,186,0,0,190,0,0,1,0,1,1 +41,1,1,135,203,0,1,132,0,0,1,0,1,1 +58,1,2,140,211,1,0,165,0,0,2,0,2,1 +35,0,0,138,183,0,1,182,0,1.4,2,0,2,1 +51,1,2,100,222,0,1,143,1,1.2,1,0,2,1 +45,0,1,130,234,0,0,175,0,0.6,1,0,2,1 +44,1,1,120,220,0,1,170,0,0,2,0,2,1 +62,0,0,124,209,0,1,163,0,0,2,0,2,1 +54,1,2,120,258,0,0,147,0,0.4,1,0,3,1 +51,1,2,94,227,0,1,154,1,0,2,1,3,1 +29,1,1,130,204,0,0,202,0,0,2,0,2,1 +51,1,0,140,261,0,0,186,1,0,2,0,2,1 
+43,0,2,122,213,0,1,165,0,0.2,1,0,2,1 +55,0,1,135,250,0,0,161,0,1.4,1,0,2,1 +51,1,2,125,245,1,0,166,0,2.4,1,0,2,1 +59,1,1,140,221,0,1,164,1,0,2,0,2,1 +52,1,1,128,205,1,1,184,0,0,2,0,2,1 +58,1,2,105,240,0,0,154,1,0.6,1,0,3,1 +41,1,2,112,250,0,1,179,0,0,2,0,2,1 +45,1,1,128,308,0,0,170,0,0,2,0,2,1 +60,0,2,102,318,0,1,160,0,0,2,1,2,1 +52,1,3,152,298,1,1,178,0,1.2,1,0,3,1 +42,0,0,102,265,0,0,122,0,0.6,1,0,2,1 +67,0,2,115,564,0,0,160,0,1.6,1,0,3,1 +68,1,2,118,277,0,1,151,0,1,2,1,3,1 +46,1,1,101,197,1,1,156,0,0,2,0,3,1 +54,0,2,110,214,0,1,158,0,1.6,1,0,2,1 +58,0,0,100,248,0,0,122,0,1,1,0,2,1 +48,1,2,124,255,1,1,175,0,0,2,2,2,1 +57,1,0,132,207,0,1,168,1,0,2,0,3,1 +52,1,2,138,223,0,1,169,0,0,2,4,2,1 +54,0,1,132,288,1,0,159,1,0,2,1,2,1 +45,0,1,112,160,0,1,138,0,0,1,0,2,1 +53,1,0,142,226,0,0,111,1,0,2,0,3,1 +62,0,0,140,394,0,0,157,0,1.2,1,0,2,1 +52,1,0,108,233,1,1,147,0,0.1,2,3,3,1 +43,1,2,130,315,0,1,162,0,1.9,2,1,2,1 +53,1,2,130,246,1,0,173,0,0,2,3,2,1 +42,1,3,148,244,0,0,178,0,0.8,2,2,2,1 +59,1,3,178,270,0,0,145,0,4.2,0,0,3,1 +63,0,1,140,195,0,1,179,0,0,2,2,2,1 +42,1,2,120,240,1,1,194,0,0.8,0,0,3,1 +50,1,2,129,196,0,1,163,0,0,2,0,2,1 +68,0,2,120,211,0,0,115,0,1.5,1,0,2,1 +69,1,3,160,234,1,0,131,0,0.1,1,1,2,1 +45,0,0,138,236,0,0,152,1,0.2,1,0,2,1 +50,0,1,120,244,0,1,162,0,1.1,2,0,2,1 +50,0,0,110,254,0,0,159,0,0,2,0,2,1 +64,0,0,180,325,0,1,154,1,0,2,0,2,1 +57,1,2,150,126,1,1,173,0,0.2,2,1,3,1 +64,0,2,140,313,0,1,133,0,0.2,2,0,3,1 +43,1,0,110,211,0,1,161,0,0,2,0,3,1 +55,1,1,130,262,0,1,155,0,0,2,0,2,1 +37,0,2,120,215,0,1,170,0,0,2,0,2,1 +41,1,2,130,214,0,0,168,0,2,1,0,2,1 +56,1,3,120,193,0,0,162,0,1.9,1,0,3,1 +46,0,1,105,204,0,1,172,0,0,2,0,2,1 +46,0,0,138,243,0,0,152,1,0,1,0,2,1 +64,0,0,130,303,0,1,122,0,2,1,2,2,1 +59,1,0,138,271,0,0,182,0,0,2,0,2,1 +41,0,2,112,268,0,0,172,1,0,2,0,2,1 +54,0,2,108,267,0,0,167,0,0,2,0,2,1 +39,0,2,94,199,0,1,179,0,0,2,0,2,1 +34,0,1,118,210,0,1,192,0,0.7,2,0,2,1 +47,1,0,112,204,0,1,143,0,0.1,2,0,2,1 +67,0,2,152,277,0,1,172,0,0,2,1,2,1 +52,0,2,136,196,0,0,169,0,0.1,1,0,2,1 +74,0,1,120,269,0,0,121,1,0.2,2,1,2,1 +54,0,2,160,201,0,1,163,0,0,2,1,2,1 +49,0,1,134,271,0,1,162,0,0,1,0,2,1 +42,1,1,120,295,0,1,162,0,0,2,0,2,1 +41,1,1,110,235,0,1,153,0,0,2,0,2,1 +41,0,1,126,306,0,1,163,0,0,2,0,2,1 +49,0,0,130,269,0,1,163,0,0,2,0,2,1 +60,0,2,120,178,1,1,96,0,0,2,0,2,1 +62,1,1,128,208,1,0,140,0,0,2,0,2,1 +57,1,0,110,201,0,1,126,1,1.5,1,0,1,1 +64,1,0,128,263,0,1,105,1,0.2,1,1,3,1 +51,0,2,120,295,0,0,157,0,0.6,2,0,2,1 +43,1,0,115,303,0,1,181,0,1.2,1,0,2,1 +42,0,2,120,209,0,1,173,0,0,1,0,2,1 +67,0,0,106,223,0,1,142,0,0.3,2,2,2,1 +76,0,2,140,197,0,2,116,0,1.1,1,0,2,1 +70,1,1,156,245,0,0,143,0,0,2,0,2,1 +44,0,2,118,242,0,1,149,0,0.3,1,1,2,1 +60,0,3,150,240,0,1,171,0,0.9,2,0,2,1 +44,1,2,120,226,0,1,169,0,0,2,0,2,1 +42,1,2,130,180,0,1,150,0,0,2,0,2,1 +66,1,0,160,228,0,0,138,0,2.3,2,0,1,1 +71,0,0,112,149,0,1,125,0,1.6,1,0,2,1 +64,1,3,170,227,0,0,155,0,0.6,1,0,3,1 +66,0,2,146,278,0,0,152,0,0,1,1,2,1 +39,0,2,138,220,0,1,152,0,0,1,0,2,1 +58,0,0,130,197,0,1,131,0,0.6,1,0,2,1 +47,1,2,130,253,0,1,179,0,0,2,0,2,1 +35,1,1,122,192,0,1,174,0,0,2,0,2,1 +58,1,1,125,220,0,1,144,0,0.4,1,4,3,1 +56,1,1,130,221,0,0,163,0,0,2,0,3,1 +56,1,1,120,240,0,1,169,0,0,0,0,2,1 +55,0,1,132,342,0,1,166,0,1.2,2,0,2,1 +41,1,1,120,157,0,1,182,0,0,2,0,2,1 +38,1,2,138,175,0,1,173,0,0,2,4,2,1 +38,1,2,138,175,0,1,173,0,0,2,4,2,1 +67,1,0,160,286,0,0,108,1,1.5,1,3,2,0 +67,1,0,120,229,0,0,129,1,2.6,1,2,3,0 +62,0,0,140,268,0,0,160,0,3.6,0,2,2,0 +63,1,0,130,254,0,0,147,0,1.4,1,1,3,0 +53,1,0,140,203,1,0,155,1,3.1,0,0,3,0 
+56,1,2,130,256,1,0,142,1,0.6,1,1,1,0 +48,1,1,110,229,0,1,168,0,1,0,0,3,0 +58,1,1,120,284,0,0,160,0,1.8,1,0,2,0 +58,1,2,132,224,0,0,173,0,3.2,2,2,3,0 +60,1,0,130,206,0,0,132,1,2.4,1,2,3,0 +40,1,0,110,167,0,0,114,1,2,1,0,3,0 +60,1,0,117,230,1,1,160,1,1.4,2,2,3,0 +64,1,2,140,335,0,1,158,0,0,2,0,2,0 +43,1,0,120,177,0,0,120,1,2.5,1,0,3,0 +57,1,0,150,276,0,0,112,1,0.6,1,1,1,0 +55,1,0,132,353,0,1,132,1,1.2,1,1,3,0 +65,0,0,150,225,0,0,114,0,1,1,3,3,0 +61,0,0,130,330,0,0,169,0,0,2,0,2,0 +58,1,2,112,230,0,0,165,0,2.5,1,1,3,0 +50,1,0,150,243,0,0,128,0,2.6,1,0,3,0 +44,1,0,112,290,0,0,153,0,0,2,1,2,0 +60,1,0,130,253,0,1,144,1,1.4,2,1,3,0 +54,1,0,124,266,0,0,109,1,2.2,1,1,3,0 +50,1,2,140,233,0,1,163,0,0.6,1,1,3,0 +41,1,0,110,172,0,0,158,0,0,2,0,3,0 +51,0,0,130,305,0,1,142,1,1.2,1,0,3,0 +58,1,0,128,216,0,0,131,1,2.2,1,3,3,0 +54,1,0,120,188,0,1,113,0,1.4,1,1,3,0 +60,1,0,145,282,0,0,142,1,2.8,1,2,3,0 +60,1,2,140,185,0,0,155,0,3,1,0,2,0 +59,1,0,170,326,0,0,140,1,3.4,0,0,3,0 +46,1,2,150,231,0,1,147,0,3.6,1,0,2,0 +67,1,0,125,254,1,1,163,0,0.2,1,2,3,0 +62,1,0,120,267,0,1,99,1,1.8,1,2,3,0 +65,1,0,110,248,0,0,158,0,0.6,2,2,1,0 +44,1,0,110,197,0,0,177,0,0,2,1,2,0 +60,1,0,125,258,0,0,141,1,2.8,1,1,3,0 +58,1,0,150,270,0,0,111,1,0.8,2,0,3,0 +68,1,2,180,274,1,0,150,1,1.6,1,0,3,0 +62,0,0,160,164,0,0,145,0,6.2,0,3,3,0 +52,1,0,128,255,0,1,161,1,0,2,1,3,0 +59,1,0,110,239,0,0,142,1,1.2,1,1,3,0 +60,0,0,150,258,0,0,157,0,2.6,1,2,3,0 +49,1,2,120,188,0,1,139,0,2,1,3,3,0 +59,1,0,140,177,0,1,162,1,0,2,1,3,0 +57,1,2,128,229,0,0,150,0,0.4,1,1,3,0 +61,1,0,120,260,0,1,140,1,3.6,1,1,3,0 +39,1,0,118,219,0,1,140,0,1.2,1,0,3,0 +61,0,0,145,307,0,0,146,1,1,1,0,3,0 +56,1,0,125,249,1,0,144,1,1.2,1,1,2,0 +43,0,0,132,341,1,0,136,1,3,1,0,3,0 +62,0,2,130,263,0,1,97,0,1.2,1,1,3,0 +63,1,0,130,330,1,0,132,1,1.8,2,3,3,0 +65,1,0,135,254,0,0,127,0,2.8,1,1,3,0 +48,1,0,130,256,1,0,150,1,0,2,2,3,0 +63,0,0,150,407,0,0,154,0,4,1,3,3,0 +55,1,0,140,217,0,1,111,1,5.6,0,0,3,0 +65,1,3,138,282,1,0,174,0,1.4,1,1,2,0 +56,0,0,200,288,1,0,133,1,4,0,2,3,0 +54,1,0,110,239,0,1,126,1,2.8,1,1,3,0 +70,1,0,145,174,0,1,125,1,2.6,0,0,3,0 +62,1,1,120,281,0,0,103,0,1.4,1,1,3,0 +35,1,0,120,198,0,1,130,1,1.6,1,0,3,0 +59,1,3,170,288,0,0,159,0,0.2,1,0,3,0 +64,1,2,125,309,0,1,131,1,1.8,1,0,3,0 +47,1,2,108,243,0,1,152,0,0,2,0,2,0 +57,1,0,165,289,1,0,124,0,1,1,3,3,0 +55,1,0,160,289,0,0,145,1,0.8,1,1,3,0 +64,1,0,120,246,0,0,96,1,2.2,0,1,2,0 +70,1,0,130,322,0,0,109,0,2.4,1,3,2,0 +51,1,0,140,299,0,1,173,1,1.6,2,0,3,0 +58,1,0,125,300,0,0,171,0,0,2,2,3,0 +60,1,0,140,293,0,0,170,0,1.2,1,2,3,0 +77,1,0,125,304,0,0,162,1,0,2,3,2,0 +35,1,0,126,282,0,0,156,1,0,2,0,3,0 +70,1,2,160,269,0,1,112,1,2.9,1,1,3,0 +59,0,0,174,249,0,1,143,1,0,1,0,2,0 +64,1,0,145,212,0,0,132,0,2,1,2,1,0 +57,1,0,152,274,0,1,88,1,1.2,1,1,3,0 +56,1,0,132,184,0,0,105,1,2.1,1,1,1,0 +48,1,0,124,274,0,0,166,0,0.5,1,0,3,0 +56,0,0,134,409,0,0,150,1,1.9,1,2,3,0 +66,1,1,160,246,0,1,120,1,0,1,3,1,0 +54,1,1,192,283,0,0,195,0,0,2,1,3,0 +69,1,2,140,254,0,0,146,0,2,1,3,3,0 +51,1,0,140,298,0,1,122,1,4.2,1,3,3,0 +43,1,0,132,247,1,0,143,1,0.1,1,4,3,0 +62,0,0,138,294,1,1,106,0,1.9,1,3,2,0 +67,1,0,100,299,0,0,125,1,0.9,1,2,2,0 +59,1,3,160,273,0,0,125,0,0,2,0,2,0 +45,1,0,142,309,0,0,147,1,0,1,3,3,0 +58,1,0,128,259,0,0,130,1,3,1,2,3,0 +50,1,0,144,200,0,0,126,1,0.9,1,0,3,0 +62,0,0,150,244,0,1,154,1,1.4,1,0,2,0 +38,1,3,120,231,0,1,182,1,3.8,1,0,3,0 +66,0,0,178,228,1,1,165,1,1,1,2,3,0 +52,1,0,112,230,0,1,160,0,0,2,1,2,0 +53,1,0,123,282,0,1,95,1,2,1,2,3,0 +63,0,0,108,269,0,1,169,1,1.8,1,2,2,0 +54,1,0,110,206,0,0,108,1,0,1,1,2,0 
+66,1,0,112,212,0,0,132,1,0.1,2,1,2,0 +55,0,0,180,327,0,2,117,1,3.4,1,0,2,0 +49,1,2,118,149,0,0,126,0,0.8,2,3,2,0 +54,1,0,122,286,0,0,116,1,3.2,1,2,2,0 +56,1,0,130,283,1,0,103,1,1.6,0,0,3,0 +46,1,0,120,249,0,0,144,0,0.8,2,0,3,0 +61,1,3,134,234,0,1,145,0,2.6,1,2,2,0 +67,1,0,120,237,0,1,71,0,1,1,0,2,0 +58,1,0,100,234,0,1,156,0,0.1,2,1,3,0 +47,1,0,110,275,0,0,118,1,1,1,1,2,0 +52,1,0,125,212,0,1,168,0,1,2,2,3,0 +58,1,0,146,218,0,1,105,0,2,1,1,3,0 +57,1,1,124,261,0,1,141,0,0.3,2,0,3,0 +58,0,1,136,319,1,0,152,0,0,2,2,2,0 +61,1,0,138,166,0,0,125,1,3.6,1,1,2,0 +42,1,0,136,315,0,1,125,1,1.8,1,0,1,0 +52,1,0,128,204,1,1,156,1,1,1,0,0,0 +59,1,2,126,218,1,1,134,0,2.2,1,1,1,0 +40,1,0,152,223,0,1,181,0,0,2,0,3,0 +61,1,0,140,207,0,0,138,1,1.9,2,1,3,0 +46,1,0,140,311,0,1,120,1,1.8,1,2,3,0 +59,1,3,134,204,0,1,162,0,0.8,2,2,2,0 +57,1,1,154,232,0,0,164,0,0,2,1,2,0 +57,1,0,110,335,0,1,143,1,3,1,1,3,0 +55,0,0,128,205,0,2,130,1,2,1,1,3,0 +61,1,0,148,203,0,1,161,0,0,2,1,3,0 +58,1,0,114,318,0,2,140,0,4.4,0,3,1,0 +58,0,0,170,225,1,0,146,1,2.8,1,2,1,0 +67,1,2,152,212,0,0,150,0,0.8,1,0,3,0 +44,1,0,120,169,0,1,144,1,2.8,0,0,1,0 +63,1,0,140,187,0,0,144,1,4,2,2,3,0 +63,0,0,124,197,0,1,136,1,0,1,0,2,0 +59,1,0,164,176,1,0,90,0,1,1,2,1,0 +57,0,0,140,241,0,1,123,1,0.2,1,0,3,0 +45,1,3,110,264,0,1,132,0,1.2,1,0,3,0 +68,1,0,144,193,1,1,141,0,3.4,1,2,3,0 +57,1,0,130,131,0,1,115,1,1.2,1,1,3,0 +57,0,1,130,236,0,0,174,0,0,1,1,2,0 diff --git a/data/iris.csv b/data/iris.csv new file mode 100755 index 000000000..7456e9f47 --- /dev/null +++ b/data/iris.csv @@ -0,0 +1,152 @@ +"sepal_length","sepal_width","petal_length","petal_width","Species*" +5.1,3.5,1.4,0.2,Iris-setosa +4.9,3.0,1.4,0.2,Iris-setosa +4.7,3.2,1.3,0.2,Iris-setosa +4.6,3.1,1.5,0.2,Iris-setosa +5.0,3.6,1.4,0.2,Iris-setosa +5.4,3.9,1.7,0.4,Iris-setosa +4.6,3.4,1.4,0.3,Iris-setosa +5.0,3.4,1.5,0.2,Iris-setosa +4.4,2.9,1.4,0.2,Iris-setosa +4.9,3.1,1.5,0.1,Iris-setosa +5.4,3.7,1.5,0.2,Iris-setosa +4.8,3.4,1.6,0.2,Iris-setosa +4.8,3.0,1.4,0.1,Iris-setosa +4.3,3.0,1.1,0.1,Iris-setosa +5.8,4.0,1.2,0.2,Iris-setosa +5.7,4.4,1.5,0.4,Iris-setosa +5.4,3.9,1.3,0.4,Iris-setosa +5.1,3.5,1.4,0.3,Iris-setosa +5.7,3.8,1.7,0.3,Iris-setosa +5.1,3.8,1.5,0.3,Iris-setosa +5.4,3.4,1.7,0.2,Iris-setosa +5.1,3.7,1.5,0.4,Iris-setosa +4.6,3.6,1.0,0.2,Iris-setosa +5.1,3.3,1.7,0.5,Iris-setosa +4.8,3.4,1.9,0.2,Iris-setosa +5.0,3.0,1.6,0.2,Iris-setosa +5.0,3.4,1.6,0.4,Iris-setosa +5.2,3.5,1.5,0.2,Iris-setosa +5.2,3.4,1.4,0.2,Iris-setosa +4.7,3.2,1.6,0.2,Iris-setosa +4.8,3.1,1.6,0.2,Iris-setosa +5.4,3.4,1.5,0.4,Iris-setosa +5.2,4.1,1.5,0.1,Iris-setosa +5.5,4.2,1.4,0.2,Iris-setosa +4.9,3.1,1.5,0.1,Iris-setosa +5.0,3.2,1.2,0.2,Iris-setosa +5.5,3.5,1.3,0.2,Iris-setosa +4.9,3.1,1.5,0.1,Iris-setosa +4.4,3.0,1.3,0.2,Iris-setosa +5.1,3.4,1.5,0.2,Iris-setosa +5.0,3.5,1.3,0.3,Iris-setosa +4.5,2.3,1.3,0.3,Iris-setosa +4.4,3.2,1.3,0.2,Iris-setosa +5.0,3.5,1.6,0.6,Iris-setosa +5.1,3.8,1.9,0.4,Iris-setosa +4.8,3.0,1.4,0.3,Iris-setosa +5.1,3.8,1.6,0.2,Iris-setosa +4.6,3.2,1.4,0.2,Iris-setosa +5.3,3.7,1.5,0.2,Iris-setosa +5.0,3.3,1.4,0.2,Iris-setosa +7.0,3.2,4.7,1.4,Iris-versicolor +6.4,3.2,4.5,1.5,Iris-versicolor +6.9,3.1,4.9,1.5,Iris-versicolor +5.5,2.3,4.0,1.3,Iris-versicolor +6.5,2.8,4.6,1.5,Iris-versicolor +5.7,2.8,4.5,1.3,Iris-versicolor +6.3,3.3,4.7,1.6,Iris-versicolor +4.9,2.4,3.3,1.0,Iris-versicolor +6.6,2.9,4.6,1.3,Iris-versicolor +5.2,2.7,3.9,1.4,Iris-versicolor +5.0,2.0,3.5,1.0,Iris-versicolor +5.9,3.0,4.2,1.5,Iris-versicolor +6.0,2.2,4.0,1.0,Iris-versicolor +6.1,2.9,4.7,1.4,Iris-versicolor 
+5.6,2.9,3.6,1.3,Iris-versicolor +6.7,3.1,4.4,1.4,Iris-versicolor +5.6,3.0,4.5,1.5,Iris-versicolor +5.8,2.7,4.1,1.0,Iris-versicolor +6.2,2.2,4.5,1.5,Iris-versicolor +5.6,2.5,3.9,1.1,Iris-versicolor +5.9,3.2,4.8,1.8,Iris-versicolor +6.1,2.8,4.0,1.3,Iris-versicolor +6.3,2.5,4.9,1.5,Iris-versicolor +6.1,2.8,4.7,1.2,Iris-versicolor +6.4,2.9,4.3,1.3,Iris-versicolor +6.6,3.0,4.4,1.4,Iris-versicolor +6.8,2.8,4.8,1.4,Iris-versicolor +6.7,3.0,5.0,1.7,Iris-versicolor +6.0,2.9,4.5,1.5,Iris-versicolor +5.7,2.6,3.5,1.0,Iris-versicolor +5.5,2.4,3.8,1.1,Iris-versicolor +5.5,2.4,3.7,1.0,Iris-versicolor +5.8,2.7,3.9,1.2,Iris-versicolor +6.0,2.7,5.1,1.6,Iris-versicolor +5.4,3.0,4.5,1.5,Iris-versicolor +6.0,3.4,4.5,1.6,Iris-versicolor +6.7,3.1,4.7,1.5,Iris-versicolor +6.3,2.3,4.4,1.3,Iris-versicolor +5.6,3.0,4.1,1.3,Iris-versicolor +5.5,2.5,4.0,1.3,Iris-versicolor +5.5,2.6,4.4,1.2,Iris-versicolor +6.1,3.0,4.6,1.4,Iris-versicolor +5.8,2.6,4.0,1.2,Iris-versicolor +5.0,2.3,3.3,1.0,Iris-versicolor +5.6,2.7,4.2,1.3,Iris-versicolor +5.7,3.0,4.2,1.2,Iris-versicolor +5.7,2.9,4.2,1.3,Iris-versicolor +6.2,2.9,4.3,1.3,Iris-versicolor +5.1,2.5,3.0,1.1,Iris-versicolor +5.7,2.8,4.1,1.3,Iris-versicolor +6.3,3.3,6.0,2.5,Iris-virginica +5.8,2.7,5.1,1.9,Iris-virginica +7.1,3.0,5.9,2.1,Iris-virginica +6.3,2.9,5.6,1.8,Iris-virginica +6.5,3.0,5.8,2.2,Iris-virginica +7.6,3.0,6.6,2.1,Iris-virginica +4.9,2.5,4.5,1.7,Iris-virginica +7.3,2.9,6.3,1.8,Iris-virginica +6.7,2.5,5.8,1.8,Iris-virginica +7.2,3.6,6.1,2.5,Iris-virginica +6.5,3.2,5.1,2.0,Iris-virginica +6.4,2.7,5.3,1.9,Iris-virginica +6.8,3.0,5.5,2.1,Iris-virginica +5.7,2.5,5.0,2.0,Iris-virginica +5.8,2.8,5.1,2.4,Iris-virginica +6.4,3.2,5.3,2.3,Iris-virginica +6.5,3.0,5.5,1.8,Iris-virginica +7.7,3.8,6.7,2.2,Iris-virginica +7.7,2.6,6.9,2.3,Iris-virginica +6.0,2.2,5.0,1.5,Iris-virginica +6.9,3.2,5.7,2.3,Iris-virginica +5.6,2.8,4.9,2.0,Iris-virginica +7.7,2.8,6.7,2.0,Iris-virginica +6.3,2.7,4.9,1.8,Iris-virginica +6.7,3.3,5.7,2.1,Iris-virginica +7.2,3.2,6.0,1.8,Iris-virginica +6.2,2.8,4.8,1.8,Iris-virginica +6.1,3.0,4.9,1.8,Iris-virginica +6.4,2.8,5.6,2.1,Iris-virginica +7.2,3.0,5.8,1.6,Iris-virginica +7.4,2.8,6.1,1.9,Iris-virginica +7.9,3.8,6.4,2.0,Iris-virginica +6.4,2.8,5.6,2.2,Iris-virginica +6.3,2.8,5.1,1.5,Iris-virginica +6.1,2.6,5.6,1.4,Iris-virginica +7.7,3.0,6.1,2.3,Iris-virginica +6.3,3.4,5.6,2.4,Iris-virginica +6.4,3.1,5.5,1.8,Iris-virginica +6.0,3.0,4.8,1.8,Iris-virginica +6.9,3.1,5.4,2.1,Iris-virginica +6.7,3.1,5.6,2.4,Iris-virginica +6.9,3.1,5.1,2.3,Iris-virginica +5.8,2.7,5.1,1.9,Iris-virginica +6.8,3.2,5.9,2.3,Iris-virginica +6.7,3.3,5.7,2.5,Iris-virginica +6.7,3.0,5.2,2.3,Iris-virginica +6.3,2.5,5.0,1.9,Iris-virginica +6.5,3.0,5.2,2.0,Iris-virginica +6.2,3.4,5.4,2.3,Iris-virginica +5.9,3.0,5.1,1.8,Iris-virginica + diff --git a/frontend/src/components/DataSetExecController.vue b/frontend/src/components/DataSetExecController.vue index 21e3e02cf..b2abc41a7 100644 --- a/frontend/src/components/DataSetExecController.vue +++ b/frontend/src/components/DataSetExecController.vue @@ -3,8 +3,7 @@ diff --git a/insertMongo.py b/insertMongo.py index fffbe5b66..41679f589 100644 --- a/insertMongo.py +++ b/insertMongo.py @@ -9,8 +9,7 @@ import os def import_content(filepath): mng_client = pymongo.MongoClient('localhost', 27017) mng_db = mng_client['mydb'] - #collection_name = 'StanceCTest' - collection_name = 'StanceC' + collection_name = 'HeartC' db_cm = mng_db[collection_name] cdir = os.path.dirname(__file__) file_res = os.path.join(cdir, filepath) @@ -21,5 +20,5 @@ def 
import_content(filepath):
     db_cm.insert(data_json)
 
 if __name__ == "__main__":
-    filepath = '/Users/anchaa/Documents/Research/StackVis_code/StackVis/stance.csv'
+    filepath = './data/heart.csv'
     import_content(filepath)
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 000000000..f88e140a6
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,7 @@
+flask_cors
+scikit-learn
+pandas
+scikit-learn-extra
+sk-dist
+eli5
+pymongo
\ No newline at end of file
diff --git a/run.py b/run.py
index e00571c5c..9b52c53bf 100644
--- a/run.py
+++ b/run.py
@@ -50,12 +50,11 @@ from sklearn.decomposition import PCA
 from mlxtend.classifier import StackingCVClassifier
 from mlxtend.feature_selection import ColumnSelector
 
-from skdist.distribute.search import DistGridSearchCV
-from pyspark.sql import SparkSession
+from sklearn.model_selection import GridSearchCV
 
 from scipy.spatial import procrustes
 
-# This block of code is for the connection between the server, the database, and the client (plus routing).
+# This block of code == for the connection between the server, the database, and the client (plus routing).
 
 # Access MongoDB
 app = Flask(__name__)
@@ -653,20 +652,12 @@ memory = Memory(location, verbose=0)
 # calculating for all algorithms and models the performance and other results
 @memory.cache
 def GridSearchForModels(XData, yData, clf, params, eachAlgor, AlgorithmsIDsEnd, toggle):
-    print('loop here')
-    # instantiate spark session
-    spark = (
-        SparkSession
-        .builder
-        .getOrCreate()
-        )
-    sc = spark.sparkContext
 
     # this is the grid we use to train the models
-    grid = DistGridSearchCV(
-        estimator=clf, param_grid=params,
-        sc=sc, cv=crossValidation, refit='accuracy', scoring=scoring,
+    grid = GridSearchCV(
+        estimator=clf, param_grid=params,
+        cv=crossValidation, refit='accuracy', scoring=scoring,
         verbose=0, n_jobs=-1)
 
     # fit and extract the probabilities
@@ -974,11 +965,11 @@ def RetrieveFactors():
     flagLocal = 0
     countRemovals = 0
     for l,el in enumerate(factors):
-        if el is 0:
+        if el == 0:
             loopThroughMetrics.drop(loopThroughMetrics.columns[[l-countRemovals]], axis=1, inplace=True)
             countRemovals = countRemovals + 1
             flagLocal = 1
-    if flagLocal is 1:
+    if flagLocal == 1:
         ModelSpaceMDSNew = FunMDS(loopThroughMetrics)
         ModelSpaceTSNENew = FunTsne(loopThroughMetrics)
         ModelSpaceTSNENew = ModelSpaceTSNENew.tolist()
@@ -1536,7 +1527,7 @@ def preProcsumPerMetric(factors):
         name, values = row
         for loop, elements in enumerate(values):
             rowSum = elements*factors[loop] + rowSum
-        if sum(factors) is 0:
+        if sum(factors) == 0:
             sumPerClassifier = 0
         else:
             sumPerClassifier.append(rowSum/sum(factors) * 100)
@@ -2005,37 +1996,37 @@ def RetrieveSelDataPoints():
             RetrieveParamsCleared[key] = withoutDuplicates
         RetrieveParamsClearedListGradB.append(RetrieveParamsCleared)
 
-    if (len(paramsListSeptoDicKNN['n_neighbors']) is 0):
+    if (len(paramsListSeptoDicKNN['n_neighbors']) == 0):
         RetrieveParamsClearedListKNN = []
 
-    if (len(paramsListSeptoDicSVC['C']) is 0):
+    if (len(paramsListSeptoDicSVC['C']) == 0):
         RetrieveParamsClearedListSVC = []
 
-    if (len(paramsListSeptoDicGausNB['var_smoothing']) is 0):
+    if (len(paramsListSeptoDicGausNB['var_smoothing']) == 0):
         RetrieveParamsClearedListGausNB = []
 
-    if (len(paramsListSeptoDicMLP['alpha']) is 0):
+    if (len(paramsListSeptoDicMLP['alpha']) == 0):
         RetrieveParamsClearedListMLP = []
 
-    if (len(paramsListSeptoDicLR['C']) is 0):
+    if (len(paramsListSeptoDicLR['C']) == 0):
         RetrieveParamsClearedListLR = []
 
-    if (len(paramsListSeptoDicLDA['shrinkage']) is 0):
+    if (len(paramsListSeptoDicLDA['shrinkage']) == 0):
         RetrieveParamsClearedListLDA = []
 
-    if (len(paramsListSeptoDicQDA['reg_param']) is 0):
+    if (len(paramsListSeptoDicQDA['reg_param']) == 0):
         RetrieveParamsClearedListQDA = []
 
-    if (len(paramsListSeptoDicRF['n_estimators']) is 0):
+    if (len(paramsListSeptoDicRF['n_estimators']) == 0):
         RetrieveParamsClearedListRF = []
 
-    if (len(paramsListSeptoDicExtraT['n_estimators']) is 0):
+    if (len(paramsListSeptoDicExtraT['n_estimators']) == 0):
         RetrieveParamsClearedListExtraT = []
 
-    if (len(paramsListSeptoDicAdaB['n_estimators']) is 0):
+    if (len(paramsListSeptoDicAdaB['n_estimators']) == 0):
         RetrieveParamsClearedListAdaB = []
 
-    if (len(paramsListSeptoDicGradB['n_estimators']) is 0):
+    if (len(paramsListSeptoDicGradB['n_estimators']) == 0):
         RetrieveParamsClearedListGradB = []
 
     for eachAlgor in algorithms:
@@ -2355,21 +2346,15 @@ def GridSearchSel(clf, params, factors, AlgorithmsIDsEnd, DataPointsSel):
         resultsMetrics.append([]) # Position: 0 and so on
         parametersSelData.append([])
     else:
-        # instantiate spark session
-        spark = (
-            SparkSession
-            .builder
-            .getOrCreate()
-            )
-        sc = spark.sparkContext
+
         XDatasubset = XData.iloc[DataPointsSel,:]
         yDataSubset = [yData[i] for i in DataPointsSel]
 
         # this is the grid we use to train the models
-        grid = DistGridSearchCV(
-            estimator=clf, param_grid=params,
-            sc=sc, cv=crossValidation, refit='accuracy', scoring=scoring,
+        grid = GridSearchCV(
+            estimator=clf, param_grid=params,
+            cv=crossValidation, refit='accuracy', scoring=scoring,
             verbose=0, n_jobs=-1)
 
         # fit and extract the probabilities
@@ -2506,7 +2491,7 @@ def preProcsumPerMetricAccordingtoData(factors, loopThroughMetrics):
         name, values = row
         for loop, elements in enumerate(values):
             rowSum = elements*factors[loop] + rowSum
-        if sum(factors) is 0:
+        if sum(factors) == 0:
             sumPerClassifier = 0
         else:
             sumPerClassifier.append(rowSum/sum(factors) * 100)
@@ -2755,7 +2740,7 @@ def EnsembleModel(Models, keyRetrieved):
         for loop in Models['ClassifiersList']:
             if (int(loop) == int(modHere)):
                 flag = 1
-        if (flag is 1):
+        if (flag == 1):
             all_classifiersSelection.append(all_classifiers[index])
 
     sclf = StackingCVClassifier(classifiers=all_classifiersSelection,
@@ -3055,9 +3040,11 @@
     # print(recall_score(yDataTest, y_pred, average='macro'))
     # print(f1_score(yDataTest, y_pred, average='macro'))
 
-    print(precision_score(yDataTest, y_pred, average='weighted'))
-    print(recall_score(yDataTest, y_pred, average='weighted'))
-    print(f1_score(yDataTest, y_pred, average='weighted'))
+    print(precision_score(yDataTest, y_pred, pos_label=0, average='weighted'))
+    print(recall_score(yDataTest, y_pred, pos_label=0, average='weighted'))
+    print(f1_score(yDataTest, y_pred, pos_label=0, average='weighted'))
+
+    print(report)
 
     return 'Okay'
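Note on insertMongo.py: with the path and collection changes above, the script loads data/heart.csv into the HeartC collection of the local mydb database through Collection.insert(), which PyMongo deprecated in 3.x and removed in 4.0. Below is a minimal sketch of the same import step; it reuses the database name, collection name, and file path from the patch, but the pandas-based loading and the insert_many() call are illustrative substitutions, not the author's exact code.

# Hedged sketch of the CSV-to-MongoDB import performed by insertMongo.py.
# Database ('mydb'), collection ('HeartC'), and path ('./data/heart.csv') come from
# the patch; pandas and insert_many() are assumptions, chosen because
# Collection.insert() is deprecated in PyMongo 3.x and removed in 4.0.
import pandas as pd
import pymongo

def import_heart_csv(filepath):
    client = pymongo.MongoClient('localhost', 27017)
    collection = client['mydb']['HeartC']
    records = pd.read_csv(filepath).to_dict(orient='records')  # one dict per CSV row
    collection.insert_many(records)

if __name__ == "__main__":
    import_heart_csv('./data/heart.csv')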
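Note on the grid-search change: both GridSearchForModels() and GridSearchSel() now call scikit-learn's GridSearchCV directly instead of spinning up a Spark session for skdist's DistGridSearchCV, so the search runs on local cores via n_jobs=-1. The sketch below shows the same call pattern in isolation; the KNN estimator, parameter grid, scoring dictionary, and fold count are illustrative stand-ins for the clf, params, scoring, and crossValidation objects that run.py defines elsewhere.

# Standalone sketch of the GridSearchCV pattern used in the patch, with assumed
# stand-ins for run.py's clf/params/scoring/crossValidation globals.
import pandas as pd
from sklearn.model_selection import GridSearchCV
from sklearn.neighbors import KNeighborsClassifier

data = pd.read_csv('./data/heart.csv')
XData = data.drop(columns=['Outcome*'])   # 'Outcome*' is the target column in heart.csv
yData = data['Outcome*']

scoring = {'accuracy': 'accuracy', 'f1_weighted': 'f1_weighted'}  # example multi-metric scoring
crossValidation = 5                                               # example fold count

grid = GridSearchCV(
    estimator=KNeighborsClassifier(), param_grid={'n_neighbors': list(range(1, 26))},
    cv=crossValidation, refit='accuracy', scoring=scoring,
    verbose=0, n_jobs=-1)
grid.fit(XData, yData)
print(grid.best_params_, grid.best_score_)  # refit='accuracy' exposes the best model for predict_proba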
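Note on the metric printouts in EnsembleModel(): scikit-learn ignores pos_label whenever average is anything other than 'binary', so the added pos_label=0 does not change the weighted precision, recall, and F1 values (recent releases emit a UserWarning about it). The toy example below, with made-up labels, illustrates the behavior; average='binary' with pos_label=0 is the combination that scores class 0 of the outcome specifically.

# Toy labels for illustration only.
from sklearn.metrics import f1_score

y_true = [0, 0, 1, 1, 0, 1]
y_pred = [0, 1, 1, 1, 0, 0]

print(f1_score(y_true, y_pred, average='weighted'))               # weighted over both classes
print(f1_score(y_true, y_pred, pos_label=0, average='weighted'))  # same value; pos_label is ignored
print(f1_score(y_true, y_pred, pos_label=0, average='binary'))    # F1 for class 0 only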