diff --git a/__pycache__/run.cpython-37.pyc b/__pycache__/run.cpython-37.pyc
index 75fbcdb7e..585894f3e 100644
Binary files a/__pycache__/run.cpython-37.pyc and b/__pycache__/run.cpython-37.pyc differ
diff --git a/cachedir/joblib/run/GridSearchForModels/34dd4fa44cf8d83f42cfacc70a3fdd71/metadata.json b/cachedir/joblib/run/GridSearchForModels/34dd4fa44cf8d83f42cfacc70a3fdd71/metadata.json
new file mode 100644
index 000000000..69de5cf29
--- /dev/null
+++ b/cachedir/joblib/run/GridSearchForModels/34dd4fa44cf8d83f42cfacc70a3fdd71/metadata.json
@@ -0,0 +1 @@
+{"duration": 307.1607220172882, "input_args": {"XData": " sepal_l sepal_w petal_l petal_w\n0 6.3 3.3 6.0 2.5\n1 7.1 3.0 5.9 2.1\n2 5.8 2.7 5.1 1.9\n3 6.3 2.9 5.6 1.8\n4 7.6 3.0 6.6 2.1\n.. ... ... ... ...\n145 5.1 3.8 1.6 0.2\n146 5.0 3.5 1.6 0.6\n147 5.1 3.4 1.5 0.2\n148 4.6 3.2 1.4 0.2\n149 4.8 3.0 1.4 0.3\n\n[150 rows x 4 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]", "clf": "KNeighborsClassifier(algorithm='ball_tree', leaf_size=30, metric='minkowski',\n metric_params=None, n_jobs=None, n_neighbors=24, p=2,\n weights='distance')", "params": "{'n_neighbors': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24], 'weights': ['uniform', 'distance'], 'algorithm': ['brute', 'kd_tree', 'ball_tree'], 'metric': ['chebyshev', 'manhattan', 'euclidean', 'minkowski']}", "eachAlgor": "'KNN'", "factors": "[1, 1, 1, 1, 1]", "AlgorithmsIDsEnd": "0"}}
\ No newline at end of file
diff --git a/cachedir/joblib/run/GridSearchForModels/65cf512fe73627ad01497d789001f38b/output.pkl b/cachedir/joblib/run/GridSearchForModels/65cf512fe73627ad01497d789001f38b/output.pkl
new file mode 100644
index 000000000..ed3729944
Binary files /dev/null and b/cachedir/joblib/run/GridSearchForModels/65cf512fe73627ad01497d789001f38b/output.pkl differ
diff --git a/cachedir/joblib/run/GridSearchForModels/c426e0f3d8f4e01216f1b2a58820e6ec/metadata.json b/cachedir/joblib/run/GridSearchForModels/c426e0f3d8f4e01216f1b2a58820e6ec/metadata.json
new file mode 100644
index 000000000..72bb6d13a
--- /dev/null
+++ b/cachedir/joblib/run/GridSearchForModels/c426e0f3d8f4e01216f1b2a58820e6ec/metadata.json
@@ -0,0 +1 @@
+{"duration": 393.25071001052856, "input_args": {"XData": " sepal_l sepal_w petal_l petal_w\n0 6.3 3.3 6.0 2.5\n1 7.1 3.0 5.9 2.1\n2 5.8 2.7 5.1 1.9\n3 6.3 2.9 5.6 1.8\n4 7.6 3.0 6.6 2.1\n.. ... ... ... ...\n145 5.1 3.8 1.6 0.2\n146 5.0 3.5 1.6 0.6\n147 5.1 3.4 1.5 0.2\n148 4.6 3.2 1.4 0.2\n149 4.8 3.0 1.4 0.3\n\n[150 rows x 4 columns]", "yData": "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]", "clf": "RandomForestClassifier(bootstrap=True, class_weight=None, criterion='entropy',\n max_depth=None, max_features='auto', max_leaf_nodes=None,\n min_impurity_decrease=0.0, min_impurity_split=None,\n min_samples_leaf=1, min_samples_split=2,\n min_weight_fraction_leaf=0.0, n_estimators=119,\n n_jobs=None, oob_score=False, random_state=None,\n verbose=0, warm_start=False)", "params": "{'n_estimators': [40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119], 'criterion': ['gini', 'entropy']}", "eachAlgor": "'RF'", "factors": "[1, 1, 1, 1, 1]", "AlgorithmsIDsEnd": "576"}}
\ No newline at end of file
diff --git a/cachedir/joblib/run/GridSearchForModels/ef9a593cce41dd71bdac1d445edc2a58/output.pkl b/cachedir/joblib/run/GridSearchForModels/ef9a593cce41dd71bdac1d445edc2a58/output.pkl
new file mode 100644
index 000000000..63acf57b2
Binary files /dev/null and b/cachedir/joblib/run/GridSearchForModels/ef9a593cce41dd71bdac1d445edc2a58/output.pkl differ
diff --git a/cachedir/joblib/run/GridSearchForModels/func_code.py b/cachedir/joblib/run/GridSearchForModels/func_code.py
index a3ab84b4b..e10cd171d 100644
--- a/cachedir/joblib/run/GridSearchForModels/func_code.py
+++ b/cachedir/joblib/run/GridSearchForModels/func_code.py
@@ -1,6 +1,6 @@
-# first line: 393
+# first line: 454
 @memory.cache
-def GridSearchForModels(XData,yDataclf, params, eachAlgor, factors, AlgorithmsIDsEnd):
+def GridSearchForModels(XData, yData, clf, params, eachAlgor, factors, AlgorithmsIDsEnd):
 
     # instantiate spark session
     spark = (
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index 6a4382886..400512859 100755
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -3171,9 +3171,9 @@
       "integrity": "sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA=="
     },
     "@types/node": {
-      "version": "13.5.0",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-13.5.0.tgz",
-      "integrity": "sha512-Onhn+z72D2O2Pb2ql2xukJ55rglumsVo1H6Fmyi8mlU9SvKdBk/pUSUAiBY/d9bAOF7VVWajX3sths/+g6ZiAQ=="
+      "version": "13.5.1",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-13.5.1.tgz",
+      "integrity": "sha512-Jj2W7VWQ2uM83f8Ls5ON9adxN98MvyJsMSASYFuSvrov8RMRY64Ayay7KV35ph1TSGIJ2gG9ZVDdEq3c3zaydA=="
     },
     "@types/q": {
       "version": "1.5.2",
@@ -21872,9 +21872,9 @@
       "integrity": "sha1-bolgne69fc2vja7Mmuo5z1haCRg="
     },
     "rimraf": {
-      "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.0.tgz",
-      "integrity": "sha512-NDGVxTsjqfunkds7CqsOiEnxln4Bo7Nddl3XhS4pXg5OzwkLqJ971ZVAAnB+DDLnF76N+VnDEiBHaVV8I06SUg==",
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.1.tgz",
+      "integrity": "sha512-IQ4ikL8SjBiEDZfk+DFVwqRK8md24RWMEJkdSlgNLkyyAImcjf8SWvU1qFMDOb4igBClbTQ/ugPqXcRwdFTxZw==",
       "dev": true,
       "requires": {
         "glob": "^7.1.3"
diff --git a/frontend/package.json b/frontend/package.json
index 50542a5b6..d68ec7d54 100755
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -19,7 +19,7 @@
     "@fortawesome/vue-fontawesome": "^0.1.9",
     "@statnett/vue-plotly": "^0.3.2",
     "@types/d3-drag": "^1.2.3",
-    "@types/node": "^13.5.0",
+    "@types/node": "^13.5.1",
     "ajv": "^6.11.0",
     "audit": "0.0.6",
     "axios": "^0.19.2",
@@ -116,7 +116,7 @@
     "postcss-import": "^12.0.1",
     "postcss-loader": "^3.0.0",
     "postcss-url": "^8.0.0",
-    "rimraf": "^3.0.0",
+    "rimraf": "^3.0.1",
     "sass": "^1.25.0",
     "sass-loader": "^8.0.2",
     "semver": "^7.1.1",
diff --git a/frontend/src/components/DataSetExecController.vue b/frontend/src/components/DataSetExecController.vue
index 9f36a43f5..0a28ebc25 100644
--- a/frontend/src/components/DataSetExecController.vue
+++ b/frontend/src/components/DataSetExecController.vue
@@ -53,21 +53,23 @@ export default {
        d3.select("#data").select("input").remove(); // Remove the selection field.
        EventBus.$emit('SendToServerDataSetConfirmation', this.RetrieveValueCSV)
      } else {
+       EventBus.$emit('SendToServerDataSetConfirmation', this.RetrieveValueCSV)
        d3.select("#data").select("input").remove();
        this.dataset = ""
        var data
        d3.select("#data")
          .append("input")
          .attr("type", "file")
-         .style("font-size", "16px")
+         .style("font-size", "18.5px")
          .style("width", "200px")
          .on("change", function() {
            var file = d3.event.target.files[0];
            Papa.parse(file, {
              header: true,
              dynamicTyping: true,
+             skipEmptyLines: true,
              complete: function(results) {
-               data = results;
+               data = results.data;
                EventBus.$emit('SendToServerLocalFile', data)
              }
            });
diff --git a/frontend/src/components/Main.vue b/frontend/src/components/Main.vue
index d99ce25d4..47a3e3475 100755
--- a/frontend/src/components/Main.vue
+++ b/frontend/src/components/Main.vue
@@ -317,6 +317,26 @@ export default Vue.extend({
     SendToServerData () { // fix that for the upload!
      console.log(this.localFile)
+      const path = `http://127.0.0.1:5000/data/SendtoSeverDataSet`
+
+      const postData = {
+        uploadedData: this.localFile
+      }
+      const axiosConfig = {
+        headers: {
+          'Content-Type': 'application/json',
+          'Access-Control-Allow-Origin': '*',
+          'Access-Control-Allow-Headers': 'Origin, Content-Type, X-Auth-Token',
+          'Access-Control-Allow-Methods': 'GET, PUT, POST, DELETE, OPTIONS'
+        }
+      }
+      axios.post(path, postData, axiosConfig)
+        .then(response => {
+          console.log('Sent the new uploaded data to the server!')
+        })
+        .catch(error => {
+          console.log(error)
+        })
     },
     SendSelectedPointsToServer () {
       if (this.ClassifierIDsList === ''){
@@ -489,27 +509,32 @@ export default Vue.extend({
        })
     },
     fileNameSend () {
+      if (this.RetrieveValueFile == "local") {
+        this.DataSpaceCall()
+        this.SendAlgorithmsToServer()
+      } else {
       const path = `http://127.0.0.1:5000/data/ServerRequest`
-      const postData = {
-        fileName: this.RetrieveValueFile,
-      }
-      const axiosConfig = {
-        headers: {
-          'Content-Type': 'application/json',
-          'Access-Control-Allow-Origin': '*',
-          'Access-Control-Allow-Headers': 'Origin, Content-Type, X-Auth-Token',
-          'Access-Control-Allow-Methods': 'GET, PUT, POST, DELETE, OPTIONS'
-        }
+        const postData = {
+          fileName: this.RetrieveValueFile,
+        }
+        const axiosConfig = {
+          headers: {
+            'Content-Type': 'application/json',
+            'Access-Control-Allow-Origin': '*',
+            'Access-Control-Allow-Headers': 'Origin, Content-Type, X-Auth-Token',
+            'Access-Control-Allow-Methods': 'GET, PUT, POST, DELETE, OPTIONS'
      }
-      axios.post(path, postData, axiosConfig)
-        .then(response => {
-          console.log('Send request to server! FileName was sent successfully!')
-          this.DataSpaceCall()
-          this.SendAlgorithmsToServer()
-        })
-        .catch(error => {
-          console.log(error)
-        })
+        }
+        axios.post(path, postData, axiosConfig)
+          .then(response => {
+            console.log('Send request to server! FileName was sent successfully!')
+            this.DataSpaceCall()
+            this.SendAlgorithmsToServer()
+          })
+          .catch(error => {
+            console.log(error)
+          })
+      }
     },
     DataSpaceCall () {
       const path = `http://localhost:5000/data/requestDataSpaceResults`
diff --git a/run.py b/run.py
index c1b19e997..e0a8771a6 100644
--- a/run.py
+++ b/run.py
@@ -100,6 +100,12 @@ def Reset():
     global crossValidation
     crossValidation = 3
+
+    # models
+    global KNNModels
+    KNNModels = []
+    global RFModels
+    RFModels = []
 
     global scoring
     #scoring = {'accuracy': 'accuracy', 'f1_macro': 'f1_weighted', 'precision': 'precision_weighted', 'recall': 'recall_weighted', 'jaccard': 'jaccard_weighted', 'neg_log_loss': 'neg_log_loss', 'r2': 'r2', 'neg_mean_absolute_error': 'neg_mean_absolute_error', 'neg_mean_absolute_error': 'neg_mean_absolute_error'}
@@ -192,6 +198,12 @@ def RetrieveFileName():
     global factors
     factors = [1,1,1,1,1]
 
+    # models
+    global KNNModels
+    KNNModels = []
+    global RFModels
+    RFModels = []
+
     global results
     results = []
@@ -220,6 +232,66 @@ def RetrieveFileName():
     DataSetSelection()
     return 'Everything is okay'
 
+def Convert(lst):
+    it = iter(lst)
+    res_dct = dict(zip(it, it))
+    return res_dct
+
+# Retrieve data set from client
+@cross_origin(origin='localhost',headers=['Content-Type','Authorization'])
+@app.route('/data/SendtoSeverDataSet', methods=["GET", "POST"])
+def SendToServerData():
+
+    uploadedData = request.get_data().decode('utf8').replace("'", '"')
+    uploadedDataParsed = json.loads(uploadedData)
+    DataResultsRaw = uploadedDataParsed['uploadedData']
+
+    DataResults = copy.deepcopy(DataResultsRaw)
+
+    for dictionary in DataResultsRaw:
+        for key in dictionary.keys():
+            if (key.find('*') != -1):
+                target = key
+                continue
+            continue
+    DataResultsRaw.sort(key=lambda x: x[target], reverse=True)
+    DataResults.sort(key=lambda x: x[target], reverse=True)
+
+    for dictionary in DataResults:
+        del dictionary[target]
+
+    global AllTargets
+    global target_names
+    AllTargets = [o[target] for o in DataResultsRaw]
+    AllTargetsFloatValues = []
+
+    previous = None
+    Class = 0
+    for i, value in enumerate(AllTargets):
+        if (i == 0):
+            previous = value
+            target_names.append(value)
+        if (value == previous):
+            AllTargetsFloatValues.append(Class)
+        else:
+            Class = Class + 1
+            target_names.append(value)
+            AllTargetsFloatValues.append(Class)
+            previous = value
+
+    ArrayDataResults = pd.DataFrame.from_dict(DataResults)
+
+    global XData, yData, RANDOM_SEED
+    XData, yData = ArrayDataResults, AllTargetsFloatValues
+
+    global XDataStored, yDataStored
+    XDataStored = XData.copy()
+    yDataStored = yData.copy()
+
+    callPreResults()
+
+    return 'Processed uploaded data set'
+
 # Sent data to client
 @app.route('/data/ClientRequest', methods=["GET", "POST"])
 def CollectionData():
@@ -231,6 +303,7 @@ def CollectionData():
 
 def DataSetSelection():
     DataResults = copy.deepcopy(DataResultsRaw)
+
     for dictionary in DataResultsRaw:
         for key in dictionary.keys():
             if (key.find('*') != -1):
@@ -559,11 +632,11 @@ def RetrieveModelsParam():
     counter1 = 0
     counter2 = 0
+
     global KNNModels
-    KNNModels = []
     global RFModels
-    RFModels = []
     global algorithmsList
+
     algorithmsList = RetrieveModelsPar['algorithms']
 
     for index, items in enumerate(algorithmsList):
@@ -855,7 +928,7 @@ def ReturnResults(ModelSpaceMDS,ModelSpaceTSNE,DataSpaceList,PredictionSpaceList
     featureScoresCon = featureScoresCon.to_json(orient='records')
     XDataJSONEntireSet = XData.to_json(orient='records')
     XDataJSON = XData.columns.tolist()
-    print(XData)
+
     Results.append(json.dumps(sumPerClassifier)) # Position: 0
     Results.append(json.dumps(ModelSpaceMDS)) # Position: 1
     Results.append(json.dumps(parametersGenPD)) # Position: 2
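
Note on the cachedir/joblib/run/GridSearchForModels/ files above: func_code.py, metadata.json, and output.pkl are joblib caching artifacts. memory.cache stores the decorated function's source in func_code.py and, for each call, the input arguments in metadata.json and the return value in output.pkl; fixing the signature from (XData,yDataclf, ...) to (XData, yData, clf, ...) changes that cached source, which presumably is why fresh cache entries appear in this commit. A minimal sketch of the pattern, using an illustrative function name and cache path rather than the real GridSearchForModels from run.py:

    from joblib import Memory

    memory = Memory('cachedir', verbose=0)

    @memory.cache
    def grid_search_sketch(XData, yData, clf, params):
        # stand-in for the real grid search; joblib reruns this only when the
        # arguments (or the cached function source) change
        return {'n_rows': len(XData), 'clf': str(clf), 'params': params}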
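
The new Convert() helper added to run.py builds a dictionary by pairing consecutive elements of a flat list. No caller is visible in these hunks, so the input below is purely illustrative:

    def Convert(lst):
        it = iter(lst)
        res_dct = dict(zip(it, it))
        return res_dct

    print(Convert(['n_neighbors', 5, 'weights', 'distance']))
    # {'n_neighbors': 5, 'weights': 'distance'}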
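
A minimal sketch, not part of the commit, of how a client could exercise the new /data/SendtoSeverDataSet route, assuming the Flask server from run.py is running locally on port 5000. The payload mirrors what Main.vue's SendToServerData() posts: the Papa.parse row dictionaries under an "uploadedData" key, with the target column marked by a '*' in its name (the column names and values below are assumptions for illustration only):

    import requests

    rows = [
        {'sepal_l': 6.3, 'sepal_w': 3.3, 'petal_l': 6.0, 'petal_w': 2.5, 'species*': 'virginica'},
        {'sepal_l': 5.1, 'sepal_w': 3.8, 'petal_l': 1.6, 'petal_w': 0.2, 'species*': 'setosa'},
    ]
    resp = requests.post('http://127.0.0.1:5000/data/SendtoSeverDataSet',
                         json={'uploadedData': rows})
    print(resp.text)  # the endpoint returns 'Processed uploaded data set'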
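
The class-encoding loop inside the new SendToServerData() opens a fresh integer class whenever the target value differs from the previous row, so it relies on the rows having just been sorted by the target column (the endpoint sorts DataResultsRaw immediately before it). A standalone sketch of that loop with a toy, pre-sorted input:

    AllTargets = ['virginica'] * 3 + ['versicolor'] * 3 + ['setosa'] * 3  # toy, already sorted by target
    target_names = []
    AllTargetsFloatValues = []

    previous = None
    Class = 0
    for i, value in enumerate(AllTargets):
        if (i == 0):
            previous = value
            target_names.append(value)
        if (value == previous):
            AllTargetsFloatValues.append(Class)
        else:
            Class = Class + 1
            target_names.append(value)
            AllTargetsFloatValues.append(Class)
            previous = value

    print(target_names)           # ['virginica', 'versicolor', 'setosa']
    print(AllTargetsFloatValues)  # [0, 0, 0, 1, 1, 1, 2, 2, 2]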