several measurements

master
parent ba8ddb2831
commit 597210c3a0
Changed files (5):
  BIN  __pycache__/run.cpython-37.pyc
    6  frontend/src/components/DataSetExecController.vue
    4  frontend/src/components/FeatureSpace2.vue
    2  frontend/src/components/Main.vue
  143  run.py

__pycache__/run.cpython-37.pyc
Binary file not shown.

frontend/src/components/DataSetExecController.vue
@@ -2,8 +2,8 @@
 <div>
 <label id="data" for="param-dataset" data-toggle="tooltip" data-placement="right" title="Tip: use one of the data sets already provided or upload a new file.">{{ dataset }}</label>
 <select id="selectFile" @change="selectDataSet()">
-<option value="HeartC.csv" selected>Heart Disease</option>
-<option value="IrisC.csv">Iris</option>
+<option value="HeartC.csv">Heart Disease</option>
+<option value="IrisC.csv" selected>Iris</option>
 <option value="local">Upload New File</option>
 </select>
 <button class="btn-outline-success"
@@ -34,7 +34,7 @@ export default {
 name: 'DataSetExecController',
 data () {
 return {
-defaultDataSet: 'HeartC', // default value for the first data set
+defaultDataSet: 'IrisC', // default value for the first data set
 searchText: 'Exploration of Features',
 resetText: 'Reset',
 dataset: 'Data set'

frontend/src/components/FeatureSpace2.vue
@@ -23,8 +23,8 @@ export default {
 },
 methods: {
 initializeNetwork () {
-var dataLoc = JSON.parse(this.dataFS2[1])
-var listofNodes = this.dataFS2[4]
+var dataLoc = JSON.parse(this.dataFS2[3])
+var listofNodes = this.dataFS2[0]
 var nodes = []
 listofNodes.forEach(element => nodes.push({"name": element}))
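Note on the index changes above: dataFS2 appears to carry the correlResul payload (packCorr) that run.py assembles below; the wiring through Main.vue is assumed here. The old payload packed the four per-quadrant correlation matrices first and the column list last, while the new payload puts the feature names at index 0 and the target names at index 1, so this component simply reads the same two items from their new positions. A sketch of the assumed layout, written as comments against the new packCorr order:

    # Assumed packCorr layout after this commit (indices inferred from the
    # packCorr.append(...) order in run.py below):
    #   0      -> list of feature/column names        (this.dataFS2[0], the node list)
    #   1      -> json.dumps(target_names)
    #   2..5   -> corrMatrix1..4, per-quadrant feature correlations
    #             (this.dataFS2[3] is corrMatrix2, parsed here into dataLoc)
    #   6..9   -> corrMatrixComb1..4 (features vs. one-hot-encoded classes)
    #   10..13 -> corrMatrixCombTotal1..4 (features vs. raw target)
    #   14..17 -> unique target values per quadrant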

frontend/src/components/Main.vue
@@ -215,7 +215,7 @@ export default Vue.extend({
 DataResults: '',
 keyNow: 1,
 instancesImportance: '',
-RetrieveValueFile: 'HeartC', // this is for the default data set
+RetrieveValueFile: 'IrisC', // this is for the default data set
 ClassifierIDsList: [],
 ClassifierIDsListCM: [],
 SelectedFeaturesPerClassifier: '',

run.py

@@ -18,6 +18,10 @@ from sklearn.svm import SVC
 from bayes_opt import BayesianOptimization
 from sklearn.model_selection import cross_validate
 from sklearn.model_selection import cross_val_predict
+from sklearn.preprocessing import OneHotEncoder
+from statsmodels.stats.outliers_influence import variance_inflation_factor
+from statsmodels.tools.tools import add_constant
 # this block of code is for the connection between the server, the database, and the client (plus routing)
@@ -498,26 +502,133 @@ def Seperation():
 else:
     quadrant4.append(index)
-Datarows1 = XData.iloc[quadrant1, :]
-Datarows2 = XData.iloc[quadrant2, :]
-Datarows3 = XData.iloc[quadrant3, :]
-Datarows4 = XData.iloc[quadrant4, :]
+DataRows1 = XData.iloc[quadrant1, :]
+DataRows2 = XData.iloc[quadrant2, :]
+DataRows3 = XData.iloc[quadrant3, :]
+DataRows4 = XData.iloc[quadrant4, :]
+corrMatrix1 = DataRows1.corr()
+corrMatrix2 = DataRows2.corr()
+corrMatrix3 = DataRows3.corr()
+corrMatrix4 = DataRows4.corr()
+DataRows1 = DataRows1.reset_index(drop=True)
+DataRows2 = DataRows2.reset_index(drop=True)
+DataRows3 = DataRows3.reset_index(drop=True)
+DataRows4 = DataRows4.reset_index(drop=True)
+targetRows1 = [yData[i] for i in quadrant1]
+targetRows2 = [yData[i] for i in quadrant2]
+targetRows3 = [yData[i] for i in quadrant3]
+targetRows4 = [yData[i] for i in quadrant4]
+targetRows1Arr = np.array(targetRows1)
+targetRows2Arr = np.array(targetRows2)
+targetRows3Arr = np.array(targetRows3)
+targetRows4Arr = np.array(targetRows4)
+uniqueTarget1 = unique(targetRows1)
+uniqueTarget2 = unique(targetRows2)
+uniqueTarget3 = unique(targetRows3)
+uniqueTarget4 = unique(targetRows4)
+if (len(targetRows1Arr) > 0):
+    onehotEncoder1 = OneHotEncoder(sparse=False)
+    targetRows1Arr = targetRows1Arr.reshape(len(targetRows1Arr), 1)
+    onehotEncoder1 = onehotEncoder1.fit_transform(targetRows1Arr)
+    hotEncoderDF1 = pd.DataFrame(onehotEncoder1)
+    concatDF1 = pd.concat([DataRows1, hotEncoderDF1], axis=1)
+    corrMatrixComb1 = concatDF1.corr()
+    corrMatrixComb1 = corrMatrixComb1.iloc[:,-len(uniqueTarget1):]
+else:
+    corrMatrixComb1 = pd.DataFrame()
+if (len(targetRows2Arr) > 0):
+    onehotEncoder2 = OneHotEncoder(sparse=False)
+    targetRows2Arr = targetRows2Arr.reshape(len(targetRows2Arr), 1)
+    onehotEncoder2 = onehotEncoder2.fit_transform(targetRows2Arr)
+    hotEncoderDF2 = pd.DataFrame(onehotEncoder2)
+    concatDF2 = pd.concat([DataRows2, hotEncoderDF2], axis=1)
+    corrMatrixComb2 = concatDF2.corr()
+    corrMatrixComb2 = corrMatrixComb2.iloc[:,-len(uniqueTarget2):]
+else:
+    corrMatrixComb2 = pd.DataFrame()
+if (len(targetRows3Arr) > 0):
+    onehotEncoder3 = OneHotEncoder(sparse=False)
+    targetRows3Arr = targetRows3Arr.reshape(len(targetRows3Arr), 1)
+    onehotEncoder3 = onehotEncoder3.fit_transform(targetRows3Arr)
+    hotEncoderDF3 = pd.DataFrame(onehotEncoder3)
+    concatDF3 = pd.concat([DataRows3, hotEncoderDF3], axis=1)
+    corrMatrixComb3 = concatDF3.corr()
+    corrMatrixComb3 = corrMatrixComb3.iloc[:,-len(uniqueTarget3):]
+else:
+    corrMatrixComb3 = pd.DataFrame()
+if (len(targetRows4Arr) > 0):
+    onehotEncoder4 = OneHotEncoder(sparse=False)
+    targetRows4Arr = targetRows4Arr.reshape(len(targetRows4Arr), 1)
+    onehotEncoder4 = onehotEncoder4.fit_transform(targetRows4Arr)
+    hotEncoderDF4 = pd.DataFrame(onehotEncoder4)
+    concatDF4 = pd.concat([DataRows4, hotEncoderDF4], axis=1)
+    corrMatrixComb4 = concatDF4.corr()
+    corrMatrixComb4 = corrMatrixComb4.iloc[:,-len(uniqueTarget4):]
+else:
+    corrMatrixComb4 = pd.DataFrame()
+X1 = add_constant(DataRows1.dropna())
+VIF1 = pd.Series([variance_inflation_factor(X1.values, i)
+                  for i in range(X1.shape[1])],
+                 index=X1.columns)
+print(VIF1)
+targetRows1ArrDF = pd.DataFrame(targetRows1Arr)
+targetRows2ArrDF = pd.DataFrame(targetRows2Arr)
+targetRows3ArrDF = pd.DataFrame(targetRows3Arr)
+targetRows4ArrDF = pd.DataFrame(targetRows4Arr)
+concatAllDF1 = pd.concat([DataRows1, targetRows1ArrDF], axis=1)
+concatAllDF2 = pd.concat([DataRows2, targetRows2ArrDF], axis=1)
+concatAllDF3 = pd.concat([DataRows3, targetRows3ArrDF], axis=1)
+concatAllDF4 = pd.concat([DataRows4, targetRows4ArrDF], axis=1)
+corrMatrixCombTotal1 = concatAllDF1.corr()
+corrMatrixCombTotal2 = concatAllDF2.corr()
+corrMatrixCombTotal3 = concatAllDF3.corr()
+corrMatrixCombTotal4 = concatAllDF4.corr()
+corrMatrixCombTotal1 = pd.concat([corrMatrixCombTotal1.tail(1)])
+corrMatrixCombTotal2 = pd.concat([corrMatrixCombTotal2.tail(1)])
+corrMatrixCombTotal3 = pd.concat([corrMatrixCombTotal3.tail(1)])
+corrMatrixCombTotal4 = pd.concat([corrMatrixCombTotal4.tail(1)])
 global packCorr
 packCorr = []
-corrMatrix1 = Datarows1.corr()
-corrMatrix2 = Datarows2.corr()
-corrMatrix3 = Datarows3.corr()
-corrMatrix4 = Datarows4.corr()
+packCorr.append(list(XData.columns.values.tolist()))
+packCorr.append(json.dumps(target_names))
 packCorr.append(corrMatrix1.to_json())
 packCorr.append(corrMatrix2.to_json())
 packCorr.append(corrMatrix3.to_json())
 packCorr.append(corrMatrix4.to_json())
-packCorr.append(list(XData.columns.values.tolist()))
+packCorr.append(corrMatrixComb1.to_json())
+packCorr.append(corrMatrixComb2.to_json())
+packCorr.append(corrMatrixComb3.to_json())
+packCorr.append(corrMatrixComb4.to_json())
+packCorr.append(corrMatrixCombTotal1.to_json())
+packCorr.append(corrMatrixCombTotal2.to_json())
+packCorr.append(corrMatrixCombTotal3.to_json())
+packCorr.append(corrMatrixCombTotal4.to_json())
+packCorr.append(json.dumps(uniqueTarget1))
+packCorr.append(json.dumps(uniqueTarget2))
+packCorr.append(json.dumps(uniqueTarget3))
+packCorr.append(json.dumps(uniqueTarget4))
 return 'Everything Okay'
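The four near-identical blocks added above repeat one idea per quadrant: one-hot encode that quadrant's target values, append the indicator columns to the quadrant's feature rows, take the correlation matrix, and keep only the columns that correlate features against the encoded classes; add_constant plus variance_inflation_factor is a standard multicollinearity check, here computed and printed only for quadrant 1. A minimal, self-contained sketch of that per-quadrant computation, factored into hypothetical helpers (quadrant_correlations and feature_vif are illustrative names, not part of the commit):

    # Sketch under the assumptions stated above; toy data, not from the repository.
    import numpy as np
    import pandas as pd
    from sklearn.preprocessing import OneHotEncoder
    from statsmodels.stats.outliers_influence import variance_inflation_factor
    from statsmodels.tools.tools import add_constant

    def quadrant_correlations(rows, target):
        # Correlate each feature with the one-hot-encoded class labels of one quadrant.
        rows = rows.reset_index(drop=True)
        if len(target) == 0:
            return pd.DataFrame()
        # sparse=False as in the commit; newer scikit-learn spells this sparse_output=False
        encoded = OneHotEncoder(sparse=False).fit_transform(np.array(target).reshape(-1, 1))
        combined = pd.concat([rows, pd.DataFrame(encoded)], axis=1)
        # keep only the feature-vs-class columns of the full correlation matrix
        return combined.corr().iloc[:, -encoded.shape[1]:]

    def feature_vif(rows):
        # Variance inflation factor per column: a standard multicollinearity check.
        X = add_constant(rows.dropna())
        return pd.Series([variance_inflation_factor(X.values, i)
                          for i in range(X.shape[1])], index=X.columns)

    # toy usage
    rows = pd.DataFrame({"a": [1.0, 2.0, 3.0, 4.0], "b": [4.0, 3.5, 2.0, 2.5]})
    labels = [0, 0, 1, 1]
    print(quadrant_correlations(rows, labels))
    print(feature_vif(rows))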
@@ -528,4 +639,16 @@ def SendCorrel():
 response = {
     'correlResul': packCorr
 }
 return jsonify(response)
+
+def unique(list1):
+    # initialize an empty list
+    unique_list = []
+    # traverse all elements
+    for x in list1:
+        # append x only if it is not already in unique_list
+        if x not in unique_list:
+            unique_list.append(x)
+    return unique_list
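The unique() helper added at the end is an order-preserving de-duplication; its output feeds the len(uniqueTargetN) slicing and the json.dumps(uniqueTargetN) entries in the hunk above. A drop-in sketch using only the standard library (an alternative, not what the commit ships):

    # Order-preserving de-duplication, equivalent to the unique() helper above (Python 3.7+).
    def unique(list1):
        return list(dict.fromkeys(list1))

    print(unique([2, 1, 2, 0, 1]))  # [2, 1, 0]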