Experiment: Decision Tree code finished

parent 2fb57232fc
commit 2ac14e25a5
@@ -1535,7 +1535,9 @@
  {
   "cell_type": "code",
   "execution_count": 70,
   "metadata": {},
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# make class predictions for X_test_dtm\n",
[IDE-only hunks omitted: the remaining changes in this diff touch what appears to be the PyCharm workspace file (workspace.xml) — coverage suites and temporary run configurations for experimentDT and experimentMethod, the list of open editor tabs (experimentOne.py, experimentMethod.py, experimentDT.py, experimentLR.py, experimentNN.py, showGraph.py), caret positions, and tool-window layout. No experiment code is affected by these hunks.]
@@ -0,0 +1,294 @@
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.tree import DecisionTreeClassifier
from sklearn import metrics

import pandas
from pandas import DataFrame

import numpy

import os

workspace = "/home/toshuumilia/Workspace/SML/"  # Insert the working directory here.
datasetPath = workspace + "data/sms.tsv"  # Location of the SMS dataset

smsCount = 5574  # Number of messages in the SMS dataset

if not os.path.exists(workspace + "results/"):
    os.makedirs(workspace + "results/")

###################
# Loading dataset #
###################

smsDF = pandas.read_table(datasetPath, header=None, names=["label", "message"])
smsDF["label_numerical"] = smsDF.label.map({"ham": 0, "spam": 1})

smsDataset = smsDF.message
smsLabel = smsDF.label_numerical

methodArray = []
measureArray = []
valueArray = []
availableMeasures = ["Accuracy", "F1Score"]


dataset_train, dataset_test, label_train, label_test = train_test_split(smsDataset, smsLabel, random_state=1)

# Note: DTM = document-term matrix
vectorizer = CountVectorizer()
trainDTM = vectorizer.fit_transform(dataset_train)
testDTM = vectorizer.transform(dataset_test)

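# --- Editorial aside (not part of the original file): a minimal, self-contained
# sketch of what the document-term matrix built above looks like, using a toy
# corpus instead of the SMS data. Nothing here is used by the experiments below.
from sklearn.feature_extraction.text import CountVectorizer

train_msgs = ["free entry win cash", "see you at lunch"]   # stand-ins for dataset_train
test_msgs = ["win free lunch"]                             # stand-in for dataset_test

vec = CountVectorizer()
train_dtm = vec.fit_transform(train_msgs)  # learns the vocabulary, returns a sparse count matrix
test_dtm = vec.transform(test_msgs)        # reuses the training vocabulary only

print(vec.get_feature_names_out())         # DTM columns (get_feature_names() on scikit-learn < 1.0)
print(train_dtm.toarray())                 # one row of token counts per message
print(test_dtm.toarray())
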
# DEPTH EXPERIMENT
# availableDepths = [None, 50, 25, 10, 5, 3]
#
# print("Depth Experiment")
# for x in range(0, 4):
#     for depth in availableDepths:
#         print("Step", x, "for depth:", depth)
#         # SEE: http://scikit-learn.org/stable/modules/generated/sklearn.tree.DecisionTreeClassifier.html
#         decisionTree = DecisionTreeClassifier(criterion='gini', splitter='best', max_depth=depth,
#                                               min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0,
#                                               max_features=None, random_state=None, max_leaf_nodes=None,
#                                               min_impurity_decrease=0.0, min_impurity_split=None, class_weight=None,
#                                               presort=False)
#         decisionTree.fit(trainDTM, label_train)
#
#         label_predicted = decisionTree.predict(testDTM)
#
#         # SEE: https://en.wikipedia.org/wiki/Precision_and_recall
#         valueArray.append(metrics.accuracy_score(label_test, label_predicted))
#         valueArray.append(metrics.f1_score(label_test, label_predicted))
#
#         for index in range(0, 2):
#             measureArray.append(availableMeasures[index])
#             methodArray.append("Depth-" + str(depth))
#
# # Save the experiments
# experimentDTDepthDF = DataFrame()
# experimentDTDepthDF["Measure"] = measureArray
# experimentDTDepthDF["Value"] = valueArray
# experimentDTDepthDF["Depth"] = methodArray
#
# experimentDTDepthDF.to_csv(workspace + "results/experimentDTDepth.csv")

# CRITERION EXPERIMENT
# availableCriterion = ["gini", "entropy"]
#
# methodArray = []
# measureArray = []
# valueArray = []
#
# print("Criterion Experiment")
# for x in range(0, 4):
#     for criterion in availableCriterion:
#         print("Step", x, "for criterion:", criterion)
#         decisionTree = DecisionTreeClassifier(criterion=criterion, splitter='best', max_depth=None,
#                                               min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0,
#                                               max_features=None, random_state=None, max_leaf_nodes=None,
#                                               min_impurity_decrease=0.0, min_impurity_split=None, class_weight=None,
#                                               presort=False)
#         decisionTree.fit(trainDTM, label_train)
#
#         label_predicted = decisionTree.predict(testDTM)
#
#         valueArray.append(metrics.accuracy_score(label_test, label_predicted))
#         valueArray.append(metrics.f1_score(label_test, label_predicted))
#
#         for index in range(0, 2):
#             measureArray.append(availableMeasures[index])
#             methodArray.append("Criterion-" + criterion)
#
# # Save the experiments
# experimentDTCriterionDF = DataFrame()
# experimentDTCriterionDF["Measure"] = measureArray
# experimentDTCriterionDF["Value"] = valueArray
# experimentDTCriterionDF["Criterion"] = methodArray
#
# experimentDTCriterionDF.to_csv(workspace + "results/experimentDTCriterion.csv")

# MIN_SAMPLES_SPLIT EXPERIMENT

# availableMinSampleSplit = [2, 10, 25, 50, 100, 250]
#
# methodArray = []
# measureArray = []
# valueArray = []
#
# print("MinSampleSplit Experiment")
# for x in range(0, 20):
#     for minSampleSplit in availableMinSampleSplit:
#         print("Step", x, "for minSampleSplit:", minSampleSplit)
#         decisionTree = DecisionTreeClassifier(criterion='gini', splitter='best', max_depth=None,
#                                               min_samples_split=minSampleSplit, min_samples_leaf=1,
#                                               min_weight_fraction_leaf=0.0,
#                                               max_features=None, random_state=None, max_leaf_nodes=None,
#                                               min_impurity_decrease=0.0, min_impurity_split=None, class_weight=None,
#                                               presort=False)
#         decisionTree.fit(trainDTM, label_train)
#
#         label_predicted = decisionTree.predict(testDTM)
#
#         valueArray.append(metrics.accuracy_score(label_test, label_predicted))
#         valueArray.append(metrics.f1_score(label_test, label_predicted))
#
#         for index in range(0, 2):
#             measureArray.append(availableMeasures[index])
#             methodArray.append("MinSampleSplit-" + str(minSampleSplit))
#
# # Save the experiments
# experimentDTMinSampleSplitDF = DataFrame()
# experimentDTMinSampleSplitDF["Measure"] = measureArray
# experimentDTMinSampleSplitDF["Value"] = valueArray
# experimentDTMinSampleSplitDF["MinSampleSplit"] = methodArray
#
# experimentDTMinSampleSplitDF.to_csv(workspace + "results/experimentDTMinSampleSplit.csv")

# MAX_FEATURE EXPERIMENT

# availableMaxFeature = [None, "sqrt", "log2", 0.25, 0.5, 0.75]
#
# methodArray = []
# measureArray = []
# valueArray = []
#
# print("MaxFeature Experiment")
# for x in range(0, 10):
#     for maxFeature in availableMaxFeature:
#         print("Step", x, "for MaxFeature:", maxFeature)
#         decisionTree = DecisionTreeClassifier(max_features=maxFeature)
#         decisionTree.fit(trainDTM, label_train)
#
#         label_predicted = decisionTree.predict(testDTM)
#
#         valueArray.append(metrics.accuracy_score(label_test, label_predicted))
#         valueArray.append(metrics.f1_score(label_test, label_predicted))
#
#         for index in range(0, 2):
#             measureArray.append(availableMeasures[index])
#             methodArray.append("MaxFeature-" + str(maxFeature))
#
# # Save the experiments
# experimentDTMaxFeatureDF = DataFrame()
# experimentDTMaxFeatureDF["Measure"] = measureArray
# experimentDTMaxFeatureDF["Value"] = valueArray
# experimentDTMaxFeatureDF["MaxFeature"] = methodArray
#
# experimentDTMaxFeatureDF.to_csv(workspace + "results/experimentDTMaxFeature.csv")

# MAX_LEAF_NODES EXPERIMENT

# availableMaxLeafNodes = []
# for ratio in numpy.arange(1/6, 1.01, 1/6):
#     availableMaxLeafNodes.append(int(ratio * smsCount))

# availableMaxLeafNodes = numpy.concatenate([[2], numpy.arange(10, 270, 10)])

# methodArray = []
# measureArray = []
# valueArray = []
#
# print("MaxLeafNodes Experiment")
# for x in range(0, 5):
#     for maxLeafNodes in availableMaxLeafNodes:
#         print("Step", x, "for MaxLeafNodes:", maxLeafNodes)
#         decisionTree = DecisionTreeClassifier(max_leaf_nodes=maxLeafNodes)
#         decisionTree.fit(trainDTM, label_train)
#
#         label_predicted = decisionTree.predict(testDTM)
#
#         valueArray.append(metrics.accuracy_score(label_test, label_predicted))
#         valueArray.append(metrics.f1_score(label_test, label_predicted))
#
#         for index in range(0, 2):
#             measureArray.append(availableMeasures[index])
#             methodArray.append(maxLeafNodes)
#
# # Save the experiments
# experimentDTMaxLeafNodesDF = DataFrame()
# experimentDTMaxLeafNodesDF["Measure"] = measureArray
# experimentDTMaxLeafNodesDF["Value"] = valueArray
# experimentDTMaxLeafNodesDF["MaxLeafNodes"] = methodArray
#
# experimentDTMaxLeafNodesDF.to_csv(workspace + "results/experimentDTMaxLeafNodes.csv")

# MIN_IMPURITY_DECREASE

# availableMinImpurityDecrease = numpy.arange(0., 0.061, 0.005)
#
# methodArray = []
# measureArray = []
# valueArray = []
#
# print("MinImpurityDecrease Experiment")
# for x in range(0, 10):
#     for minImpurityDecrease in availableMinImpurityDecrease:
#         print("Step", x, "for MinImpurityDecrease:", minImpurityDecrease)
#         decisionTree = DecisionTreeClassifier(min_impurity_decrease=minImpurityDecrease)
#         decisionTree.fit(trainDTM, label_train)
#
#         label_predicted = decisionTree.predict(testDTM)
#
#         valueArray.append(metrics.accuracy_score(label_test, label_predicted))
#         valueArray.append(metrics.f1_score(label_test, label_predicted))
#
#         for index in range(0, 2):
#             measureArray.append(availableMeasures[index])
#             methodArray.append(str(minImpurityDecrease*100) + "%")
#
# # Save the experiments
# experimentDTMinImpurityDecreaseDF = DataFrame()
# experimentDTMinImpurityDecreaseDF["Measure"] = measureArray
# experimentDTMinImpurityDecreaseDF["Value"] = valueArray
# experimentDTMinImpurityDecreaseDF["MinImpurityDecrease"] = methodArray
#
# experimentDTMinImpurityDecreaseDF.to_csv(workspace + "results/experimentDTMinImpurityDecrease.csv")

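# --- Editorial aside (not part of the original file): each commented-out sweep
# above repeats the same fit/score/record pattern with one hyperparameter varied.
# A hedged sketch of how the same study could be expressed once; the helper name
# run_sweep is hypothetical and nothing below is used by the experiment that follows.

def run_sweep(param_name, values, train_dtm, y_train, test_dtm, y_test, repeats=4):
    """Fit one DecisionTreeClassifier per value and record Accuracy/F1, like the loops above."""
    rows = []
    for step in range(repeats):
        for value in values:
            tree = DecisionTreeClassifier(**{param_name: value})
            tree.fit(train_dtm, y_train)
            predicted = tree.predict(test_dtm)
            rows.append({"Measure": "Accuracy", param_name: str(value),
                         "Value": metrics.accuracy_score(y_test, predicted)})
            rows.append({"Measure": "F1Score", param_name: str(value),
                         "Value": metrics.f1_score(y_test, predicted)})
    return DataFrame(rows)

# Example (mirrors the depth experiment above):
# run_sweep("max_depth", [None, 50, 25, 10, 5, 3], trainDTM, label_train, testDTM,
#           label_test).to_csv(workspace + "results/experimentDTDepth.csv")
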
# DEFAULT DT VS OPTIMIZED DT EXPERIMENT

availableMeasures = ["Precision", "Recall", "Accuracy", "F1Score"]
methodArray = []
measureArray = []
valueArray = []

print("Basic vs. Custom Decision Tree Experiment")
for x in range(0, 20):
    print("Step", x, "for Basic Decision Tree")
    decisionTree = DecisionTreeClassifier()
    decisionTree.fit(trainDTM, label_train)

    label_predicted = decisionTree.predict(testDTM)

    valueArray.append(metrics.precision_score(label_test, label_predicted))
    valueArray.append(metrics.recall_score(label_test, label_predicted))
    valueArray.append(metrics.accuracy_score(label_test, label_predicted))
    valueArray.append(metrics.f1_score(label_test, label_predicted))

    for measure in availableMeasures:
        measureArray.append(measure)
        methodArray.append("Basic Decision Tree")

    print("Step", x, "for Custom Decision Tree")
    decisionTree = DecisionTreeClassifier(max_features=0.25, criterion="gini")
    decisionTree.fit(trainDTM, label_train)

    label_predicted = decisionTree.predict(testDTM)

    valueArray.append(metrics.precision_score(label_test, label_predicted))
    valueArray.append(metrics.recall_score(label_test, label_predicted))
    valueArray.append(metrics.accuracy_score(label_test, label_predicted))
    valueArray.append(metrics.f1_score(label_test, label_predicted))

    for measure in availableMeasures:
        measureArray.append(measure)
        methodArray.append("Custom Decision Tree")

# Save the experiments
experimentDTBasicVsOptimizedDF = DataFrame()
experimentDTBasicVsOptimizedDF["Measure"] = measureArray
experimentDTBasicVsOptimizedDF["Value"] = valueArray
experimentDTBasicVsOptimizedDF["Tuning"] = methodArray

experimentDTBasicVsOptimizedDF.to_csv(workspace + "results/experimentDTBasicVsOptimized.csv")
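Since the comparison above repeats 20 fits per configuration, the CSV it writes holds 20 rows per (Tuning, Measure) pair. A hedged sketch, not part of the commit, of how those repetitions could be summarised with pandas before plotting (column names follow the DataFrame written above):

import pandas

# Path as configured in the script above; adjust to the local workspace.
df = pandas.read_csv("/home/toshuumilia/Workspace/SML/results/experimentDTBasicVsOptimized.csv")

# Mean and standard deviation of each measure for the basic vs. custom tree.
summary = df.groupby(["Tuning", "Measure"])["Value"].agg(["mean", "std"])
print(summary.round(4))
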
@@ -1,10 +1,11 @@
from sklearn.model_selection import train_test_split

from sklearn.feature_extraction.text import CountVectorizer
from sklearn import metrics

from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import LogisticRegression
from sklearn import metrics
from sklearn.neural_network import MLPClassifier
from sklearn.naive_bayes import MultinomialNB

import pandas
from pandas import DataFrame
@@ -13,8 +14,9 @@ import os

workspace = "/home/toshuumilia/Workspace/SML/"  # Insert the working directory here.
datasetPath = workspace + "data/sms.tsv"  # Location of the SMS dataset
experimentOnePath = workspace + "experiment/experimentOne.csv"  # Location of the first experiment's results

if not os.path.exists(workspace + "results/"):
    os.makedirs(workspace + "results/")

smsDF = pandas.read_table(datasetPath, header=None, names=["label", "message"])
smsDF["label_numerical"] = smsDF.label.map({"ham": 0, "spam": 1})
@@ -26,8 +28,11 @@ methodArray = []
measureArray = []
valueArray = []

availableMeasures = ["Precision", "Recall", "Accuracy", "F1Score"]
availableMethods = ["Decision Tree", "Logistic Regression", "Neural Network", "Naive Bayesian"]

# Run the whole comparison ten times so the results can be averaged.
for x in range(0, 15):
for x in range(0, 10):
    # Create the datasets and the labels used for the ML.
    # TODO: Parameter to test: how to split the smsDataset into train and test.
    dataset_train, dataset_test, label_train, label_test = train_test_split(smsDataset, smsLabel, random_state=1)
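# --- Editorial aside (not part of the original file): train_test_split above is
# called with a fixed random_state=1, so every repetition of the loop sees the
# identical split, and deterministic models such as Logistic Regression repeat the
# exact same scores. If averaging over different splits is the intent, a hedged
# sketch of a per-repetition seed with stratification on the minority spam class:
#
# for x in range(0, 10):
#     dataset_train, dataset_test, label_train, label_test = train_test_split(
#         smsDataset, smsLabel, random_state=x, stratify=smsLabel)
#     ...  # fit and score the four classifiers as in the loop body below
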
@@ -40,98 +45,77 @@ for x in range(0, 15):
    # DECISION TREE
    # TODO: Explore which parameters could be used.
    # SEE: http://scikit-learn.org/stable/modules/generated/sklearn.tree.DecisionTreeClassifier.html
    decisionTree = DecisionTreeClassifier(criterion='gini', splitter='best', max_depth=None,
                                          min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0,
                                          max_features=None, random_state=None, max_leaf_nodes=None,
                                          min_impurity_decrease=0.0, min_impurity_split=None, class_weight=None,
                                          presort=False)
    decisionTree = DecisionTreeClassifier()
    decisionTree.fit(trainDTM, label_train)

    label_predicted = decisionTree.predict(testDTM)

    # SEE: https://en.wikipedia.org/wiki/Precision_and_recall
    valueArray.append(metrics.precision_score(label_test, label_predicted))
    measureArray.append("precision")
    methodArray.append("Decision Tree")

    valueArray.append(metrics.recall_score(label_test, label_predicted))
    measureArray.append("recall")
    methodArray.append("Decision Tree")

    valueArray.append(metrics.accuracy_score(label_test, label_predicted))
    measureArray.append("accuracy")
    methodArray.append("Decision Tree")

    valueArray.append(metrics.f1_score(label_test, label_predicted))
    measureArray.append("f1score")
    methodArray.append("Decision Tree")

    for index in range(0, 4):
        measureArray.append(availableMeasures[index])
        methodArray.append(availableMethods[0])

    # LOGISTIC REGRESSION
    # TODO: Explore which parameters could be used.
    # SEE: http://scikit-learn.org/stable/modules/generated/sklearn.linear_model.LogisticRegression.html
    logisticRegression = LogisticRegression(penalty='l2', dual=False, tol=0.0001,
                                            C=1.0, fit_intercept=True, intercept_scaling=1,
                                            class_weight=None, random_state=None, solver='liblinear',
                                            max_iter=100, multi_class='ovr', verbose=0,
                                            warm_start=False, n_jobs=1)
    logisticRegression = LogisticRegression()
    logisticRegression.fit(trainDTM, label_train)

    label_predicted = logisticRegression.predict(testDTM)

    valueArray.append(metrics.precision_score(label_test, label_predicted))
    measureArray.append("precision")
    methodArray.append("Logistic Regression")

    valueArray.append(metrics.recall_score(label_test, label_predicted))
    measureArray.append("recall")
    methodArray.append("Logistic Regression")

    valueArray.append(metrics.accuracy_score(label_test, label_predicted))
    measureArray.append("accuracy")
    methodArray.append("Logistic Regression")

    valueArray.append(metrics.f1_score(label_test, label_predicted))
    measureArray.append("f1score")
    methodArray.append("Logistic Regression")

    for index in range(0, 4):
        measureArray.append(availableMeasures[index])
        methodArray.append(availableMethods[1])

    # NEURAL NETWORK
    # SEE: http://scikit-learn.org/stable/modules/generated/sklearn.neural_network.MLPClassifier.html
    neuralNetwork = MLPClassifier(hidden_layer_sizes=(5,), activation='relu', solver='adam',
                                  alpha=0.0001, batch_size='auto', learning_rate='constant',
                                  learning_rate_init=0.001, power_t=0.5, max_iter=200,
                                  shuffle=True, random_state=None, tol=0.0001,
                                  verbose=False, warm_start=False, momentum=0.9,
                                  nesterovs_momentum=True, early_stopping=False, validation_fraction=0.1,
                                  beta_1=0.9, beta_2=0.999, epsilon=1e-08)
    neuralNetwork = MLPClassifier()

    neuralNetwork.fit(trainDTM, label_train)

    label_predicted = neuralNetwork.predict(testDTM)

    valueArray.append(metrics.precision_score(label_test, label_predicted))
    measureArray.append("precision")
    methodArray.append("Neural Network")

    valueArray.append(metrics.recall_score(label_test, label_predicted))
    measureArray.append("recall")
    methodArray.append("Neural Network")

    valueArray.append(metrics.accuracy_score(label_test, label_predicted))
    measureArray.append("accuracy")
    methodArray.append("Neural Network")

    valueArray.append(metrics.f1_score(label_test, label_predicted))
    measureArray.append("f1score")
    methodArray.append("Neural Network")

    for index in range(0, 4):
        measureArray.append(availableMeasures[index])
        methodArray.append(availableMethods[2])

    # NAIVE BAYESIAN
    # SEE: http://scikit-learn.org/stable/modules/generated/sklearn.naive_bayes.MultinomialNB.html
    naiveBayesian = MultinomialNB(alpha=1.0, fit_prior=True, class_prior=None)

    naiveBayesian.fit(trainDTM, label_train)

    label_predicted = naiveBayesian.predict(testDTM)

    valueArray.append(metrics.precision_score(label_test, label_predicted))
    valueArray.append(metrics.recall_score(label_test, label_predicted))
    valueArray.append(metrics.accuracy_score(label_test, label_predicted))
    valueArray.append(metrics.f1_score(label_test, label_predicted))

    for index in range(0, 4):
        measureArray.append(availableMeasures[index])
        methodArray.append(availableMethods[3])

    print("Step", x, "done.")

experimentOneDF = DataFrame()
experimentOneDF["measure"] = measureArray
experimentOneDF["value"] = valueArray
experimentOneDF["method"] = methodArray
experimentBasicMethodsDF = DataFrame()
experimentBasicMethodsDF["Measure"] = measureArray
experimentBasicMethodsDF["Value"] = valueArray
experimentBasicMethodsDF["Method"] = methodArray

if not os.path.exists(workspace + "results/"):
    os.makedirs(workspace + "results/")

experimentOneDF.to_csv(experimentOnePath)
experimentBasicMethodsDF.to_csv(workspace + "results/experimentBasicMethods.csv")
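The four per-classifier blocks in the hunk above all append the same four metrics and matching labels by hand. A hedged sketch, not part of the commit, of how that bookkeeping could be factored into a helper (the name record_scores is hypothetical):

from sklearn import metrics

def record_scores(method_name, y_true, y_pred, value_array, measure_array, method_array):
    """Append Precision/Recall/Accuracy/F1 for one classifier, mirroring the loop body above."""
    scores = [("Precision", metrics.precision_score(y_true, y_pred)),
              ("Recall", metrics.recall_score(y_true, y_pred)),
              ("Accuracy", metrics.accuracy_score(y_true, y_pred)),
              ("F1Score", metrics.f1_score(y_true, y_pred))]
    for measure, value in scores:
        value_array.append(value)
        measure_array.append(measure)
        method_array.append(method_name)

# Example: record_scores("Decision Tree", label_test, label_predicted, valueArray, measureArray, methodArray)
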
@@ -5,16 +5,135 @@ import pandas

workspace = "/home/toshuumilia/Workspace/SML/"  # Insert the working directory here.
datasetPath = workspace + "data/sms.tsv"  # Location of the SMS dataset
experimentOnePath = workspace + "results/experimentOne.csv"  # Location of the first experiment's results
globalFigsize = (15, 6)  # Graphs parameters

experimentOneDF = pandas.read_csv(experimentOnePath)
# Experiment location

# Graphs parameters
globalFigsize = (12, 6)


# Comparison Experiment #
#
# experimentOneDF = pandas.read_csv(experimentOnePath)
#
# seaborn.set_style("darkgrid")
# pyplot.figure(figsize=globalFigsize)
# seaborn.barplot(x="Value", y="Measure", hue="Method",
#                 data=experimentOneDF)
# pyplot.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
# pyplot.ylabel('Measure', fontsize=12)
# pyplot.xlabel('Value', fontsize=12)
# pyplot.xlim(0.5, 1)
# pyplot.title('Performance comparison between four learning methods', fontsize=15)
# pyplot.show()


# Decision Tree #
# Depth Experiment
#
# experimentDTDepthDF = pandas.read_csv(workspace + "results/experimentDTDepth.csv")
#
# seaborn.set_style("whitegrid")
# pyplot.figure(figsize=globalFigsize)
# seaborn.barplot(x="Value", y="Measure", hue="Depth",
#                 data=experimentDTDepthDF)
# pyplot.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
# pyplot.ylabel('Measure', fontsize=12)
# pyplot.xlabel('Value', fontsize=12)
# pyplot.xlim(0.5, 1)
# pyplot.title('Performance comparison of a Decision Tree relative to a maximum depth', fontsize=15)
# pyplot.show()

# Criterion Experiment
#
# experimentDTCriterionDF = pandas.read_csv(workspace + "results/experimentDTCriterion.csv")
#
# seaborn.set_style("whitegrid")
# pyplot.figure(figsize=globalFigsize)
# seaborn.barplot(x="Value", y="Measure", hue="Criterion",
#                 data=experimentDTCriterionDF)
# pyplot.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
# pyplot.ylabel('Measure', fontsize=12)
# pyplot.xlabel('Value', fontsize=12)
# pyplot.xlim(0.5, 1)
# pyplot.title('Performance comparison of a Decision Tree relative to a splitting quality criterion', fontsize=15)
# pyplot.show()

# MinSampleSplit Experiment
#
# experimentDTMinSampleSplitDF = pandas.read_csv(workspace + "results/experimentDTMinSampleSplit.csv")
#
# seaborn.set_style("whitegrid")
# pyplot.figure(figsize=globalFigsize)
# seaborn.barplot(x="Value", y="Measure", hue="MinSampleSplit",
#                 data=experimentDTMinSampleSplitDF)
# pyplot.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
# pyplot.ylabel('Measure', fontsize=12)
# pyplot.xlabel('Value', fontsize=12)
# pyplot.xlim(0.5, 1)
# pyplot.title('Insert Title', fontsize=15)
# pyplot.xticks(rotation='vertical')
# pyplot.show()

# MaxFeature Experiment
#
# experimentDTMaxFeatureDF = pandas.read_csv(workspace + "results/experimentDTMaxFeature.csv")
#
# seaborn.set_style("whitegrid")
# pyplot.figure(figsize=globalFigsize)
# seaborn.barplot(x="Value", y="Measure", hue="MaxFeature",
#                 data=experimentDTMaxFeatureDF)
# pyplot.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
# pyplot.ylabel('Measure', fontsize=12)
# pyplot.xlabel('Value', fontsize=12)
# pyplot.xlim(0.5, 1)
# pyplot.title('Insert Title', fontsize=15)
# pyplot.xticks(rotation='vertical')
# pyplot.show()

# MaxLeafNodes Experiment

# experimentDTMaxLeafNodesDF = pandas.read_csv(workspace + "results/experimentDTMaxLeafNodes.csv")
#
# seaborn.set_style("whitegrid")
# pyplot.figure(figsize=globalFigsize)
# seaborn.pointplot(y="Value", hue="Measure", x="MaxLeafNodes",
#                   data=experimentDTMaxLeafNodesDF)
# pyplot.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
# pyplot.ylabel('Measure', fontsize=12)
# pyplot.xlabel('Value', fontsize=12)
# pyplot.xlim(0.5, 1)
# pyplot.title('Insert Title', fontsize=15)
# pyplot.xticks(rotation='vertical')
# pyplot.show()

# MinImpurityDecrease Experiment
#
# experimentDTMinImpurityDecreaseDF = pandas.read_csv(workspace + "results/experimentDTMinImpurityDecrease.csv")
#
# seaborn.set_style("whitegrid")
# pyplot.figure(figsize=globalFigsize)
# seaborn.pointplot(y="Value", hue="Measure", x="MinImpurityDecrease",
#                   data=experimentDTMinImpurityDecreaseDF, palette="Greens_d")
# pyplot.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
# pyplot.ylabel('Measure Value', fontsize=12)
# pyplot.xlabel('Min Impurity Decrease', fontsize=12)
# pyplot.title('', fontsize=15)
# pyplot.xticks(rotation='vertical')
# pyplot.show()

# BasicVsOptimized Experiment

experimentDTBasicVsOptimizedDF = pandas.read_csv(workspace + "results/experimentDTBasicVsOptimized.csv")

seaborn.set_style("whitegrid")
pyplot.figure(figsize=globalFigsize)
seaborn.barplot(x="measure", y="value", hue="method",
                data=experimentOneDF, palette="Blues_d")
pyplot.ylabel('value', fontsize=12)
pyplot.xlabel('measure', fontsize=12)
seaborn.barplot(x="Value", y="Measure", hue="Tuning",
                data=experimentDTBasicVsOptimizedDF)
pyplot.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
pyplot.ylabel('Measure', fontsize=12)
pyplot.xlabel('Value', fontsize=12)
pyplot.xlim(0.5, 1)
pyplot.title('Insert Title', fontsize=15)
pyplot.xticks(rotation='vertical')
pyplot.show()
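Every commented-out block in the plotting script above repeats the same read_csv / barplot / label / show sequence with different columns. A hedged sketch, not part of the commit, of a helper that could replace the duplication (name and signature are hypothetical):

import pandas
import seaborn
from matplotlib import pyplot

def plot_experiment(csv_path, hue, title, figsize=(12, 6)):
    """Load one experiment CSV and draw the Measure-vs-Value bar plot used throughout the script."""
    df = pandas.read_csv(csv_path)
    seaborn.set_style("whitegrid")
    pyplot.figure(figsize=figsize)
    seaborn.barplot(x="Value", y="Measure", hue=hue, data=df)
    pyplot.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
    pyplot.ylabel('Measure', fontsize=12)
    pyplot.xlabel('Value', fontsize=12)
    pyplot.xlim(0.5, 1)
    pyplot.title(title, fontsize=15)
    pyplot.show()

# Example usage:
# plot_experiment(workspace + "results/experimentDTBasicVsOptimized.csv", "Tuning",
#                 "Basic vs. custom Decision Tree")
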
File diff suppressed because one or more lines are too long
@ -0,0 +1,121 @@
|
|||
,Measure,Value,Method
|
||||
0,Precision,0.8333333333333334,Decision Tree
|
||||
1,Recall,0.8648648648648649,Decision Tree
|
||||
2,Accuracy,0.95908111988514,Decision Tree
|
||||
3,F1Score,0.8488063660477454,Decision Tree
|
||||
4,Precision,0.9941176470588236,Logistic Regression
|
||||
5,Recall,0.9135135135135135,Logistic Regression
|
||||
6,Accuracy,0.9877961234745154,Logistic Regression
|
||||
7,F1Score,0.9521126760563381,Logistic Regression
|
||||
8,Precision,1.0,Neural Network
|
||||
9,Recall,0.9297297297297298,Neural Network
|
||||
10,Accuracy,0.990667623833453,Neural Network
|
||||
11,F1Score,0.9635854341736695,Neural Network
|
||||
12,Precision,0.8631578947368421,Decision Tree
|
||||
13,Recall,0.8864864864864865,Decision Tree
|
||||
14,Accuracy,0.9662598707824839,Decision Tree
|
||||
15,F1Score,0.8746666666666667,Decision Tree
|
||||
16,Precision,0.9941176470588236,Logistic Regression
|
||||
17,Recall,0.9135135135135135,Logistic Regression
|
||||
18,Accuracy,0.9877961234745154,Logistic Regression
|
||||
19,F1Score,0.9521126760563381,Logistic Regression
|
||||
20,Precision,1.0,Neural Network
|
||||
21,Recall,0.9297297297297298,Neural Network
|
||||
22,Accuracy,0.990667623833453,Neural Network
|
||||
23,F1Score,0.9635854341736695,Neural Network
|
||||
24,Precision,0.8465608465608465,Decision Tree
|
||||
25,Recall,0.8648648648648649,Decision Tree
|
||||
26,Accuracy,0.9612347451543432,Decision Tree
|
||||
27,F1Score,0.8556149732620321,Decision Tree
|
||||
28,Precision,0.9941176470588236,Logistic Regression
|
||||
29,Recall,0.9135135135135135,Logistic Regression
|
||||
30,Accuracy,0.9877961234745154,Logistic Regression
|
||||
31,F1Score,0.9521126760563381,Logistic Regression
|
||||
32,Precision,1.0,Neural Network
|
||||
33,Recall,0.9351351351351351,Neural Network
|
||||
34,Accuracy,0.9913854989231874,Neural Network
|
||||
35,F1Score,0.9664804469273743,Neural Network
|
||||
36,Precision,0.8743169398907104,Decision Tree
|
||||
37,Recall,0.8648648648648649,Decision Tree
|
||||
38,Accuracy,0.9655419956927495,Decision Tree
|
||||
39,F1Score,0.8695652173913042,Decision Tree
|
||||
40,Precision,0.9941176470588236,Logistic Regression
|
||||
41,Recall,0.9135135135135135,Logistic Regression
|
||||
42,Accuracy,0.9877961234745154,Logistic Regression
|
||||
43,F1Score,0.9521126760563381,Logistic Regression
|
||||
44,Precision,1.0,Neural Network
|
||||
45,Recall,0.9297297297297298,Neural Network
|
||||
46,Accuracy,0.990667623833453,Neural Network
|
||||
47,F1Score,0.9635854341736695,Neural Network
|
||||
48,Precision,0.8578947368421053,Decision Tree
|
||||
49,Recall,0.8810810810810811,Decision Tree
|
||||
50,Accuracy,0.964824120603015,Decision Tree
|
||||
51,F1Score,0.8693333333333333,Decision Tree
|
||||
52,Precision,0.9941176470588236,Logistic Regression
|
||||
53,Recall,0.9135135135135135,Logistic Regression
|
||||
54,Accuracy,0.9877961234745154,Logistic Regression
|
||||
55,F1Score,0.9521126760563381,Logistic Regression
|
||||
56,Precision,0.9942857142857143,Neural Network
|
||||
57,Recall,0.9405405405405406,Neural Network
|
||||
58,Accuracy,0.9913854989231874,Neural Network
|
||||
59,F1Score,0.9666666666666667,Neural Network
|
||||
60,Precision,0.8624338624338624,Decision Tree
|
||||
61,Recall,0.8810810810810811,Decision Tree
|
||||
62,Accuracy,0.9655419956927495,Decision Tree
|
||||
63,F1Score,0.8716577540106951,Decision Tree
|
||||
64,Precision,0.9941176470588236,Logistic Regression
|
||||
65,Recall,0.9135135135135135,Logistic Regression
|
||||
66,Accuracy,0.9877961234745154,Logistic Regression
|
||||
67,F1Score,0.9521126760563381,Logistic Regression
|
||||
68,Precision,0.9942528735632183,Neural Network
|
||||
69,Recall,0.9351351351351351,Neural Network
|
||||
70,Accuracy,0.990667623833453,Neural Network
|
||||
71,F1Score,0.9637883008356545,Neural Network
|
||||
72,Precision,0.8695652173913043,Decision Tree
|
||||
73,Recall,0.8648648648648649,Decision Tree
|
||||
74,Accuracy,0.964824120603015,Decision Tree
|
||||
75,F1Score,0.8672086720867209,Decision Tree
|
||||
76,Precision,0.9941176470588236,Logistic Regression
|
||||
77,Recall,0.9135135135135135,Logistic Regression
|
||||
78,Accuracy,0.9877961234745154,Logistic Regression
|
||||
79,F1Score,0.9521126760563381,Logistic Regression
|
||||
80,Precision,1.0,Neural Network
|
||||
81,Recall,0.9297297297297298,Neural Network
|
||||
82,Accuracy,0.990667623833453,Neural Network
|
||||
83,F1Score,0.9635854341736695,Neural Network
|
||||
84,Precision,0.8797814207650273,Decision Tree
|
||||
85,Recall,0.8702702702702703,Decision Tree
|
||||
86,Accuracy,0.9669777458722182,Decision Tree
|
||||
87,F1Score,0.875,Decision Tree
|
||||
88,Precision,0.9941176470588236,Logistic Regression
|
||||
89,Recall,0.9135135135135135,Logistic Regression
|
||||
90,Accuracy,0.9877961234745154,Logistic Regression
|
||||
91,F1Score,0.9521126760563381,Logistic Regression
|
||||
92,Precision,0.9942528735632183,Neural Network
|
||||
93,Recall,0.9351351351351351,Neural Network
|
||||
94,Accuracy,0.990667623833453,Neural Network
|
||||
95,F1Score,0.9637883008356545,Neural Network
|
||||
96,Precision,0.8601036269430051,Decision Tree
|
||||
97,Recall,0.8972972972972973,Decision Tree
|
||||
98,Accuracy,0.9669777458722182,Decision Tree
|
||||
99,F1Score,0.8783068783068783,Decision Tree
|
||||
100,Precision,0.9941176470588236,Logistic Regression
|
||||
101,Recall,0.9135135135135135,Logistic Regression
|
||||
102,Accuracy,0.9877961234745154,Logistic Regression
|
||||
103,F1Score,0.9521126760563381,Logistic Regression
|
||||
104,Precision,0.9942196531791907,Neural Network
|
||||
105,Recall,0.9297297297297298,Neural Network
|
||||
106,Accuracy,0.9899497487437185,Neural Network
|
||||
107,F1Score,0.9608938547486033,Neural Network
|
||||
108,Precision,0.8609625668449198,Decision Tree
|
||||
109,Recall,0.8702702702702703,Decision Tree
|
||||
110,Accuracy,0.9641062455132807,Decision Tree
|
||||
111,F1Score,0.8655913978494624,Decision Tree
|
||||
112,Precision,0.9941176470588236,Logistic Regression
|
||||
113,Recall,0.9135135135135135,Logistic Regression
|
||||
114,Accuracy,0.9877961234745154,Logistic Regression
|
||||
115,F1Score,0.9521126760563381,Logistic Regression
|
||||
116,Precision,1.0,Neural Network
|
||||
117,Recall,0.9297297297297298,Neural Network
|
||||
118,Accuracy,0.990667623833453,Neural Network
|
||||
119,F1Score,0.9635854341736695,Neural Network
|
||||
|
|
|
@ -0,0 +1,161 @@
|
|||
,Measure,Value,Tuning
|
||||
0,Precision,0.8602150537634409,Basic Decision Tree
|
||||
1,Recall,0.8648648648648649,Basic Decision Tree
|
||||
2,Accuracy,0.9633883704235463,Basic Decision Tree
|
||||
3,F1Score,0.8625336927223719,Basic Decision Tree
|
||||
4,Precision,0.9010989010989011,Custom Decision Tree
|
||||
5,Recall,0.8864864864864865,Custom Decision Tree
|
||||
6,Accuracy,0.9720028715003589,Custom Decision Tree
|
||||
7,F1Score,0.8937329700272478,Custom Decision Tree
|
||||
8,Precision,0.8563829787234043,Basic Decision Tree
|
||||
9,Recall,0.8702702702702703,Basic Decision Tree
|
||||
10,Accuracy,0.9633883704235463,Basic Decision Tree
|
||||
11,F1Score,0.8632707774798928,Basic Decision Tree
|
||||
12,Precision,0.9585798816568047,Custom Decision Tree
|
||||
13,Recall,0.8756756756756757,Custom Decision Tree
|
||||
14,Accuracy,0.9784637473079684,Custom Decision Tree
|
||||
15,F1Score,0.9152542372881357,Custom Decision Tree
|
||||
16,Precision,0.8804347826086957,Basic Decision Tree
|
||||
17,Recall,0.8756756756756757,Basic Decision Tree
|
||||
18,Accuracy,0.9676956209619526,Basic Decision Tree
|
||||
19,F1Score,0.8780487804878049,Basic Decision Tree
|
||||
20,Precision,0.9209039548022598,Custom Decision Tree
|
||||
21,Recall,0.8810810810810811,Custom Decision Tree
|
||||
22,Accuracy,0.9741564967695621,Custom Decision Tree
|
||||
23,F1Score,0.9005524861878452,Custom Decision Tree
|
||||
24,Precision,0.8412698412698413,Basic Decision Tree
|
||||
25,Recall,0.8594594594594595,Basic Decision Tree
|
||||
26,Accuracy,0.9597989949748744,Basic Decision Tree
|
||||
27,F1Score,0.8502673796791443,Basic Decision Tree
|
||||
28,Precision,0.9349112426035503,Custom Decision Tree
|
||||
29,Recall,0.8540540540540541,Custom Decision Tree
|
||||
30,Accuracy,0.9727207465900933,Custom Decision Tree
|
||||
31,F1Score,0.8926553672316384,Custom Decision Tree
|
||||
32,Precision,0.8586956521739131,Basic Decision Tree
|
||||
33,Recall,0.8540540540540541,Basic Decision Tree
|
||||
34,Accuracy,0.9619526202440776,Basic Decision Tree
|
||||
35,F1Score,0.8563685636856369,Basic Decision Tree
|
||||
36,Precision,0.9081081081081082,Custom Decision Tree
|
||||
37,Recall,0.9081081081081082,Custom Decision Tree
|
||||
38,Accuracy,0.9755922469490309,Custom Decision Tree
|
||||
39,F1Score,0.9081081081081082,Custom Decision Tree
|
||||
40,Precision,0.8534031413612565,Basic Decision Tree
|
||||
41,Recall,0.8810810810810811,Basic Decision Tree
|
||||
42,Accuracy,0.9641062455132807,Basic Decision Tree
43,F1Score,0.8670212765957447,Basic Decision Tree
44,Precision,0.8695652173913043,Custom Decision Tree
45,Recall,0.8648648648648649,Custom Decision Tree
46,Accuracy,0.964824120603015,Custom Decision Tree
47,F1Score,0.8672086720867209,Custom Decision Tree
48,Precision,0.8709677419354839,Basic Decision Tree
49,Recall,0.8756756756756757,Basic Decision Tree
50,Accuracy,0.9662598707824839,Basic Decision Tree
51,F1Score,0.8733153638814016,Basic Decision Tree
52,Precision,0.8950276243093923,Custom Decision Tree
53,Recall,0.8756756756756757,Custom Decision Tree
54,Accuracy,0.9698492462311558,Custom Decision Tree
55,F1Score,0.8852459016393444,Custom Decision Tree
56,Precision,0.8541666666666666,Basic Decision Tree
57,Recall,0.8864864864864865,Basic Decision Tree
58,Accuracy,0.964824120603015,Basic Decision Tree
59,F1Score,0.870026525198939,Basic Decision Tree
60,Precision,0.9239766081871345,Custom Decision Tree
61,Recall,0.8540540540540541,Custom Decision Tree
62,Accuracy,0.9712849964106246,Custom Decision Tree
63,F1Score,0.8876404494382022,Custom Decision Tree
64,Precision,0.8518518518518519,Basic Decision Tree
65,Recall,0.8702702702702703,Basic Decision Tree
66,Accuracy,0.9626704953338119,Basic Decision Tree
67,F1Score,0.8609625668449198,Basic Decision Tree
68,Precision,0.9005524861878453,Custom Decision Tree
69,Recall,0.8810810810810811,Custom Decision Tree
70,Accuracy,0.9712849964106246,Custom Decision Tree
71,F1Score,0.8907103825136612,Custom Decision Tree
72,Precision,0.8602150537634409,Basic Decision Tree
73,Recall,0.8648648648648649,Basic Decision Tree
74,Accuracy,0.9633883704235463,Basic Decision Tree
75,F1Score,0.8625336927223719,Basic Decision Tree
76,Precision,0.8852459016393442,Custom Decision Tree
77,Recall,0.8756756756756757,Custom Decision Tree
78,Accuracy,0.968413496051687,Custom Decision Tree
79,F1Score,0.8804347826086957,Custom Decision Tree
80,Precision,0.8473684210526315,Basic Decision Tree
81,Recall,0.8702702702702703,Basic Decision Tree
82,Accuracy,0.9619526202440776,Basic Decision Tree
83,F1Score,0.8586666666666666,Basic Decision Tree
84,Precision,0.873015873015873,Custom Decision Tree
85,Recall,0.8918918918918919,Custom Decision Tree
86,Accuracy,0.968413496051687,Custom Decision Tree
87,F1Score,0.8823529411764706,Custom Decision Tree
88,Precision,0.8541666666666666,Basic Decision Tree
89,Recall,0.8864864864864865,Basic Decision Tree
90,Accuracy,0.964824120603015,Basic Decision Tree
91,F1Score,0.870026525198939,Basic Decision Tree
92,Precision,0.9204545454545454,Custom Decision Tree
93,Recall,0.8756756756756757,Custom Decision Tree
94,Accuracy,0.9734386216798278,Custom Decision Tree
95,F1Score,0.8975069252077563,Custom Decision Tree
96,Precision,0.8315789473684211,Basic Decision Tree
97,Recall,0.8540540540540541,Basic Decision Tree
98,Accuracy,0.9576453697056713,Basic Decision Tree
99,F1Score,0.8426666666666667,Basic Decision Tree
100,Precision,0.873015873015873,Custom Decision Tree
101,Recall,0.8918918918918919,Custom Decision Tree
102,Accuracy,0.968413496051687,Custom Decision Tree
103,F1Score,0.8823529411764706,Custom Decision Tree
104,Precision,0.8638743455497382,Basic Decision Tree
105,Recall,0.8918918918918919,Basic Decision Tree
106,Accuracy,0.9669777458722182,Basic Decision Tree
107,F1Score,0.8776595744680851,Basic Decision Tree
108,Precision,0.9044943820224719,Custom Decision Tree
109,Recall,0.8702702702702703,Custom Decision Tree
110,Accuracy,0.9705671213208902,Custom Decision Tree
111,F1Score,0.8870523415977961,Custom Decision Tree
112,Precision,0.8617021276595744,Basic Decision Tree
113,Recall,0.8756756756756757,Basic Decision Tree
114,Accuracy,0.964824120603015,Basic Decision Tree
115,F1Score,0.8686327077747988,Basic Decision Tree
116,Precision,0.8950276243093923,Custom Decision Tree
117,Recall,0.8756756756756757,Custom Decision Tree
118,Accuracy,0.9698492462311558,Custom Decision Tree
119,F1Score,0.8852459016393444,Custom Decision Tree
120,Precision,0.8549222797927462,Basic Decision Tree
121,Recall,0.8918918918918919,Basic Decision Tree
122,Accuracy,0.9655419956927495,Basic Decision Tree
123,F1Score,0.8730158730158729,Basic Decision Tree
124,Precision,0.9005524861878453,Custom Decision Tree
125,Recall,0.8810810810810811,Custom Decision Tree
126,Accuracy,0.9712849964106246,Custom Decision Tree
127,F1Score,0.8907103825136612,Custom Decision Tree
128,Precision,0.8617021276595744,Basic Decision Tree
129,Recall,0.8756756756756757,Basic Decision Tree
130,Accuracy,0.964824120603015,Basic Decision Tree
131,F1Score,0.8686327077747988,Basic Decision Tree
132,Precision,0.8978494623655914,Custom Decision Tree
133,Recall,0.9027027027027027,Custom Decision Tree
134,Accuracy,0.9734386216798278,Custom Decision Tree
135,F1Score,0.9002695417789757,Custom Decision Tree
136,Precision,0.8497409326424871,Basic Decision Tree
137,Recall,0.8864864864864865,Basic Decision Tree
138,Accuracy,0.9641062455132807,Basic Decision Tree
139,F1Score,0.8677248677248677,Basic Decision Tree
140,Precision,0.8776595744680851,Custom Decision Tree
141,Recall,0.8918918918918919,Custom Decision Tree
142,Accuracy,0.9691313711414213,Custom Decision Tree
143,F1Score,0.8847184986595175,Custom Decision Tree
144,Precision,0.8556149732620321,Basic Decision Tree
145,Recall,0.8648648648648649,Basic Decision Tree
146,Accuracy,0.9626704953338119,Basic Decision Tree
147,F1Score,0.860215053763441,Basic Decision Tree
148,Precision,0.8350515463917526,Custom Decision Tree
149,Recall,0.8756756756756757,Custom Decision Tree
150,Accuracy,0.9605168700646087,Custom Decision Tree
151,F1Score,0.8548812664907652,Custom Decision Tree
152,Precision,0.8663101604278075,Basic Decision Tree
153,Recall,0.8756756756756757,Basic Decision Tree
154,Accuracy,0.9655419956927495,Basic Decision Tree
155,F1Score,0.8709677419354839,Basic Decision Tree
156,Precision,0.9034090909090909,Custom Decision Tree
157,Recall,0.8594594594594595,Custom Decision Tree
158,Accuracy,0.9691313711414213,Custom Decision Tree
159,F1Score,0.8808864265927977,Custom Decision Tree
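The rows above are the tail of a results file that scores a "Basic Decision Tree" against a "Custom Decision Tree" on repeated train/test splits, logging Precision, Recall, Accuracy and F1 per run. As a hedged illustration only — the dataset, the split size, the column name "Method", and the CustomDecisionTree class are assumptions, not the repository's actual experiment script — rows of this shape could be produced like this:

# Sketch, not the repository's code: evaluate two tree implementations per split
# and append one row per metric in the same CSV layout as above.
import pandas as pd
from sklearn.datasets import load_breast_cancer            # placeholder dataset
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import (accuracy_score, f1_score,
                             precision_score, recall_score)

X, y = load_breast_cancer(return_X_y=True)
rows = []
for run in range(10):                                       # repeated random splits
    X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=run)
    models = {"Basic Decision Tree": DecisionTreeClassifier(random_state=run)}
    # models["Custom Decision Tree"] = CustomDecisionTree()  # hypothetical class
    for name, clf in models.items():
        pred = clf.fit(X_tr, y_tr).predict(X_te)
        for measure, score in [("Precision", precision_score(y_te, pred)),
                               ("Recall", recall_score(y_te, pred)),
                               ("Accuracy", accuracy_score(y_te, pred)),
                               ("F1Score", f1_score(y_te, pred))]:
            # "Method" is an assumed column name; the file's header row is not
            # visible in this excerpt.
            rows.append({"Measure": measure, "Value": score, "Method": name})

pd.DataFrame(rows).to_csv("basic_vs_custom.csv")             # index -> leading unnamed column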
@@ -0,0 +1,17 @@
,Measure,Value,Criterion
0,Accuracy,0.9619526202440776,Criterion-gini
1,F1Score,0.8579088471849866,Criterion-gini
2,Accuracy,0.9698492462311558,Criterion-entropy
3,F1Score,0.8870967741935484,Criterion-entropy
4,Accuracy,0.9655419956927495,Criterion-gini
5,F1Score,0.8709677419354839,Criterion-gini
6,Accuracy,0.9691313711414213,Criterion-entropy
7,F1Score,0.8853333333333333,Criterion-entropy
8,Accuracy,0.9641062455132807,Criterion-gini
9,F1Score,0.8655913978494624,Criterion-gini
10,Accuracy,0.968413496051687,Criterion-entropy
11,F1Score,0.8810810810810811,Criterion-entropy
12,Accuracy,0.9698492462311558,Criterion-gini
13,F1Score,0.8864864864864865,Criterion-gini
14,Accuracy,0.9691313711414213,Criterion-entropy
15,F1Score,0.8847184986595175,Criterion-entropy
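This file records Accuracy and F1 for the gini and entropy split criteria over several random splits. A minimal sketch of how a one-parameter-at-a-time sweep like this could be generated — it also covers the shape of the Depth, MaxFeature, MaxLeafNodes, MinImpurityDecrease and MinSampleSplit files that follow — assuming scikit-learn's DecisionTreeClassifier and a placeholder dataset rather than the repository's own script:

# Sketch only: sweep one DecisionTreeClassifier parameter at a time and
# log Accuracy/F1 per random split, one CSV per parameter.
import pandas as pd
from sklearn.datasets import load_breast_cancer             # placeholder dataset
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score, f1_score

grids = {                                                    # values mirror the tables
    "Criterion": {"criterion": ["gini", "entropy"]},
    "Depth": {"max_depth": [None, 50, 25, 10, 5, 3]},
    "MinSampleSplit": {"min_samples_split": [2, 10, 25, 50, 100, 250]},
}
X, y = load_breast_cancer(return_X_y=True)
for label, grid in grids.items():
    (param, values), rows = next(iter(grid.items())), []
    for run in range(5):                                     # repeated random splits
        X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=run)
        for v in values:
            clf = DecisionTreeClassifier(**{param: v}, random_state=run)
            pred = clf.fit(X_tr, y_tr).predict(X_te)
            rows.append(("Accuracy", accuracy_score(y_te, pred), f"{label}-{v}"))
            rows.append(("F1Score", f1_score(y_te, pred), f"{label}-{v}"))
    pd.DataFrame(rows, columns=["Measure", "Value", label]).to_csv(f"{label}.csv")

The f"{label}-{v}" tag reproduces labels such as "Criterion-gini" and "Depth-None" seen in these files; the number of repetitions and the 70/30 split are assumptions.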
@@ -0,0 +1,49 @@
,Measure,Value,Depth
0,Accuracy,0.9612347451543432,Depth-None
1,F1Score,0.8586387434554974,Depth-None
2,Accuracy,0.9633883704235463,Depth-50
3,F1Score,0.8632707774798928,Depth-50
4,Accuracy,0.9597989949748744,Depth-25
5,F1Score,0.8478260869565218,Depth-25
6,Accuracy,0.9612347451543432,Depth-10
7,F1Score,0.8524590163934427,Depth-10
8,Accuracy,0.9361091170136396,Depth-5
9,F1Score,0.7613941018766757,Depth-5
10,Accuracy,0.9411342426417804,Depth-3
11,F1Score,0.7630057803468209,Depth-3
12,Accuracy,0.9662598707824839,Depth-None
13,F1Score,0.8733153638814016,Depth-None
14,Accuracy,0.9676956209619526,Depth-50
15,F1Score,0.8780487804878049,Depth-50
16,Accuracy,0.9655419956927495,Depth-25
17,F1Score,0.8695652173913042,Depth-25
18,Accuracy,0.9612347451543432,Depth-10
19,F1Score,0.8524590163934427,Depth-10
20,Accuracy,0.9361091170136396,Depth-5
21,F1Score,0.7613941018766757,Depth-5
22,Accuracy,0.9411342426417804,Depth-3
23,F1Score,0.7630057803468209,Depth-3
24,Accuracy,0.9633883704235463,Depth-None
25,F1Score,0.8640000000000001,Depth-None
26,Accuracy,0.9641062455132807,Depth-50
27,F1Score,0.8648648648648649,Depth-50
28,Accuracy,0.9662598707824839,Depth-25
29,F1Score,0.8719346049046321,Depth-25
30,Accuracy,0.9641062455132807,Depth-10
31,F1Score,0.861878453038674,Depth-10
32,Accuracy,0.9353912419239052,Depth-5
33,F1Score,0.7593582887700535,Depth-5
34,Accuracy,0.9404163675520459,Depth-3
35,F1Score,0.7608069164265131,Depth-3
36,Accuracy,0.968413496051687,Depth-None
37,F1Score,0.8823529411764706,Depth-None
38,Accuracy,0.9641062455132807,Depth-50
39,F1Score,0.8655913978494624,Depth-50
40,Accuracy,0.9633883704235463,Depth-25
41,F1Score,0.8587257617728532,Depth-25
42,Accuracy,0.9605168700646087,Depth-10
43,F1Score,0.8501362397820164,Depth-10
44,Accuracy,0.9346733668341709,Depth-5
45,F1Score,0.7547169811320754,Depth-5
46,Accuracy,0.9411342426417804,Depth-3
47,F1Score,0.7630057803468209,Depth-3
@@ -0,0 +1,121 @@
,Measure,Value,MaxFeature
|
||||
0,Accuracy,0.9612347451543432,MaxFeature-None
|
||||
1,F1Score,0.8571428571428572,MaxFeature-None
|
||||
2,Accuracy,0.9540559942569993,MaxFeature-sqrt
|
||||
3,F1Score,0.815028901734104,MaxFeature-sqrt
|
||||
4,Accuracy,0.9526202440775305,MaxFeature-log2
|
||||
5,F1Score,0.8092485549132947,MaxFeature-log2
|
||||
6,Accuracy,0.9741564967695621,MaxFeature-0.25
|
||||
7,F1Score,0.9027027027027028,MaxFeature-0.25
|
||||
8,Accuracy,0.9691313711414213,MaxFeature-0.5
|
||||
9,F1Score,0.8840970350404314,MaxFeature-0.5
|
||||
10,Accuracy,0.9734386216798278,MaxFeature-0.75
|
||||
11,F1Score,0.899728997289973,MaxFeature-0.75
|
||||
12,Accuracy,0.964824120603015,MaxFeature-None
|
||||
13,F1Score,0.8672086720867209,MaxFeature-None
|
||||
14,Accuracy,0.9576453697056713,MaxFeature-sqrt
|
||||
15,F1Score,0.8374655647382921,MaxFeature-sqrt
|
||||
16,Accuracy,0.9526202440775305,MaxFeature-log2
|
||||
17,F1Score,0.8081395348837209,MaxFeature-log2
|
||||
18,Accuracy,0.9734386216798278,MaxFeature-0.25
|
||||
19,F1Score,0.9008042895442359,MaxFeature-0.25
|
||||
20,Accuracy,0.9727207465900933,MaxFeature-0.5
|
||||
21,F1Score,0.8961748633879781,MaxFeature-0.5
|
||||
22,Accuracy,0.9669777458722182,MaxFeature-0.75
|
||||
23,F1Score,0.875,MaxFeature-0.75
|
||||
24,Accuracy,0.9619526202440776,MaxFeature-None
|
||||
25,F1Score,0.8571428571428572,MaxFeature-None
|
||||
26,Accuracy,0.9748743718592965,MaxFeature-sqrt
|
||||
27,F1Score,0.9046321525885558,MaxFeature-sqrt
|
||||
28,Accuracy,0.9511844938980617,MaxFeature-log2
|
||||
29,F1Score,0.8045977011494253,MaxFeature-log2
|
||||
30,Accuracy,0.9691313711414213,MaxFeature-0.25
|
||||
31,F1Score,0.8847184986595175,MaxFeature-0.25
|
||||
32,Accuracy,0.9705671213208902,MaxFeature-0.5
|
||||
33,F1Score,0.8906666666666666,MaxFeature-0.5
|
||||
34,Accuracy,0.9662598707824839,MaxFeature-0.75
|
||||
35,F1Score,0.872628726287263,MaxFeature-0.75
|
||||
36,Accuracy,0.9662598707824839,MaxFeature-None
|
||||
37,F1Score,0.873994638069705,MaxFeature-None
|
||||
38,Accuracy,0.95908111988514,MaxFeature-sqrt
|
||||
39,F1Score,0.8438356164383561,MaxFeature-sqrt
|
||||
40,Accuracy,0.9490308686288585,MaxFeature-log2
|
||||
41,F1Score,0.8011204481792717,MaxFeature-log2
|
||||
42,Accuracy,0.9755922469490309,MaxFeature-0.25
|
||||
43,F1Score,0.9050279329608939,MaxFeature-0.25
|
||||
44,Accuracy,0.9712849964106246,MaxFeature-0.5
|
||||
45,F1Score,0.8901098901098903,MaxFeature-0.5
|
||||
46,Accuracy,0.9705671213208902,MaxFeature-0.75
|
||||
47,F1Score,0.888888888888889,MaxFeature-0.75
|
||||
48,Accuracy,0.9619526202440776,MaxFeature-None
|
||||
49,F1Score,0.8579088471849866,MaxFeature-None
|
||||
50,Accuracy,0.9576453697056713,MaxFeature-sqrt
|
||||
51,F1Score,0.8319088319088319,MaxFeature-sqrt
|
||||
52,Accuracy,0.9432878679109835,MaxFeature-log2
|
||||
53,F1Score,0.774928774928775,MaxFeature-log2
|
||||
54,Accuracy,0.9619526202440776,MaxFeature-0.25
|
||||
55,F1Score,0.8594164456233423,MaxFeature-0.25
|
||||
56,Accuracy,0.9626704953338119,MaxFeature-0.5
|
||||
57,F1Score,0.8624338624338624,MaxFeature-0.5
|
||||
58,Accuracy,0.9641062455132807,MaxFeature-0.75
|
||||
59,F1Score,0.8641304347826086,MaxFeature-0.75
|
||||
60,Accuracy,0.9612347451543432,MaxFeature-None
|
||||
61,F1Score,0.8571428571428572,MaxFeature-None
|
||||
62,Accuracy,0.9547738693467337,MaxFeature-sqrt
|
||||
63,F1Score,0.8205128205128205,MaxFeature-sqrt
|
||||
64,Accuracy,0.9511844938980617,MaxFeature-log2
|
||||
65,F1Score,0.8121546961325967,MaxFeature-log2
|
||||
66,Accuracy,0.9755922469490309,MaxFeature-0.25
|
||||
67,F1Score,0.9065934065934066,MaxFeature-0.25
|
||||
68,Accuracy,0.9705671213208902,MaxFeature-0.5
|
||||
69,F1Score,0.8894878706199462,MaxFeature-0.5
|
||||
70,Accuracy,0.968413496051687,MaxFeature-0.75
|
||||
71,F1Score,0.8848167539267016,MaxFeature-0.75
|
||||
72,Accuracy,0.9633883704235463,MaxFeature-None
|
||||
73,F1Score,0.8625336927223719,MaxFeature-None
|
||||
74,Accuracy,0.9626704953338119,MaxFeature-sqrt
|
||||
75,F1Score,0.8539325842696629,MaxFeature-sqrt
|
||||
76,Accuracy,0.9425699928212491,MaxFeature-log2
|
||||
77,F1Score,0.7660818713450294,MaxFeature-log2
|
||||
78,Accuracy,0.9705671213208902,MaxFeature-0.25
|
||||
79,F1Score,0.8894878706199462,MaxFeature-0.25
|
||||
80,Accuracy,0.9770279971284996,MaxFeature-0.5
|
||||
81,F1Score,0.9148936170212766,MaxFeature-0.5
|
||||
82,Accuracy,0.9633883704235463,MaxFeature-0.75
|
||||
83,F1Score,0.8640000000000001,MaxFeature-0.75
|
||||
84,Accuracy,0.9662598707824839,MaxFeature-None
|
||||
85,F1Score,0.8733153638814016,MaxFeature-None
|
||||
86,Accuracy,0.95908111988514,MaxFeature-sqrt
|
||||
87,F1Score,0.841225626740947,MaxFeature-sqrt
|
||||
88,Accuracy,0.9440057430007178,MaxFeature-log2
|
||||
89,F1Score,0.7758620689655172,MaxFeature-log2
|
||||
90,Accuracy,0.9763101220387652,MaxFeature-0.25
|
||||
91,F1Score,0.9095890410958904,MaxFeature-0.25
|
||||
92,Accuracy,0.9712849964106246,MaxFeature-0.5
|
||||
93,F1Score,0.8895027624309393,MaxFeature-0.5
|
||||
94,Accuracy,0.9626704953338119,MaxFeature-0.75
|
||||
95,F1Score,0.8594594594594595,MaxFeature-0.75
|
||||
96,Accuracy,0.9619526202440776,MaxFeature-None
|
||||
97,F1Score,0.8579088471849866,MaxFeature-None
|
||||
98,Accuracy,0.9626704953338119,MaxFeature-sqrt
|
||||
99,F1Score,0.8452380952380952,MaxFeature-sqrt
|
||||
100,Accuracy,0.9547738693467337,MaxFeature-log2
|
||||
101,F1Score,0.8130563798219584,MaxFeature-log2
|
||||
102,Accuracy,0.9705671213208902,MaxFeature-0.25
|
||||
103,F1Score,0.8876712328767123,MaxFeature-0.25
|
||||
104,Accuracy,0.9777458722182341,MaxFeature-0.5
|
||||
105,F1Score,0.9155313351498637,MaxFeature-0.5
|
||||
106,Accuracy,0.9655419956927495,MaxFeature-0.75
|
||||
107,F1Score,0.8702702702702703,MaxFeature-0.75
|
||||
108,Accuracy,0.9655419956927495,MaxFeature-None
|
||||
109,F1Score,0.8702702702702703,MaxFeature-None
|
||||
110,Accuracy,0.9490308686288585,MaxFeature-sqrt
|
||||
111,F1Score,0.8116710875331564,MaxFeature-sqrt
|
||||
112,Accuracy,0.9526202440775305,MaxFeature-log2
|
||||
113,F1Score,0.8070175438596491,MaxFeature-log2
|
||||
114,Accuracy,0.9669777458722182,MaxFeature-0.25
|
||||
115,F1Score,0.8776595744680851,MaxFeature-0.25
|
||||
116,Accuracy,0.9734386216798278,MaxFeature-0.5
|
||||
117,F1Score,0.8969359331476323,MaxFeature-0.5
|
||||
118,Accuracy,0.9748743718592965,MaxFeature-0.75
|
||||
119,F1Score,0.9030470914127423,MaxFeature-0.75
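In scikit-learn, the MaxFeature labels above map onto DecisionTreeClassifier's max_features argument: None considers every feature at each split, "sqrt" and "log2" consider roughly sqrt(n_features) and log2(n_features) candidate features, and a float such as 0.25 considers that fraction of the features. A small sketch of the mapping, with a placeholder feature count (the real feature matrix is not shown in this excerpt):

# Sketch of how the MaxFeature labels correspond to features tried per split.
import math

n_features = 1000                                   # placeholder vocabulary/feature count
settings = {"None": None, "sqrt": "sqrt", "log2": "log2",
            "0.25": 0.25, "0.5": 0.5, "0.75": 0.75}
for label, value in settings.items():
    if value is None:
        considered = n_features
    elif value == "sqrt":
        considered = int(math.sqrt(n_features))
    elif value == "log2":
        considered = int(math.log2(n_features))
    else:
        considered = max(1, int(value * n_features))
    print(f"max_features={label!r:7} -> about {considered} features tried per split")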
@@ -0,0 +1,271 @@
,Measure,Value,MaxLeafNodes
|
||||
0,Accuracy,0.8786791098348887,2
|
||||
1,F1Score,0.48318042813455664,2
|
||||
2,Accuracy,0.9404163675520459,10
|
||||
3,F1Score,0.7844155844155843,10
|
||||
4,Accuracy,0.95908111988514,20
|
||||
5,F1Score,0.841225626740947,20
|
||||
6,Accuracy,0.9626704953338119,30
|
||||
7,F1Score,0.8547486033519553,30
|
||||
8,Accuracy,0.968413496051687,40
|
||||
9,F1Score,0.8784530386740332,40
|
||||
10,Accuracy,0.9669777458722182,50
|
||||
11,F1Score,0.87292817679558,50
|
||||
12,Accuracy,0.9626704953338119,60
|
||||
13,F1Score,0.8609625668449198,60
|
||||
14,Accuracy,0.9655419956927495,70
|
||||
15,F1Score,0.8688524590163934,70
|
||||
16,Accuracy,0.964824120603015,80
|
||||
17,F1Score,0.8672086720867209,80
|
||||
18,Accuracy,0.9626704953338119,90
|
||||
19,F1Score,0.8631578947368422,90
|
||||
20,Accuracy,0.9641062455132807,100
|
||||
21,F1Score,0.8648648648648649,100
|
||||
22,Accuracy,0.9612347451543432,110
|
||||
23,F1Score,0.8556149732620321,110
|
||||
24,Accuracy,0.9641062455132807,120
|
||||
25,F1Score,0.8670212765957447,120
|
||||
26,Accuracy,0.9655419956927495,130
|
||||
27,F1Score,0.8723404255319149,130
|
||||
28,Accuracy,0.9655419956927495,140
|
||||
29,F1Score,0.8702702702702703,140
|
||||
30,Accuracy,0.9633883704235463,150
|
||||
31,F1Score,0.8647214854111406,150
|
||||
32,Accuracy,0.9605168700646087,160
|
||||
33,F1Score,0.8517520215633423,160
|
||||
34,Accuracy,0.9612347451543432,170
|
||||
35,F1Score,0.8571428571428572,170
|
||||
36,Accuracy,0.9641062455132807,180
|
||||
37,F1Score,0.8648648648648649,180
|
||||
38,Accuracy,0.9576453697056713,190
|
||||
39,F1Score,0.8426666666666667,190
|
||||
40,Accuracy,0.9662598707824839,200
|
||||
41,F1Score,0.872628726287263,200
|
||||
42,Accuracy,0.9612347451543432,210
|
||||
43,F1Score,0.8578947368421053,210
|
||||
44,Accuracy,0.9669777458722182,220
|
||||
45,F1Score,0.8763440860215054,220
|
||||
46,Accuracy,0.9619526202440776,230
|
||||
47,F1Score,0.8601583113456465,230
|
||||
48,Accuracy,0.9669777458722182,240
|
||||
49,F1Score,0.8756756756756757,240
|
||||
50,Accuracy,0.964824120603015,250
|
||||
51,F1Score,0.8679245283018868,250
|
||||
52,Accuracy,0.9641062455132807,260
|
||||
53,F1Score,0.8670212765957447,260
|
||||
54,Accuracy,0.8786791098348887,2
|
||||
55,F1Score,0.48318042813455664,2
|
||||
56,Accuracy,0.9382627422828428,10
|
||||
57,F1Score,0.7783505154639175,10
|
||||
58,Accuracy,0.95908111988514,20
|
||||
59,F1Score,0.8463611859838276,20
|
||||
60,Accuracy,0.9633883704235463,30
|
||||
61,F1Score,0.8571428571428571,30
|
||||
62,Accuracy,0.9698492462311558,40
|
||||
63,F1Score,0.8820224719101123,40
|
||||
64,Accuracy,0.968413496051687,50
|
||||
65,F1Score,0.8791208791208792,50
|
||||
66,Accuracy,0.9626704953338119,60
|
||||
67,F1Score,0.8586956521739131,60
|
||||
68,Accuracy,0.964824120603015,70
|
||||
69,F1Score,0.8693333333333333,70
|
||||
70,Accuracy,0.964824120603015,80
|
||||
71,F1Score,0.8679245283018868,80
|
||||
72,Accuracy,0.9655419956927495,90
|
||||
73,F1Score,0.8702702702702703,90
|
||||
74,Accuracy,0.9612347451543432,100
|
||||
75,F1Score,0.8556149732620321,100
|
||||
76,Accuracy,0.9612347451543432,110
|
||||
77,F1Score,0.8532608695652173,110
|
||||
78,Accuracy,0.9612347451543432,120
|
||||
79,F1Score,0.8586387434554974,120
|
||||
80,Accuracy,0.9676956209619526,130
|
||||
81,F1Score,0.88,130
|
||||
82,Accuracy,0.9655419956927495,140
|
||||
83,F1Score,0.8709677419354839,140
|
||||
84,Accuracy,0.9626704953338119,150
|
||||
85,F1Score,0.8624338624338624,150
|
||||
86,Accuracy,0.968413496051687,160
|
||||
87,F1Score,0.8810810810810811,160
|
||||
88,Accuracy,0.9641062455132807,170
|
||||
89,F1Score,0.8655913978494624,170
|
||||
90,Accuracy,0.9641062455132807,180
|
||||
91,F1Score,0.868421052631579,180
|
||||
92,Accuracy,0.9597989949748744,190
|
||||
93,F1Score,0.8526315789473685,190
|
||||
94,Accuracy,0.9605168700646087,200
|
||||
95,F1Score,0.8533333333333334,200
|
||||
96,Accuracy,0.9676956209619526,210
|
||||
97,F1Score,0.8787061994609164,210
|
||||
98,Accuracy,0.9619526202440776,220
|
||||
99,F1Score,0.8579088471849866,220
|
||||
100,Accuracy,0.9641062455132807,230
|
||||
101,F1Score,0.8663101604278075,230
|
||||
102,Accuracy,0.964824120603015,240
|
||||
103,F1Score,0.8664850136239781,240
|
||||
104,Accuracy,0.9641062455132807,250
|
||||
105,F1Score,0.8677248677248677,250
|
||||
106,Accuracy,0.9662598707824839,260
|
||||
107,F1Score,0.873994638069705,260
|
||||
108,Accuracy,0.8786791098348887,2
|
||||
109,F1Score,0.48318042813455664,2
|
||||
110,Accuracy,0.9382627422828428,10
|
||||
111,F1Score,0.7783505154639175,10
|
||||
112,Accuracy,0.9597989949748744,20
|
||||
113,F1Score,0.849462365591398,20
|
||||
114,Accuracy,0.964824120603015,30
|
||||
115,F1Score,0.8635097493036211,30
|
||||
116,Accuracy,0.9662598707824839,40
|
||||
117,F1Score,0.8705234159779616,40
|
||||
118,Accuracy,0.9691313711414213,50
|
||||
119,F1Score,0.8802228412256268,50
|
||||
120,Accuracy,0.9655419956927495,60
|
||||
121,F1Score,0.8709677419354839,60
|
||||
122,Accuracy,0.9633883704235463,70
|
||||
123,F1Score,0.8647214854111406,70
|
||||
124,Accuracy,0.9569274946159368,80
|
||||
125,F1Score,0.8412698412698413,80
|
||||
126,Accuracy,0.9633883704235463,90
|
||||
127,F1Score,0.8640000000000001,90
|
||||
128,Accuracy,0.9619526202440776,100
|
||||
129,F1Score,0.8601583113456465,100
|
||||
130,Accuracy,0.9576453697056713,110
|
||||
131,F1Score,0.8426666666666667,110
|
||||
132,Accuracy,0.9612347451543432,120
|
||||
133,F1Score,0.8571428571428572,120
|
||||
134,Accuracy,0.9633883704235463,130
|
||||
135,F1Score,0.8640000000000001,130
|
||||
136,Accuracy,0.9669777458722182,140
|
||||
137,F1Score,0.8756756756756757,140
|
||||
138,Accuracy,0.9612347451543432,150
|
||||
139,F1Score,0.8548387096774193,150
|
||||
140,Accuracy,0.9612347451543432,160
|
||||
141,F1Score,0.8524590163934427,160
|
||||
142,Accuracy,0.9619526202440776,170
|
||||
143,F1Score,0.8594164456233423,170
|
||||
144,Accuracy,0.9633883704235463,180
|
||||
145,F1Score,0.861788617886179,180
|
||||
146,Accuracy,0.9669777458722182,190
|
||||
147,F1Score,0.8756756756756757,190
|
||||
148,Accuracy,0.95908111988514,200
|
||||
149,F1Score,0.8488063660477454,200
|
||||
150,Accuracy,0.9626704953338119,210
|
||||
151,F1Score,0.860215053763441,210
|
||||
152,Accuracy,0.9655419956927495,220
|
||||
153,F1Score,0.8709677419354839,220
|
||||
154,Accuracy,0.9633883704235463,230
|
||||
155,F1Score,0.8632707774798928,230
|
||||
156,Accuracy,0.9655419956927495,240
|
||||
157,F1Score,0.8723404255319149,240
|
||||
158,Accuracy,0.968413496051687,250
|
||||
159,F1Score,0.8810810810810811,250
|
||||
160,Accuracy,0.964824120603015,260
|
||||
161,F1Score,0.8672086720867209,260
|
||||
162,Accuracy,0.8786791098348887,2
|
||||
163,F1Score,0.48318042813455664,2
|
||||
164,Accuracy,0.9404163675520459,10
|
||||
165,F1Score,0.7844155844155843,10
|
||||
166,Accuracy,0.9569274946159368,20
|
||||
167,F1Score,0.8351648351648352,20
|
||||
168,Accuracy,0.9612347451543432,30
|
||||
169,F1Score,0.848314606741573,30
|
||||
170,Accuracy,0.9676956209619526,40
|
||||
171,F1Score,0.8760330578512396,40
|
||||
172,Accuracy,0.9669777458722182,50
|
||||
173,F1Score,0.87292817679558,50
|
||||
174,Accuracy,0.9655419956927495,60
|
||||
175,F1Score,0.8709677419354839,60
|
||||
176,Accuracy,0.9662598707824839,70
|
||||
177,F1Score,0.8733153638814016,70
|
||||
178,Accuracy,0.9626704953338119,80
|
||||
179,F1Score,0.8617021276595744,80
|
||||
180,Accuracy,0.9662598707824839,90
|
||||
181,F1Score,0.8746666666666667,90
|
||||
182,Accuracy,0.9655419956927495,100
|
||||
183,F1Score,0.8709677419354839,100
|
||||
184,Accuracy,0.9626704953338119,110
|
||||
185,F1Score,0.8617021276595744,110
|
||||
186,Accuracy,0.9641062455132807,120
|
||||
187,F1Score,0.8670212765957447,120
|
||||
188,Accuracy,0.964824120603015,130
|
||||
189,F1Score,0.8679245283018868,130
|
||||
190,Accuracy,0.9641062455132807,140
|
||||
191,F1Score,0.8655913978494624,140
|
||||
192,Accuracy,0.9655419956927495,150
|
||||
193,F1Score,0.8716577540106951,150
|
||||
194,Accuracy,0.9669777458722182,160
|
||||
195,F1Score,0.8756756756756757,160
|
||||
196,Accuracy,0.9597989949748744,170
|
||||
197,F1Score,0.8478260869565218,170
|
||||
198,Accuracy,0.9605168700646087,180
|
||||
199,F1Score,0.8556430446194225,180
|
||||
200,Accuracy,0.9669777458722182,190
|
||||
201,F1Score,0.8770053475935828,190
|
||||
202,Accuracy,0.964824120603015,200
|
||||
203,F1Score,0.8686327077747988,200
|
||||
204,Accuracy,0.9655419956927495,210
|
||||
205,F1Score,0.8709677419354839,210
|
||||
206,Accuracy,0.9641062455132807,220
|
||||
207,F1Score,0.8677248677248677,220
|
||||
208,Accuracy,0.9691313711414213,230
|
||||
209,F1Score,0.8840970350404314,230
|
||||
210,Accuracy,0.9612347451543432,240
|
||||
211,F1Score,0.8578947368421053,240
|
||||
212,Accuracy,0.9655419956927495,250
|
||||
213,F1Score,0.8709677419354839,250
|
||||
214,Accuracy,0.9633883704235463,260
|
||||
215,F1Score,0.8647214854111406,260
|
||||
216,Accuracy,0.8786791098348887,2
|
||||
217,F1Score,0.48318042813455664,2
|
||||
218,Accuracy,0.9382627422828428,10
|
||||
219,F1Score,0.7783505154639175,10
|
||||
220,Accuracy,0.9597989949748744,20
|
||||
221,F1Score,0.8435754189944135,20
|
||||
222,Accuracy,0.9626704953338119,30
|
||||
223,F1Score,0.8547486033519553,30
|
||||
224,Accuracy,0.9655419956927495,40
|
||||
225,F1Score,0.8666666666666667,40
|
||||
226,Accuracy,0.9705671213208902,50
|
||||
227,F1Score,0.8864265927977839,50
|
||||
228,Accuracy,0.9633883704235463,60
|
||||
229,F1Score,0.8610354223433242,60
|
||||
230,Accuracy,0.964824120603015,70
|
||||
231,F1Score,0.8672086720867209,70
|
||||
232,Accuracy,0.9633883704235463,80
|
||||
233,F1Score,0.8625336927223719,80
|
||||
234,Accuracy,0.9669777458722182,90
|
||||
235,F1Score,0.8770053475935828,90
|
||||
236,Accuracy,0.9676956209619526,100
|
||||
237,F1Score,0.8787061994609164,100
|
||||
238,Accuracy,0.9655419956927495,110
|
||||
239,F1Score,0.8716577540106951,110
|
||||
240,Accuracy,0.9619526202440776,120
|
||||
241,F1Score,0.8601583113456465,120
|
||||
242,Accuracy,0.9662598707824839,130
|
||||
243,F1Score,0.8733153638814016,130
|
||||
244,Accuracy,0.9698492462311558,140
|
||||
245,F1Score,0.8864864864864865,140
|
||||
246,Accuracy,0.9669777458722182,150
|
||||
247,F1Score,0.8776595744680851,150
|
||||
248,Accuracy,0.9641062455132807,160
|
||||
249,F1Score,0.8626373626373626,160
|
||||
250,Accuracy,0.9655419956927495,170
|
||||
251,F1Score,0.8736842105263158,170
|
||||
252,Accuracy,0.9641062455132807,180
|
||||
253,F1Score,0.8677248677248677,180
|
||||
254,Accuracy,0.9669777458722182,190
|
||||
255,F1Score,0.8776595744680851,190
|
||||
256,Accuracy,0.9662598707824839,200
|
||||
257,F1Score,0.8753315649867375,200
|
||||
258,Accuracy,0.9626704953338119,210
|
||||
259,F1Score,0.8638743455497383,210
|
||||
260,Accuracy,0.964824120603015,220
|
||||
261,F1Score,0.8693333333333333,220
|
||||
262,Accuracy,0.9662598707824839,230
|
||||
263,F1Score,0.8746666666666667,230
|
||||
264,Accuracy,0.9641062455132807,240
|
||||
265,F1Score,0.8648648648648649,240
|
||||
266,Accuracy,0.9669777458722182,250
|
||||
267,F1Score,0.8756756756756757,250
|
||||
268,Accuracy,0.9669777458722182,260
|
||||
269,F1Score,0.8763440860215054,260
@@ -0,0 +1,261 @@
,Measure,Value,MinImpurityDecrease
|
||||
0,Accuracy,0.9583632447954056,0.0%
|
||||
1,F1Score,0.8473684210526314,0.0%
|
||||
2,Accuracy,0.9310839913854989,0.5%
|
||||
3,F1Score,0.7587939698492463,0.5%
|
||||
4,Accuracy,0.9339554917444365,1.0%
|
||||
5,F1Score,0.7444444444444445,1.0%
|
||||
6,Accuracy,0.9167264895908112,1.5%
|
||||
7,F1Score,0.6979166666666666,1.5%
|
||||
8,Accuracy,0.908829863603733,2.0%
|
||||
9,F1Score,0.6576819407008087,2.0%
|
||||
10,Accuracy,0.908829863603733,2.5%
|
||||
11,F1Score,0.6576819407008087,2.5%
|
||||
12,Accuracy,0.908829863603733,3.0%
|
||||
13,F1Score,0.6576819407008087,3.0%
|
||||
14,Accuracy,0.908829863603733,3.5%
|
||||
15,F1Score,0.6576819407008087,3.5%
|
||||
16,Accuracy,0.8786791098348887,4.0%
|
||||
17,F1Score,0.48318042813455664,4.0%
|
||||
18,Accuracy,0.8786791098348887,4.5%
|
||||
19,F1Score,0.48318042813455664,4.5%
|
||||
20,Accuracy,0.8671931083991385,5.0%
|
||||
21,F1Score,0.0,5.0%
|
||||
22,Accuracy,0.8671931083991385,5.5%
|
||||
23,F1Score,0.0,5.5%
|
||||
24,Accuracy,0.8671931083991385,6.0%
|
||||
25,F1Score,0.0,6.0%
|
||||
26,Accuracy,0.9633883704235463,0.0%
|
||||
27,F1Score,0.8654353562005278,0.0%
|
||||
28,Accuracy,0.9332376166547021,0.5%
|
||||
29,F1Score,0.7645569620253164,0.5%
|
||||
30,Accuracy,0.9339554917444365,1.0%
|
||||
31,F1Score,0.7444444444444445,1.0%
|
||||
32,Accuracy,0.9167264895908112,1.5%
|
||||
33,F1Score,0.6979166666666666,1.5%
|
||||
34,Accuracy,0.908829863603733,2.0%
|
||||
35,F1Score,0.6576819407008087,2.0%
|
||||
36,Accuracy,0.908829863603733,2.5%
|
||||
37,F1Score,0.6576819407008087,2.5%
|
||||
38,Accuracy,0.908829863603733,3.0%
|
||||
39,F1Score,0.6576819407008087,3.0%
|
||||
40,Accuracy,0.908829863603733,3.5%
|
||||
41,F1Score,0.6576819407008087,3.5%
|
||||
42,Accuracy,0.8786791098348887,4.0%
|
||||
43,F1Score,0.48318042813455664,4.0%
|
||||
44,Accuracy,0.8786791098348887,4.5%
|
||||
45,F1Score,0.48318042813455664,4.5%
|
||||
46,Accuracy,0.8671931083991385,5.0%
|
||||
47,F1Score,0.0,5.0%
|
||||
48,Accuracy,0.8671931083991385,5.5%
|
||||
49,F1Score,0.0,5.5%
|
||||
50,Accuracy,0.8671931083991385,6.0%
|
||||
51,F1Score,0.0,6.0%
|
||||
52,Accuracy,0.964824120603015,0.0%
|
||||
53,F1Score,0.8693333333333333,0.0%
|
||||
54,Accuracy,0.9332376166547021,0.5%
|
||||
55,F1Score,0.7645569620253164,0.5%
|
||||
56,Accuracy,0.9339554917444365,1.0%
|
||||
57,F1Score,0.7444444444444445,1.0%
|
||||
58,Accuracy,0.9167264895908112,1.5%
|
||||
59,F1Score,0.6979166666666666,1.5%
|
||||
60,Accuracy,0.908829863603733,2.0%
|
||||
61,F1Score,0.6576819407008087,2.0%
|
||||
62,Accuracy,0.908829863603733,2.5%
|
||||
63,F1Score,0.6576819407008087,2.5%
|
||||
64,Accuracy,0.908829863603733,3.0%
|
||||
65,F1Score,0.6576819407008087,3.0%
|
||||
66,Accuracy,0.908829863603733,3.5%
|
||||
67,F1Score,0.6576819407008087,3.5%
|
||||
68,Accuracy,0.8786791098348887,4.0%
|
||||
69,F1Score,0.48318042813455664,4.0%
|
||||
70,Accuracy,0.8786791098348887,4.5%
|
||||
71,F1Score,0.48318042813455664,4.5%
|
||||
72,Accuracy,0.8671931083991385,5.0%
|
||||
73,F1Score,0.0,5.0%
|
||||
74,Accuracy,0.8671931083991385,5.5%
|
||||
75,F1Score,0.0,5.5%
|
||||
76,Accuracy,0.8671931083991385,6.0%
|
||||
77,F1Score,0.0,6.0%
|
||||
78,Accuracy,0.9705671213208902,0.0%
|
||||
79,F1Score,0.888888888888889,0.0%
|
||||
80,Accuracy,0.9310839913854989,0.5%
|
||||
81,F1Score,0.7587939698492463,0.5%
|
||||
82,Accuracy,0.9339554917444365,1.0%
|
||||
83,F1Score,0.7444444444444445,1.0%
|
||||
84,Accuracy,0.9167264895908112,1.5%
|
||||
85,F1Score,0.6979166666666666,1.5%
|
||||
86,Accuracy,0.908829863603733,2.0%
|
||||
87,F1Score,0.6576819407008087,2.0%
|
||||
88,Accuracy,0.908829863603733,2.5%
|
||||
89,F1Score,0.6576819407008087,2.5%
|
||||
90,Accuracy,0.908829863603733,3.0%
|
||||
91,F1Score,0.6576819407008087,3.0%
|
||||
92,Accuracy,0.908829863603733,3.5%
|
||||
93,F1Score,0.6576819407008087,3.5%
|
||||
94,Accuracy,0.8786791098348887,4.0%
|
||||
95,F1Score,0.48318042813455664,4.0%
|
||||
96,Accuracy,0.8786791098348887,4.5%
|
||||
97,F1Score,0.48318042813455664,4.5%
|
||||
98,Accuracy,0.8671931083991385,5.0%
|
||||
99,F1Score,0.0,5.0%
|
||||
100,Accuracy,0.8671931083991385,5.5%
|
||||
101,F1Score,0.0,5.5%
|
||||
102,Accuracy,0.8671931083991385,6.0%
|
||||
103,F1Score,0.0,6.0%
|
||||
104,Accuracy,0.964824120603015,0.0%
|
||||
105,F1Score,0.870026525198939,0.0%
|
||||
106,Accuracy,0.9310839913854989,0.5%
|
||||
107,F1Score,0.7587939698492463,0.5%
|
||||
108,Accuracy,0.9339554917444365,1.0%
|
||||
109,F1Score,0.7444444444444445,1.0%
|
||||
110,Accuracy,0.9167264895908112,1.5%
|
||||
111,F1Score,0.6979166666666666,1.5%
|
||||
112,Accuracy,0.908829863603733,2.0%
|
||||
113,F1Score,0.6576819407008087,2.0%
|
||||
114,Accuracy,0.908829863603733,2.5%
|
||||
115,F1Score,0.6576819407008087,2.5%
|
||||
116,Accuracy,0.908829863603733,3.0%
|
||||
117,F1Score,0.6576819407008087,3.0%
|
||||
118,Accuracy,0.908829863603733,3.5%
|
||||
119,F1Score,0.6576819407008087,3.5%
|
||||
120,Accuracy,0.8786791098348887,4.0%
|
||||
121,F1Score,0.48318042813455664,4.0%
|
||||
122,Accuracy,0.8786791098348887,4.5%
|
||||
123,F1Score,0.48318042813455664,4.5%
|
||||
124,Accuracy,0.8671931083991385,5.0%
|
||||
125,F1Score,0.0,5.0%
|
||||
126,Accuracy,0.8671931083991385,5.5%
|
||||
127,F1Score,0.0,5.5%
|
||||
128,Accuracy,0.8671931083991385,6.0%
|
||||
129,F1Score,0.0,6.0%
|
||||
130,Accuracy,0.9633883704235463,0.0%
|
||||
131,F1Score,0.8661417322834645,0.0%
|
||||
132,Accuracy,0.9310839913854989,0.5%
|
||||
133,F1Score,0.7587939698492463,0.5%
|
||||
134,Accuracy,0.9339554917444365,1.0%
|
||||
135,F1Score,0.7444444444444445,1.0%
|
||||
136,Accuracy,0.9167264895908112,1.5%
|
||||
137,F1Score,0.6979166666666666,1.5%
|
||||
138,Accuracy,0.908829863603733,2.0%
|
||||
139,F1Score,0.6576819407008087,2.0%
|
||||
140,Accuracy,0.908829863603733,2.5%
|
||||
141,F1Score,0.6576819407008087,2.5%
|
||||
142,Accuracy,0.908829863603733,3.0%
|
||||
143,F1Score,0.6576819407008087,3.0%
|
||||
144,Accuracy,0.908829863603733,3.5%
|
||||
145,F1Score,0.6576819407008087,3.5%
|
||||
146,Accuracy,0.8786791098348887,4.0%
|
||||
147,F1Score,0.48318042813455664,4.0%
|
||||
148,Accuracy,0.8786791098348887,4.5%
|
||||
149,F1Score,0.48318042813455664,4.5%
|
||||
150,Accuracy,0.8671931083991385,5.0%
|
||||
151,F1Score,0.0,5.0%
|
||||
152,Accuracy,0.8671931083991385,5.5%
|
||||
153,F1Score,0.0,5.5%
|
||||
154,Accuracy,0.8671931083991385,6.0%
|
||||
155,F1Score,0.0,6.0%
|
||||
156,Accuracy,0.9626704953338119,0.0%
|
||||
157,F1Score,0.8609625668449198,0.0%
|
||||
158,Accuracy,0.9332376166547021,0.5%
|
||||
159,F1Score,0.7645569620253164,0.5%
|
||||
160,Accuracy,0.9339554917444365,1.0%
|
||||
161,F1Score,0.7444444444444445,1.0%
|
||||
162,Accuracy,0.9167264895908112,1.5%
|
||||
163,F1Score,0.6979166666666666,1.5%
|
||||
164,Accuracy,0.908829863603733,2.0%
|
||||
165,F1Score,0.6576819407008087,2.0%
|
||||
166,Accuracy,0.908829863603733,2.5%
|
||||
167,F1Score,0.6576819407008087,2.5%
|
||||
168,Accuracy,0.908829863603733,3.0%
|
||||
169,F1Score,0.6576819407008087,3.0%
|
||||
170,Accuracy,0.908829863603733,3.5%
|
||||
171,F1Score,0.6576819407008087,3.5%
|
||||
172,Accuracy,0.8786791098348887,4.0%
|
||||
173,F1Score,0.48318042813455664,4.0%
|
||||
174,Accuracy,0.8786791098348887,4.5%
|
||||
175,F1Score,0.48318042813455664,4.5%
|
||||
176,Accuracy,0.8671931083991385,5.0%
|
||||
177,F1Score,0.0,5.0%
|
||||
178,Accuracy,0.8671931083991385,5.5%
|
||||
179,F1Score,0.0,5.5%
|
||||
180,Accuracy,0.8671931083991385,6.0%
|
||||
181,F1Score,0.0,6.0%
|
||||
182,Accuracy,0.9633883704235463,0.0%
|
||||
183,F1Score,0.8625336927223719,0.0%
|
||||
184,Accuracy,0.9310839913854989,0.5%
|
||||
185,F1Score,0.7587939698492463,0.5%
|
||||
186,Accuracy,0.9339554917444365,1.0%
|
||||
187,F1Score,0.7444444444444445,1.0%
|
||||
188,Accuracy,0.9167264895908112,1.5%
|
||||
189,F1Score,0.6979166666666666,1.5%
|
||||
190,Accuracy,0.908829863603733,2.0%
|
||||
191,F1Score,0.6576819407008087,2.0%
|
||||
192,Accuracy,0.908829863603733,2.5%
|
||||
193,F1Score,0.6576819407008087,2.5%
|
||||
194,Accuracy,0.908829863603733,3.0%
|
||||
195,F1Score,0.6576819407008087,3.0%
|
||||
196,Accuracy,0.908829863603733,3.5%
|
||||
197,F1Score,0.6576819407008087,3.5%
|
||||
198,Accuracy,0.8786791098348887,4.0%
|
||||
199,F1Score,0.48318042813455664,4.0%
|
||||
200,Accuracy,0.8786791098348887,4.5%
|
||||
201,F1Score,0.48318042813455664,4.5%
|
||||
202,Accuracy,0.8671931083991385,5.0%
|
||||
203,F1Score,0.0,5.0%
|
||||
204,Accuracy,0.8671931083991385,5.5%
|
||||
205,F1Score,0.0,5.5%
|
||||
206,Accuracy,0.8671931083991385,6.0%
|
||||
207,F1Score,0.0,6.0%
|
||||
208,Accuracy,0.9626704953338119,0.0%
|
||||
209,F1Score,0.8609625668449198,0.0%
|
||||
210,Accuracy,0.9310839913854989,0.5%
|
||||
211,F1Score,0.7587939698492463,0.5%
|
||||
212,Accuracy,0.9339554917444365,1.0%
|
||||
213,F1Score,0.7444444444444445,1.0%
|
||||
214,Accuracy,0.9167264895908112,1.5%
|
||||
215,F1Score,0.6979166666666666,1.5%
|
||||
216,Accuracy,0.908829863603733,2.0%
|
||||
217,F1Score,0.6576819407008087,2.0%
|
||||
218,Accuracy,0.908829863603733,2.5%
|
||||
219,F1Score,0.6576819407008087,2.5%
|
||||
220,Accuracy,0.908829863603733,3.0%
|
||||
221,F1Score,0.6576819407008087,3.0%
|
||||
222,Accuracy,0.908829863603733,3.5%
|
||||
223,F1Score,0.6576819407008087,3.5%
|
||||
224,Accuracy,0.8786791098348887,4.0%
|
||||
225,F1Score,0.48318042813455664,4.0%
|
||||
226,Accuracy,0.8786791098348887,4.5%
|
||||
227,F1Score,0.48318042813455664,4.5%
|
||||
228,Accuracy,0.8671931083991385,5.0%
|
||||
229,F1Score,0.0,5.0%
|
||||
230,Accuracy,0.8671931083991385,5.5%
|
||||
231,F1Score,0.0,5.5%
|
||||
232,Accuracy,0.8671931083991385,6.0%
|
||||
233,F1Score,0.0,6.0%
|
||||
234,Accuracy,0.95908111988514,0.0%
|
||||
235,F1Score,0.8463611859838276,0.0%
|
||||
236,Accuracy,0.9332376166547021,0.5%
|
||||
237,F1Score,0.7645569620253164,0.5%
|
||||
238,Accuracy,0.9339554917444365,1.0%
|
||||
239,F1Score,0.7444444444444445,1.0%
|
||||
240,Accuracy,0.9167264895908112,1.5%
|
||||
241,F1Score,0.6979166666666666,1.5%
|
||||
242,Accuracy,0.908829863603733,2.0%
|
||||
243,F1Score,0.6576819407008087,2.0%
|
||||
244,Accuracy,0.908829863603733,2.5%
|
||||
245,F1Score,0.6576819407008087,2.5%
|
||||
246,Accuracy,0.908829863603733,3.0%
|
||||
247,F1Score,0.6576819407008087,3.0%
|
||||
248,Accuracy,0.908829863603733,3.5%
|
||||
249,F1Score,0.6576819407008087,3.5%
|
||||
250,Accuracy,0.8786791098348887,4.0%
|
||||
251,F1Score,0.48318042813455664,4.0%
|
||||
252,Accuracy,0.8786791098348887,4.5%
|
||||
253,F1Score,0.48318042813455664,4.5%
|
||||
254,Accuracy,0.8671931083991385,5.0%
|
||||
255,F1Score,0.0,5.0%
|
||||
256,Accuracy,0.8671931083991385,5.5%
|
||||
257,F1Score,0.0,5.5%
|
||||
258,Accuracy,0.8671931083991385,6.0%
|
||||
259,F1Score,0.0,6.0%
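The MinImpurityDecrease column above is formatted as a percentage, while scikit-learn's min_impurity_decrease argument is a plain fraction, so "0.5%" presumably corresponds to 0.005; that mapping is an assumption, not taken from the experiment script. The flat 0.867 accuracy with an F1 of 0.0 from 5.0% upward is consistent with the threshold pruning every split so that the tree falls back to predicting the majority class. A tiny conversion sketch under that assumed mapping:

# Assumed mapping from the "%" labels above to min_impurity_decrease values.
from sklearn.tree import DecisionTreeClassifier

labels = ["0.0%", "0.5%", "1.0%", "1.5%", "2.0%", "2.5%", "3.0%",
          "3.5%", "4.0%", "4.5%", "5.0%", "5.5%", "6.0%"]
for label in labels:
    threshold = float(label.rstrip("%")) / 100       # "0.5%" -> 0.005
    clf = DecisionTreeClassifier(min_impurity_decrease=threshold)
    print(label, "->", clf.min_impurity_decrease)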
@@ -0,0 +1,241 @@
,Measure,Value,MinSampleSplit
|
||||
0,Accuracy,0.9633883704235463,MinSampleSplit-2
|
||||
1,F1Score,0.8625336927223719,MinSampleSplit-2
|
||||
2,Accuracy,0.9669777458722182,MinSampleSplit-10
|
||||
3,F1Score,0.8763440860215054,MinSampleSplit-10
|
||||
4,Accuracy,0.9655419956927495,MinSampleSplit-25
|
||||
5,F1Score,0.8688524590163934,MinSampleSplit-25
|
||||
6,Accuracy,0.9669777458722182,MinSampleSplit-50
|
||||
7,F1Score,0.8722222222222223,MinSampleSplit-50
|
||||
8,Accuracy,0.9519023689877961,MinSampleSplit-100
|
||||
9,F1Score,0.8295165394402035,MinSampleSplit-100
|
||||
10,Accuracy,0.9483129935391242,MinSampleSplit-250
|
||||
11,F1Score,0.8181818181818182,MinSampleSplit-250
|
||||
12,Accuracy,0.9655419956927495,MinSampleSplit-2
|
||||
13,F1Score,0.8723404255319149,MinSampleSplit-2
|
||||
14,Accuracy,0.9641062455132807,MinSampleSplit-10
|
||||
15,F1Score,0.8677248677248677,MinSampleSplit-10
|
||||
16,Accuracy,0.9655419956927495,MinSampleSplit-25
|
||||
17,F1Score,0.8666666666666667,MinSampleSplit-25
|
||||
18,Accuracy,0.964824120603015,MinSampleSplit-50
|
||||
19,F1Score,0.8664850136239781,MinSampleSplit-50
|
||||
20,Accuracy,0.9454414931801867,MinSampleSplit-100
|
||||
21,F1Score,0.8080808080808082,MinSampleSplit-100
|
||||
22,Accuracy,0.9504666188083274,MinSampleSplit-250
|
||||
23,F1Score,0.8244274809160306,MinSampleSplit-250
|
||||
24,Accuracy,0.95908111988514,MinSampleSplit-2
|
||||
25,F1Score,0.8503937007874017,MinSampleSplit-2
|
||||
26,Accuracy,0.9676956209619526,MinSampleSplit-10
|
||||
27,F1Score,0.8787061994609164,MinSampleSplit-10
|
||||
28,Accuracy,0.9641062455132807,MinSampleSplit-25
|
||||
29,F1Score,0.8641304347826086,MinSampleSplit-25
|
||||
30,Accuracy,0.9698492462311558,MinSampleSplit-50
|
||||
31,F1Score,0.8833333333333334,MinSampleSplit-50
|
||||
32,Accuracy,0.9447236180904522,MinSampleSplit-100
|
||||
33,F1Score,0.8050632911392405,MinSampleSplit-100
|
||||
34,Accuracy,0.9468772433596554,MinSampleSplit-250
|
||||
35,F1Score,0.815,MinSampleSplit-250
|
||||
36,Accuracy,0.964824120603015,MinSampleSplit-2
|
||||
37,F1Score,0.8693333333333333,MinSampleSplit-2
|
||||
38,Accuracy,0.9669777458722182,MinSampleSplit-10
|
||||
39,F1Score,0.8756756756756757,MinSampleSplit-10
|
||||
40,Accuracy,0.9641062455132807,MinSampleSplit-25
|
||||
41,F1Score,0.8641304347826086,MinSampleSplit-25
|
||||
42,Accuracy,0.9633883704235463,MinSampleSplit-50
|
||||
43,F1Score,0.859504132231405,MinSampleSplit-50
|
||||
44,Accuracy,0.9504666188083274,MinSampleSplit-100
|
||||
45,F1Score,0.823529411764706,MinSampleSplit-100
|
||||
46,Accuracy,0.9504666188083274,MinSampleSplit-250
|
||||
47,F1Score,0.8253164556962025,MinSampleSplit-250
|
||||
48,Accuracy,0.9641062455132807,MinSampleSplit-2
|
||||
49,F1Score,0.8648648648648649,MinSampleSplit-2
|
||||
50,Accuracy,0.9676956209619526,MinSampleSplit-10
|
||||
51,F1Score,0.8787061994609164,MinSampleSplit-10
|
||||
52,Accuracy,0.9662598707824839,MinSampleSplit-25
|
||||
53,F1Score,0.8705234159779616,MinSampleSplit-25
|
||||
54,Accuracy,0.9662598707824839,MinSampleSplit-50
|
||||
55,F1Score,0.8698060941828255,MinSampleSplit-50
|
||||
56,Accuracy,0.9504666188083274,MinSampleSplit-100
|
||||
57,F1Score,0.823529411764706,MinSampleSplit-100
|
||||
58,Accuracy,0.949748743718593,MinSampleSplit-250
|
||||
59,F1Score,0.8258706467661691,MinSampleSplit-250
|
||||
60,Accuracy,0.9641062455132807,MinSampleSplit-2
|
||||
61,F1Score,0.8655913978494624,MinSampleSplit-2
|
||||
62,Accuracy,0.9676956209619526,MinSampleSplit-10
|
||||
63,F1Score,0.88,MinSampleSplit-10
|
||||
64,Accuracy,0.9619526202440776,MinSampleSplit-25
|
||||
65,F1Score,0.8547945205479452,MinSampleSplit-25
|
||||
66,Accuracy,0.9655419956927495,MinSampleSplit-50
|
||||
67,F1Score,0.8702702702702703,MinSampleSplit-50
|
||||
68,Accuracy,0.9475951184493898,MinSampleSplit-100
|
||||
69,F1Score,0.8170426065162908,MinSampleSplit-100
|
||||
70,Accuracy,0.9526202440775305,MinSampleSplit-250
|
||||
71,F1Score,0.8341708542713568,MinSampleSplit-250
|
||||
72,Accuracy,0.9641062455132807,MinSampleSplit-2
|
||||
73,F1Score,0.8663101604278075,MinSampleSplit-2
|
||||
74,Accuracy,0.9633883704235463,MinSampleSplit-10
|
||||
75,F1Score,0.8640000000000001,MinSampleSplit-10
|
||||
76,Accuracy,0.9655419956927495,MinSampleSplit-25
|
||||
77,F1Score,0.8681318681318682,MinSampleSplit-25
|
||||
78,Accuracy,0.9691313711414213,MinSampleSplit-50
|
||||
79,F1Score,0.8808864265927977,MinSampleSplit-50
|
||||
80,Accuracy,0.9504666188083274,MinSampleSplit-100
|
||||
81,F1Score,0.8244274809160306,MinSampleSplit-100
|
||||
82,Accuracy,0.9511844938980617,MinSampleSplit-250
|
||||
83,F1Score,0.8282828282828284,MinSampleSplit-250
|
||||
84,Accuracy,0.9669777458722182,MinSampleSplit-2
|
||||
85,F1Score,0.8783068783068783,MinSampleSplit-2
|
||||
86,Accuracy,0.964824120603015,MinSampleSplit-10
|
||||
87,F1Score,0.8679245283018868,MinSampleSplit-10
|
||||
88,Accuracy,0.9641062455132807,MinSampleSplit-25
|
||||
89,F1Score,0.861878453038674,MinSampleSplit-25
|
||||
90,Accuracy,0.9626704953338119,MinSampleSplit-50
|
||||
91,F1Score,0.8579234972677595,MinSampleSplit-50
|
||||
92,Accuracy,0.9468772433596554,MinSampleSplit-100
|
||||
93,F1Score,0.809278350515464,MinSampleSplit-100
|
||||
94,Accuracy,0.9504666188083274,MinSampleSplit-250
|
||||
95,F1Score,0.8261964735516373,MinSampleSplit-250
|
||||
96,Accuracy,0.9662598707824839,MinSampleSplit-2
|
||||
97,F1Score,0.873994638069705,MinSampleSplit-2
|
||||
98,Accuracy,0.9597989949748744,MinSampleSplit-10
|
||||
99,F1Score,0.851851851851852,MinSampleSplit-10
|
||||
100,Accuracy,0.964824120603015,MinSampleSplit-25
|
||||
101,F1Score,0.8664850136239781,MinSampleSplit-25
|
||||
102,Accuracy,0.968413496051687,MinSampleSplit-50
|
||||
103,F1Score,0.8777777777777779,MinSampleSplit-50
|
||||
104,Accuracy,0.9490308686288585,MinSampleSplit-100
|
||||
105,F1Score,0.8174807197943444,MinSampleSplit-100
|
||||
106,Accuracy,0.9526202440775305,MinSampleSplit-250
|
||||
107,F1Score,0.8333333333333334,MinSampleSplit-250
|
||||
108,Accuracy,0.9612347451543432,MinSampleSplit-2
|
||||
109,F1Score,0.8563829787234043,MinSampleSplit-2
|
||||
110,Accuracy,0.9633883704235463,MinSampleSplit-10
|
||||
111,F1Score,0.8610354223433242,MinSampleSplit-10
|
||||
112,Accuracy,0.9612347451543432,MinSampleSplit-25
|
||||
113,F1Score,0.8532608695652173,MinSampleSplit-25
|
||||
114,Accuracy,0.9698492462311558,MinSampleSplit-50
|
||||
115,F1Score,0.8833333333333334,MinSampleSplit-50
|
||||
116,Accuracy,0.9475951184493898,MinSampleSplit-100
|
||||
117,F1Score,0.8132992327365729,MinSampleSplit-100
|
||||
118,Accuracy,0.9511844938980617,MinSampleSplit-250
|
||||
119,F1Score,0.8274111675126904,MinSampleSplit-250
|
||||
120,Accuracy,0.9626704953338119,MinSampleSplit-2
|
||||
121,F1Score,0.8624338624338624,MinSampleSplit-2
|
||||
122,Accuracy,0.9626704953338119,MinSampleSplit-10
|
||||
123,F1Score,0.860215053763441,MinSampleSplit-10
|
||||
124,Accuracy,0.9619526202440776,MinSampleSplit-25
|
||||
125,F1Score,0.8531855955678671,MinSampleSplit-25
|
||||
126,Accuracy,0.9669777458722182,MinSampleSplit-50
|
||||
127,F1Score,0.87292817679558,MinSampleSplit-50
|
||||
128,Accuracy,0.9490308686288585,MinSampleSplit-100
|
||||
129,F1Score,0.8193384223918574,MinSampleSplit-100
|
||||
130,Accuracy,0.9504666188083274,MinSampleSplit-250
|
||||
131,F1Score,0.8270676691729324,MinSampleSplit-250
|
||||
132,Accuracy,0.9619526202440776,MinSampleSplit-2
|
||||
133,F1Score,0.8616187989556137,MinSampleSplit-2
|
||||
134,Accuracy,0.9662598707824839,MinSampleSplit-10
|
||||
135,F1Score,0.8733153638814016,MinSampleSplit-10
|
||||
136,Accuracy,0.9662598707824839,MinSampleSplit-25
|
||||
137,F1Score,0.8712328767123287,MinSampleSplit-25
|
||||
138,Accuracy,0.9705671213208902,MinSampleSplit-50
|
||||
139,F1Score,0.8857938718662953,MinSampleSplit-50
|
||||
140,Accuracy,0.9504666188083274,MinSampleSplit-100
|
||||
141,F1Score,0.8244274809160306,MinSampleSplit-100
|
||||
142,Accuracy,0.9519023689877961,MinSampleSplit-250
|
||||
143,F1Score,0.830379746835443,MinSampleSplit-250
|
||||
144,Accuracy,0.9655419956927495,MinSampleSplit-2
|
||||
145,F1Score,0.8716577540106951,MinSampleSplit-2
|
||||
146,Accuracy,0.9655419956927495,MinSampleSplit-10
|
||||
147,F1Score,0.8688524590163934,MinSampleSplit-10
|
||||
148,Accuracy,0.9641062455132807,MinSampleSplit-25
|
||||
149,F1Score,0.8633879781420766,MinSampleSplit-25
|
||||
150,Accuracy,0.9655419956927495,MinSampleSplit-50
|
||||
151,F1Score,0.8688524590163934,MinSampleSplit-50
|
||||
152,Accuracy,0.9490308686288585,MinSampleSplit-100
|
||||
153,F1Score,0.8174807197943444,MinSampleSplit-100
|
||||
154,Accuracy,0.9461593682699211,MinSampleSplit-250
|
||||
155,F1Score,0.8129675810473816,MinSampleSplit-250
|
||||
156,Accuracy,0.9626704953338119,MinSampleSplit-2
|
||||
157,F1Score,0.860215053763441,MinSampleSplit-2
|
||||
158,Accuracy,0.9619526202440776,MinSampleSplit-10
|
||||
159,F1Score,0.8586666666666666,MinSampleSplit-10
|
||||
160,Accuracy,0.9619526202440776,MinSampleSplit-25
|
||||
161,F1Score,0.8579088471849866,MinSampleSplit-25
|
||||
162,Accuracy,0.9662598707824839,MinSampleSplit-50
|
||||
163,F1Score,0.8690807799442897,MinSampleSplit-50
|
||||
164,Accuracy,0.9483129935391242,MinSampleSplit-100
|
||||
165,F1Score,0.8144329896907216,MinSampleSplit-100
|
||||
166,Accuracy,0.9526202440775305,MinSampleSplit-250
|
||||
167,F1Score,0.8333333333333334,MinSampleSplit-250
|
||||
168,Accuracy,0.9641062455132807,MinSampleSplit-2
|
||||
169,F1Score,0.8633879781420766,MinSampleSplit-2
|
||||
170,Accuracy,0.9619526202440776,MinSampleSplit-10
|
||||
171,F1Score,0.8555858310626703,MinSampleSplit-10
|
||||
172,Accuracy,0.9619526202440776,MinSampleSplit-25
|
||||
173,F1Score,0.8531855955678671,MinSampleSplit-25
|
||||
174,Accuracy,0.9662598707824839,MinSampleSplit-50
|
||||
175,F1Score,0.872628726287263,MinSampleSplit-50
|
||||
176,Accuracy,0.949748743718593,MinSampleSplit-100
|
||||
177,F1Score,0.8205128205128206,MinSampleSplit-100
|
||||
178,Accuracy,0.949748743718593,MinSampleSplit-250
|
||||
179,F1Score,0.8232323232323233,MinSampleSplit-250
|
||||
180,Accuracy,0.9655419956927495,MinSampleSplit-2
|
||||
181,F1Score,0.8709677419354839,MinSampleSplit-2
|
||||
182,Accuracy,0.9655419956927495,MinSampleSplit-10
|
||||
183,F1Score,0.8695652173913042,MinSampleSplit-10
|
||||
184,Accuracy,0.9655419956927495,MinSampleSplit-25
|
||||
185,F1Score,0.8666666666666667,MinSampleSplit-25
|
||||
186,Accuracy,0.964824120603015,MinSampleSplit-50
|
||||
187,F1Score,0.8657534246575342,MinSampleSplit-50
|
||||
188,Accuracy,0.9490308686288585,MinSampleSplit-100
|
||||
189,F1Score,0.8184143222506394,MinSampleSplit-100
|
||||
190,Accuracy,0.9454414931801867,MinSampleSplit-250
|
||||
191,F1Score,0.8109452736318408,MinSampleSplit-250
|
||||
192,Accuracy,0.9676956209619526,MinSampleSplit-2
|
||||
193,F1Score,0.8780487804878049,MinSampleSplit-2
|
||||
194,Accuracy,0.968413496051687,MinSampleSplit-10
|
||||
195,F1Score,0.8804347826086957,MinSampleSplit-10
|
||||
196,Accuracy,0.9655419956927495,MinSampleSplit-25
|
||||
197,F1Score,0.8681318681318682,MinSampleSplit-25
|
||||
198,Accuracy,0.968413496051687,MinSampleSplit-50
|
||||
199,F1Score,0.8784530386740332,MinSampleSplit-50
|
||||
200,Accuracy,0.9475951184493898,MinSampleSplit-100
|
||||
201,F1Score,0.8123393316195374,MinSampleSplit-100
|
||||
202,Accuracy,0.9447236180904522,MinSampleSplit-250
|
||||
203,F1Score,0.8098765432098766,MinSampleSplit-250
|
||||
204,Accuracy,0.9619526202440776,MinSampleSplit-2
|
||||
205,F1Score,0.8586666666666666,MinSampleSplit-2
|
||||
206,Accuracy,0.9655419956927495,MinSampleSplit-10
|
||||
207,F1Score,0.8702702702702703,MinSampleSplit-10
|
||||
208,Accuracy,0.9612347451543432,MinSampleSplit-25
|
||||
209,F1Score,0.8524590163934427,MinSampleSplit-25
|
||||
210,Accuracy,0.9676956209619526,MinSampleSplit-50
|
||||
211,F1Score,0.8746518105849582,MinSampleSplit-50
|
||||
212,Accuracy,0.9504666188083274,MinSampleSplit-100
|
||||
213,F1Score,0.823529411764706,MinSampleSplit-100
|
||||
214,Accuracy,0.9519023689877961,MinSampleSplit-250
|
||||
215,F1Score,0.8312342569269521,MinSampleSplit-250
|
||||
216,Accuracy,0.9655419956927495,MinSampleSplit-2
|
||||
217,F1Score,0.8716577540106951,MinSampleSplit-2
|
||||
218,Accuracy,0.9676956209619526,MinSampleSplit-10
|
||||
219,F1Score,0.88,MinSampleSplit-10
|
||||
220,Accuracy,0.9641062455132807,MinSampleSplit-25
|
||||
221,F1Score,0.8626373626373626,MinSampleSplit-25
|
||||
222,Accuracy,0.9698492462311558,MinSampleSplit-50
|
||||
223,F1Score,0.8833333333333334,MinSampleSplit-50
|
||||
224,Accuracy,0.9490308686288585,MinSampleSplit-100
|
||||
225,F1Score,0.8184143222506394,MinSampleSplit-100
|
||||
226,Accuracy,0.9454414931801867,MinSampleSplit-250
|
||||
227,F1Score,0.8099999999999999,MinSampleSplit-250
|
||||
228,Accuracy,0.964824120603015,MinSampleSplit-2
|
||||
229,F1Score,0.8679245283018868,MinSampleSplit-2
|
||||
230,Accuracy,0.9641062455132807,MinSampleSplit-10
|
||||
231,F1Score,0.8677248677248677,MinSampleSplit-10
|
||||
232,Accuracy,0.9669777458722182,MinSampleSplit-25
|
||||
233,F1Score,0.8736263736263735,MinSampleSplit-25
|
||||
234,Accuracy,0.964824120603015,MinSampleSplit-50
|
||||
235,F1Score,0.8650137741046833,MinSampleSplit-50
|
||||
236,Accuracy,0.9504666188083274,MinSampleSplit-100
|
||||
237,F1Score,0.8226221079691517,MinSampleSplit-100
|
||||
238,Accuracy,0.9475951184493898,MinSampleSplit-250
|
||||
239,F1Score,0.8142493638676845,MinSampleSplit-250
@@ -0,0 +1,161 @@
,Measure,Value,Method
0,Precision,0.8702702702702703,Decision Tree
1,Recall,0.8702702702702703,Decision Tree
2,Accuracy,0.9655419956927495,Decision Tree
3,F1Score,0.8702702702702703,Decision Tree
4,Precision,0.9941176470588236,Logistic Regression
5,Recall,0.9135135135135135,Logistic Regression
6,Accuracy,0.9877961234745154,Logistic Regression
7,F1Score,0.9521126760563381,Logistic Regression
8,Precision,1.0,Neural Network
9,Recall,0.9459459459459459,Neural Network
10,Accuracy,0.9928212491026561,Neural Network
11,F1Score,0.9722222222222222,Neural Network
12,Precision,0.9720670391061452,Naive Bayesian
13,Recall,0.9405405405405406,Naive Bayesian
14,Accuracy,0.9885139985642498,Naive Bayesian
15,F1Score,0.9560439560439562,Naive Bayesian
16,Precision,0.8375634517766497,Decision Tree
17,Recall,0.8918918918918919,Decision Tree
18,Accuracy,0.9626704953338119,Decision Tree
19,F1Score,0.8638743455497383,Decision Tree
20,Precision,0.9941176470588236,Logistic Regression
21,Recall,0.9135135135135135,Logistic Regression
22,Accuracy,0.9877961234745154,Logistic Regression
23,F1Score,0.9521126760563381,Logistic Regression
24,Precision,1.0,Neural Network
25,Recall,0.9351351351351351,Neural Network
26,Accuracy,0.9913854989231874,Neural Network
27,F1Score,0.9664804469273743,Neural Network
28,Precision,0.9720670391061452,Naive Bayesian
29,Recall,0.9405405405405406,Naive Bayesian
30,Accuracy,0.9885139985642498,Naive Bayesian
31,F1Score,0.9560439560439562,Naive Bayesian
32,Precision,0.8702702702702703,Decision Tree
33,Recall,0.8702702702702703,Decision Tree
34,Accuracy,0.9655419956927495,Decision Tree
35,F1Score,0.8702702702702703,Decision Tree
36,Precision,0.9941176470588236,Logistic Regression
37,Recall,0.9135135135135135,Logistic Regression
38,Accuracy,0.9877961234745154,Logistic Regression
39,F1Score,0.9521126760563381,Logistic Regression
40,Precision,1.0,Neural Network
41,Recall,0.9351351351351351,Neural Network
42,Accuracy,0.9913854989231874,Neural Network
43,F1Score,0.9664804469273743,Neural Network
44,Precision,0.9720670391061452,Naive Bayesian
45,Recall,0.9405405405405406,Naive Bayesian
46,Accuracy,0.9885139985642498,Naive Bayesian
47,F1Score,0.9560439560439562,Naive Bayesian
48,Precision,0.8797814207650273,Decision Tree
49,Recall,0.8702702702702703,Decision Tree
50,Accuracy,0.9669777458722182,Decision Tree
51,F1Score,0.875,Decision Tree
52,Precision,0.9941176470588236,Logistic Regression
53,Recall,0.9135135135135135,Logistic Regression
54,Accuracy,0.9877961234745154,Logistic Regression
55,F1Score,0.9521126760563381,Logistic Regression
56,Precision,0.988950276243094,Neural Network
57,Recall,0.9675675675675676,Neural Network
58,Accuracy,0.994256999282125,Neural Network
59,F1Score,0.9781420765027322,Neural Network
60,Precision,0.9720670391061452,Naive Bayesian
61,Recall,0.9405405405405406,Naive Bayesian
62,Accuracy,0.9885139985642498,Naive Bayesian
63,F1Score,0.9560439560439562,Naive Bayesian
64,Precision,0.8757062146892656,Decision Tree
65,Recall,0.8378378378378378,Decision Tree
66,Accuracy,0.9626704953338119,Decision Tree
67,F1Score,0.856353591160221,Decision Tree
68,Precision,0.9941176470588236,Logistic Regression
69,Recall,0.9135135135135135,Logistic Regression
70,Accuracy,0.9877961234745154,Logistic Regression
71,F1Score,0.9521126760563381,Logistic Regression
72,Precision,1.0,Neural Network
73,Recall,0.9351351351351351,Neural Network
74,Accuracy,0.9913854989231874,Neural Network
75,F1Score,0.9664804469273743,Neural Network
76,Precision,0.9720670391061452,Naive Bayesian
77,Recall,0.9405405405405406,Naive Bayesian
78,Accuracy,0.9885139985642498,Naive Bayesian
79,F1Score,0.9560439560439562,Naive Bayesian
80,Precision,0.8481675392670157,Decision Tree
81,Recall,0.8756756756756757,Decision Tree
82,Accuracy,0.9626704953338119,Decision Tree
83,F1Score,0.8617021276595744,Decision Tree
84,Precision,0.9941176470588236,Logistic Regression
85,Recall,0.9135135135135135,Logistic Regression
86,Accuracy,0.9877961234745154,Logistic Regression
87,F1Score,0.9521126760563381,Logistic Regression
88,Precision,1.0,Neural Network
89,Recall,0.9351351351351351,Neural Network
90,Accuracy,0.9913854989231874,Neural Network
91,F1Score,0.9664804469273743,Neural Network
92,Precision,0.9720670391061452,Naive Bayesian
93,Recall,0.9405405405405406,Naive Bayesian
94,Accuracy,0.9885139985642498,Naive Bayesian
95,F1Score,0.9560439560439562,Naive Bayesian
96,Precision,0.8663101604278075,Decision Tree
97,Recall,0.8756756756756757,Decision Tree
98,Accuracy,0.9655419956927495,Decision Tree
99,F1Score,0.8709677419354839,Decision Tree
100,Precision,0.9941176470588236,Logistic Regression
101,Recall,0.9135135135135135,Logistic Regression
102,Accuracy,0.9877961234745154,Logistic Regression
103,F1Score,0.9521126760563381,Logistic Regression
104,Precision,1.0,Neural Network
105,Recall,0.9297297297297298,Neural Network
106,Accuracy,0.990667623833453,Neural Network
107,F1Score,0.9635854341736695,Neural Network
108,Precision,0.9720670391061452,Naive Bayesian
109,Recall,0.9405405405405406,Naive Bayesian
110,Accuracy,0.9885139985642498,Naive Bayesian
111,F1Score,0.9560439560439562,Naive Bayesian
112,Precision,0.8333333333333334,Decision Tree
113,Recall,0.8918918918918919,Decision Tree
114,Accuracy,0.9619526202440776,Decision Tree
115,F1Score,0.8616187989556137,Decision Tree
116,Precision,0.9941176470588236,Logistic Regression
117,Recall,0.9135135135135135,Logistic Regression
118,Accuracy,0.9877961234745154,Logistic Regression
119,F1Score,0.9521126760563381,Logistic Regression
120,Precision,1.0,Neural Network
121,Recall,0.9297297297297298,Neural Network
122,Accuracy,0.990667623833453,Neural Network
123,F1Score,0.9635854341736695,Neural Network
124,Precision,0.9720670391061452,Naive Bayesian
125,Recall,0.9405405405405406,Naive Bayesian
126,Accuracy,0.9885139985642498,Naive Bayesian
127,F1Score,0.9560439560439562,Naive Bayesian
128,Precision,0.8617021276595744,Decision Tree
129,Recall,0.8756756756756757,Decision Tree
130,Accuracy,0.964824120603015,Decision Tree
131,F1Score,0.8686327077747988,Decision Tree
132,Precision,0.9941176470588236,Logistic Regression
133,Recall,0.9135135135135135,Logistic Regression
134,Accuracy,0.9877961234745154,Logistic Regression
135,F1Score,0.9521126760563381,Logistic Regression
136,Precision,1.0,Neural Network
137,Recall,0.9243243243243243,Neural Network
138,Accuracy,0.9899497487437185,Neural Network
139,F1Score,0.9606741573033708,Neural Network
140,Precision,0.9720670391061452,Naive Bayesian
141,Recall,0.9405405405405406,Naive Bayesian
142,Accuracy,0.9885139985642498,Naive Bayesian
143,F1Score,0.9560439560439562,Naive Bayesian
144,Precision,0.8901098901098901,Decision Tree
145,Recall,0.8756756756756757,Decision Tree
146,Accuracy,0.9691313711414213,Decision Tree
147,F1Score,0.8828337874659401,Decision Tree
148,Precision,0.9941176470588236,Logistic Regression
149,Recall,0.9135135135135135,Logistic Regression
150,Accuracy,0.9877961234745154,Logistic Regression
151,F1Score,0.9521126760563381,Logistic Regression
152,Precision,1.0,Neural Network
153,Recall,0.9297297297297298,Neural Network
154,Accuracy,0.990667623833453,Neural Network
155,F1Score,0.9635854341736695,Neural Network
156,Precision,0.9720670391061452,Naive Bayesian
157,Recall,0.9405405405405406,Naive Bayesian
158,Accuracy,0.9885139985642498,Naive Bayesian
159,F1Score,0.9560439560439562,Naive Bayesian
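This last file scores the decision tree against the other three classifiers (Logistic Regression, Neural Network, Naive Bayesian) with the same four metrics per split. A rough sketch of how such a four-way comparison could be produced — the concrete model classes, their settings, and the dataset below are assumptions, not the code behind these numbers:

# Sketch only: four classifiers scored with the same four metrics per split.
import pandas as pd
from sklearn.datasets import load_breast_cancer             # placeholder dataset
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.neural_network import MLPClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.metrics import (accuracy_score, f1_score,
                             precision_score, recall_score)

models = {                                                   # assumed model choices
    "Decision Tree": DecisionTreeClassifier(),
    "Logistic Regression": LogisticRegression(max_iter=1000),
    "Neural Network": MLPClassifier(max_iter=1000),
    "Naive Bayesian": GaussianNB(),
}
X, y = load_breast_cancer(return_X_y=True)
rows = []
for run in range(10):                                        # repeated random splits
    X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=run)
    for name, clf in models.items():
        pred = clf.fit(X_tr, y_tr).predict(X_te)
        rows.append(("Precision", precision_score(y_te, pred), name))
        rows.append(("Recall", recall_score(y_te, pred), name))
        rows.append(("Accuracy", accuracy_score(y_te, pred), name))
        rows.append(("F1Score", f1_score(y_te, pred), name))
pd.DataFrame(rows, columns=["Measure", "Value", "Method"]).to_csv("method_comparison.csv")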