From 4852a7f339e59f471a5915223ebcebe94c089ed1 Mon Sep 17 00:00:00 2001
From: Shreyas
Date: Fri, 25 Apr 2014 11:49:21 -0700
Subject: [PATCH] added the classifier changes

---
 README.md     |  19 ++++
 classifier.py | 285 ++++++++++++++++++++++++++++++++++++++++++++++++++
 data.csv      |   1 +
 3 files changed, 305 insertions(+)
 create mode 100644 classifier.py
 create mode 100644 data.csv

diff --git a/README.md b/README.md
index 965acb7..c9bf029 100644
--- a/README.md
+++ b/README.md
@@ -2,3 +2,22 @@ classifier-scaffold
 ===================
 
 My reusable code for using many different classifiers on a dataset.
+
+It takes some command line flags, like:
+
+```bash
+python classifier.py --file="exports/appFeatures.csv" --classifier="all" --sample="split"
+```
+
+where:
+
+- `--classifier`: picks which classifier to run. By default it runs all of them; pass one of the model keys to run only that model (see the example below).
+- `--sample`: picks whether the classifiers run on the whole dataset or on equal splits of fair and unfair apps. By default it uses the whole dataset.
+- `--help`: prints usage for the flags above, but frankly it still needs a little improvement.
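+
+For example, to run only the Naive Bayes model on equal splits (`nb` is
+one of the model keys listed by `--help`):
+
+```bash
+python classifier.py --file="exports/appFeatures.csv" --classifier="nb" --sample="split"
+```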
+
+
+Save the console output to a file as:
+
+```bash
+python classifier.py --file="exports/appFeatures.csv" > output.txt
+```
diff --git a/classifier.py b/classifier.py
new file mode 100644
index 0000000..e58a529
--- /dev/null
+++ b/classifier.py
@@ -0,0 +1,285 @@
+#! /usr/bin/env python
+# -*- coding: UTF-8 -*-
+"""
+Benchmark different classifiers
+
+Classifiers tried:
+
+- Naive Bayes
+- Decision Tree
+- AdaBoost (over decision stumps)
+- k-Nearest Neighbors (uniform and distance weighted)
+- SVM
+    - linear
+    - kernelized
+"""
+from __future__ import division
+import sys
+import pandas as pd
+import numpy as np
+from optparse import OptionParser
+from sklearn import metrics, preprocessing
+from sklearn import svm, naive_bayes, neighbors, tree
+from sklearn.ensemble import AdaBoostClassifier
+
+
+def getUserInput(models):
+    """
+    Parse the command line options
+    """
+    optionparser = OptionParser(add_help_option=False, epilog="multiline")
+
+    optionparser.add_option('-c', '--classifier', dest='classifier', default="all")
+    optionparser.add_option('-s', '--sample', dest='sample', default="all")
+    optionparser.add_option('-h', '--help', dest='help', action='store_true',
+                            help='show this help message and exit')
+    optionparser.add_option('-f', '--file', dest='file')
+
+    (option, args) = optionparser.parse_args()
+
+    if option.help:
+        optionparser.print_help()  # print_help() writes to stdout itself; wrapping it in print emits a stray 'None'
+        print __doc__
+        print "Supported Classifier Models:"
+
+        for index, key in enumerate(models):
+            print "%2s %20s" % (index, key)
+
+        print "Default option: 'all'\n"
+        print "To run the program, provide the app features file path"
+        print "Usage: --file='path.to.appData'"
+
+        sys.exit()
+
+    if not option.file:
+        return optionparser.error('Data File path not provided.\n Usage: --file="path.to.data"')
+
+    return {
+        'classifier': option.classifier,
+        'file': option.file,
+        'sample': option.sample
+    }
+
+
+def loadAppData(datafile):
+    """
+    Load the data file and map the labels to booleans:
+    {
+        'fair'  : False,
+        'unfair': True
+    }
+    """
+    df = pd.read_csv(datafile)
+
+    ## Some exports carry a stray unnamed column; drop it if present:
+    # cols = set(df.columns)
+    # cols.remove('Unnamed: 7')
+    # df = df[list(cols)]
+
+    ## Convert the label to boolean: True for 'unfair'
+    df['label'] = df['label'].map(lambda x: x == 'unfair')
+
+    return df
+
+
+def trimDf(df):
+    """
+    Trim the dataframe provided:
+    remove features that we don't think are helping
+    """
+    cols = set(df.columns)
+
+    cols.remove('feat3')  # bug in our feature extraction code
+    cols.remove('feat8')  # considered only free apps
+
+    return df[list(cols)]
+
+
+def prepareSplitClassifier(df, models, choice):
+    """
+    Classify the apps on equal splits of fair and unfair apps
+    """
+
+    def classificationOutput(clf, X, Y):
+        """
+        Fit the model and print the classification results
+        - classification report
+        - confusion matrix
+        """
+        n_samples = 36
+
+        print "\n\nClassifier: \n %s" % (clf)
+        print "#" * 79
+
+        # train on the first n_samples rows, test on the remainder
+        clf.fit(X[:n_samples], Y[:n_samples])
+
+        expected = Y[n_samples:]
+        predicted = clf.predict(X[n_samples:])
+        print "Classification report:\n%s\n" % metrics.classification_report(expected, predicted)
+        print "\nConfusion matrix:\n%s" % metrics.confusion_matrix(expected, predicted)
+
+    def splitclassify(cDf):
+        """
+        Given a dataframe holding equal numbers of fair and unfair apps,
+        classify them
+        """
+        cDf = cDf.reindex(np.random.permutation(cDf.index))  # shuffle the dataframe
+        featCols = set(cDf.columns)
+        featCols.remove('label')
+
+        features = cDf[list(featCols)].astype('float')
+
+        ## Scale the features to a common range
+        min_max_scaler = preprocessing.MinMaxScaler()
+        X = min_max_scaler.fit_transform(features.values)
+
+        Y = cDf['label'].values
+
+        if choice == 'all':
+            for key in models:
+                classificationOutput(models[key], X, Y)
+        elif choice in models:
+            classificationOutput(models[choice], X, Y)
+        else:
+            print "Incorrect Choice"
+
+    fairDf = df[df['label'] == False]
+    unfairDf = df[df['label'] == True]
+
+    # number of disjoint splits of the fair dataframe, each the size of
+    # the unfair dataframe
+    splits = len(fairDf) // len(unfairDf)
+
+    for i in range(splits):
+        # pair the i-th chunk of fair apps with all the unfair apps
+        clDf = fairDf[i * len(unfairDf):(i + 1) * len(unfairDf)].append(unfairDf)
+
+        print "Classifying split %d of fair against unfair" % (i)
+        print "-" * 79
+        splitclassify(clDf)
+        print "\n\n"
+
+
+def performClassification(clf, featVector, labelVector, fold=4):
+    """
+    Evaluate the classifier with `fold`-fold cross validation:
+    each fold is held out for testing in turn while the model
+    trains on the remaining folds
+    """
+    (numrow, numcol) = featVector.shape
+
+    foldsize = int(numrow // fold)
+
+    print "FoldSize: %s" % (foldsize)
+
+    for i in range(fold):
+        # fold i is the test set; everything around it is the training set
+        X_test = featVector[i * foldsize:(i + 1) * foldsize]
+        Y_test = labelVector[i * foldsize:(i + 1) * foldsize]
+
+        X_train = np.concatenate((featVector[:i * foldsize], featVector[(i + 1) * foldsize:]))
+        Y_train = np.concatenate((labelVector[:i * foldsize], labelVector[(i + 1) * foldsize:]))
+
+        print " X_train: %s, Y_train: %s, X_test: %s, Y_test: %s" % (X_train.shape, Y_train.shape, X_test.shape, Y_test.shape)
+        print "#### Classifier: \n %s" % (clf)
+
+        clf.fit(X_train, Y_train)
+
+        expected = Y_test
+        predicted = clf.predict(X_test)
+        print "Classification report:\n%s\n" % metrics.classification_report(expected, predicted)
+        print "\nConfusion matrix:\n%s" % metrics.confusion_matrix(expected, predicted)
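+
+# Note: the fold loop in performClassification is hand-rolled so that we
+# can print a full classification report and confusion matrix per fold.
+# If aggregate accuracy were enough, scikit-learn's own helper would give
+# a comparable k-fold evaluation; a minimal sketch, assuming the
+# sklearn.cross_validation module of this scikit-learn generation:
+#
+#     from sklearn import cross_validation
+#     scores = cross_validation.cross_val_score(clf, featVector, labelVector, cv=fold)
+#     print "Accuracy: %0.3f (+/- %0.3f)" % (scores.mean(), scores.std() * 2)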
+
+
+def allClassifier(cDf, models, modelchoice):
+    """
+    Classify over the whole dataset
+    """
+    print "Data Size: %s, \t Model Choice: %s" % (cDf.shape, modelchoice)
+
+    cDf = cDf.reindex(np.random.permutation(cDf.index))  # shuffle the dataframe
+    featCols = set(cDf.columns)
+    featCols.remove('label')
+
+    features = cDf[list(featCols)].astype('float')
+
+    ## Scale the features to a common range
+    min_max_scaler = preprocessing.MinMaxScaler()
+    featVector = min_max_scaler.fit_transform(features.values)  # scaled feature vector
+
+    labelVector = cDf['label'].values  # label vector
+
+    if modelchoice == 'all':
+        for key in models:
+            if key != 'svm-nl':
+                performClassification(models[key], featVector, labelVector)
+    elif modelchoice in models and modelchoice != 'svm-nl':
+        performClassification(models[modelchoice], featVector, labelVector)
+    else:
+        print "Incorrect Choice"
+
+
+def main():
+    # Supported classifier models
+    n_neighbors = 3
+    models = {
+        'nb'          : naive_bayes.GaussianNB(),
+        'svm-l'       : svm.SVC(kernel='linear'),  # SVC defaults to an RBF kernel; ask for linear explicitly
+        'svm-nl'      : svm.NuSVC(),
+        'tree'        : tree.DecisionTreeClassifier(),
+        'forest'      : AdaBoostClassifier(tree.DecisionTreeClassifier(max_depth=1),
+                                           algorithm="SAMME", n_estimators=200),
+        'knn-uniform' : neighbors.KNeighborsClassifier(n_neighbors, weights='uniform'),
+        'knn-distance': neighbors.KNeighborsClassifier(n_neighbors, weights='distance')
+    }
+
+    userInput = getUserInput(models)
+    appDf = loadAppData(userInput['file'])
+    appDf = trimDf(appDf)
+
+    if userInput['sample'] == 'all':
+        allClassifier(appDf, models, userInput['classifier'])
+    else:
+        prepareSplitClassifier(appDf, models, userInput['classifier'])
+
+
+if __name__ == '__main__':
+    main()
diff --git a/data.csv b/data.csv
new file mode 100644
index 0000000..a5a9d2b
--- /dev/null
+++ b/data.csv
@@ -0,0 +1 @@
+feat0,feat1,feat2,feat3,feat4,feat5,feat6,feat7,feat8,feat9,feat10,label 4,4.051,1,0,TRUE,TRUE,TRUE,30000000,0,-3,601,fair 13,4.351,11,0,TRUE,TRUE,TRUE,30000000,0,2,1139,fair 23,4.555,20,0,FALSE,TRUE,TRUE,3000000,0,-4,2223,fair 10,4.623,5,0,TRUE,TRUE,FALSE,30000000,0,8,804,fair 22,4.046,16,0,FALSE,TRUE,TRUE,7500000,0,-11,1867,fair 18,4.595,6,0,TRUE,TRUE,FALSE,30000000,0,1,1162,fair 18,4.526,60,0,TRUE,TRUE,TRUE,30000000,0,-4,1522,fair 13,4.039,19,0,FALSE,TRUE,FALSE,30000000,0,-5,1895,fair 11,4.4,10,0,TRUE,TRUE,TRUE,3000000,0,-2,1195,fair 19,3.935,11,0,TRUE,TRUE,TRUE,300000,0,-4,1488,fair 18,4.075,35,0,TRUE,TRUE,FALSE,3000000,0,-5,1864,fair 19,3.983,14,0,FALSE,TRUE,FALSE,750000,0,-2,2049,fair 8,4.238,2,0,TRUE,TRUE,FALSE,30000000,0,1,417,fair 16,3.915,11,0,TRUE,TRUE,FALSE,3000000,0,-3,1276,fair 13,4.05,12,0,TRUE,TRUE,FALSE,750000,0,-3,1210,fair 20,3.795,24,0,TRUE,TRUE,TRUE,750000,0,-7,2038,fair 12,3.997,10,0,TRUE,TRUE,FALSE,7500000,0,1,1044,fair 15,3.212,13,0,TRUE,TRUE,TRUE,3000000,0,-5,1245,fair 2,2.611,2,0,TRUE,TRUE,FALSE,3000000,0,-1,225,fair 15,4.547,10,0,TRUE,TRUE,FALSE,30000000,0,-3,1120,fair 22,2.671,11,0,TRUE,TRUE,TRUE,3000000,0,4,1406,fair 13,4.045,10,0,FALSE,TRUE,TRUE,750000,0,-3,1063,fair 7,3.555,10,0,TRUE,TRUE,FALSE,750000,0,-9,855,fair 17,4.59,38,0,TRUE,TRUE,TRUE,3000000,0,-5,2147,fair 12,4.258,13,0,TRUE,TRUE,TRUE,7500000,0,-9,1189,fair 19,4.428,8,0,TRUE,TRUE,FALSE,750000,0,-10,1804,fair 26,4.401,5,0,TRUE,TRUE,FALSE,3000000,0,5,1514,fair 12,4.275,12,0,TRUE,TRUE,TRUE,3000000,0,-6,1272,fair 17,4.149,14,0,TRUE,TRUE,TRUE,750000,0,-8,1413,fair 7,4.396,2,0,TRUE,TRUE,TRUE,300000,0,-3,610,fair 14,4.113,5,0,TRUE,TRUE,TRUE,3000000,0,2,1145,fair 14,4.24,14,0,TRUE,TRUE,FALSE,30000,0,2,1413,fair 2,4.241,14,0,TRUE,TRUE,TRUE,300000,0,-4,573,fair 12,3.989,16,0,TRUE,TRUE,TRUE,3000000,0,-6,1387,fair 19,4.31,7,0,TRUE,TRUE,TRUE,75000,0,-7,1336,fair 7,4.451,5,0,TRUE,TRUE,TRUE,3000000,0,5,817,fair 29,3.916,16,0,TRUE,TRUE,TRUE,300000,0,-9,2205,fair 7,4.761,3,0,TRUE,TRUE,FALSE,300000,0,1,541,fair 6,4.158,1,0,TRUE,TRUE,FALSE,300000,0,0,310,fair 6,2.972,4,0,TRUE,TRUE,FALSE,300000,0,-4,880,fair 5,3.903,4,0,TRUE,TRUE,FALSE,300000,0,-6,583,fair 
19,3.433,20,0,TRUE,TRUE,FALSE,3000000,0,-9,1888,fair 13,4.412,4,0,TRUE,TRUE,FALSE,3000000,0,-3,1122,fair 15,4.461,21,0,FALSE,TRUE,TRUE,3000000,0,15,1613,fair 5,3.564,11,0,TRUE,FALSE,FALSE,300000,0,-2,1014,fair 8,4.131,6,0,TRUE,FALSE,FALSE,3000000,0,5,1115,fair 6,3.55,8,0,TRUE,TRUE,TRUE,30000,0,-1,437,fair 2,4.435,0,0,TRUE,TRUE,TRUE,3000000,0,1,307,fair 13,4.233,4,0,TRUE,TRUE,TRUE,300000,0,-3,991,fair 7,3.975,6,0,TRUE,TRUE,FALSE,3000000,0,1,578,fair 10,3.926,7,0,TRUE,TRUE,TRUE,300000,0,4,962,fair 13,4.59,6,0,TRUE,TRUE,FALSE,3000000,0,2,1267,fair 9,3.601,8,0,TRUE,TRUE,FALSE,300000,0,-9,1300,fair 16,3.701,12,0,TRUE,TRUE,FALSE,300000,0,3,1051,fair 16,2.931,10,0,TRUE,TRUE,TRUE,300000,0,-13,1822,fair 7,4.564,0,0,TRUE,TRUE,FALSE,3000000,0,0,535,fair 11,4.179,8,0,TRUE,TRUE,FALSE,7500000,0,-7,1075,fair 6,4.466,4,0,TRUE,TRUE,TRUE,3000000,0,7,691,fair 7,4.34,5,0,FALSE,TRUE,TRUE,750000,0,-3,991,fair 7,4.539,14,0,TRUE,TRUE,FALSE,300000,0,3,805,fair 23,4.135,18,0,TRUE,TRUE,TRUE,3000000,0,0,1509,fair 22,4.48,13,0,TRUE,FALSE,FALSE,300000,0,0,1661,fair 22,4.141,20,0,TRUE,TRUE,TRUE,7500000,0,3,1484,fair 15,4.557,13,0,TRUE,FALSE,FALSE,3000000,0,-6,1431,fair 12,4.452,16,0,TRUE,FALSE,FALSE,300000,0,2,929,fair 10,4.482,4,0,TRUE,FALSE,FALSE,3000000,0,4,577,fair 9,4.572,6,0,TRUE,TRUE,FALSE,3000000,0,-4,833,fair 13,4.789,5,0,TRUE,TRUE,FALSE,300000,0,8,869,fair 14,4.212,21,0,TRUE,FALSE,FALSE,750000,0,-3,1140,fair 15,4.226,7,0,TRUE,TRUE,TRUE,3000000,0,3,651,fair 7,4.182,3,0,TRUE,FALSE,TRUE,3000000,0,-3,516,fair 7,4.528,3,0,TRUE,TRUE,FALSE,300000,0,0,326,fair 8,4.384,4,0,TRUE,TRUE,FALSE,3000000,0,0,790,fair 16,4.123,3,0,TRUE,TRUE,FALSE,750000,0,-7,1138,fair 19,4.399,10,0,TRUE,TRUE,FALSE,30000,0,0,1284,fair 26,4.239,18,0,TRUE,TRUE,FALSE,3000000,0,-8,2334,fair 5,4.249,1,0,TRUE,FALSE,FALSE,750000,0,-1,585,fair 30,3.228,24,0,TRUE,TRUE,TRUE,30000,0,2,2314,fair 14,4.301,10,0,TRUE,TRUE,FALSE,3000000,0,-6,1573,fair 13,4.095,11,0,TRUE,FALSE,FALSE,750000,0,5,952,fair 1,4.223,0,0,TRUE,FALSE,FALSE,75000,0,1,156,fair 9,4.413,4,0,FALSE,TRUE,FALSE,300000,0,2,607,fair 8,4.018,7,0,TRUE,FALSE,FALSE,75000,0,-1,441,fair 7,4.529,3,0,TRUE,TRUE,TRUE,3000000,0,-1,674,fair 31,4.341,16,0,TRUE,FALSE,FALSE,300000,0,-2,1784,fair 2,4.613,1,0,TRUE,TRUE,FALSE,750000,0,5,153,fair 26,4.664,7,0,TRUE,TRUE,TRUE,750000,0,-5,1339,fair 7,4.402,2,0,TRUE,TRUE,FALSE,3000000,0,4,519,fair 12,4.573,9,0,TRUE,FALSE,FALSE,300000,0,-1,928,fair 8,4.302,0,0,TRUE,TRUE,TRUE,750000,0,0,386,fair 11,4.035,0,0,TRUE,TRUE,FALSE,300000,0,-3,664,fair 12,4.256,9,0,TRUE,TRUE,TRUE,3000000,0,0,909,fair 2,3.805,2,0,TRUE,TRUE,TRUE,750000,0,3,144,fair 7,4.31,1,0,TRUE,TRUE,FALSE,300000,0,0,376,fair 14,4.528,7,0,TRUE,TRUE,FALSE,3000000,0,3,740,fair 19,2.117,17,0,TRUE,TRUE,TRUE,75000,0,-9,1787,fair 22,4.421,15,0,TRUE,TRUE,FALSE,30000,0,-2,1289,fair 3,4.261,1,0,TRUE,TRUE,TRUE,30000000,0,3,547,fair 8,4.629,8,0,TRUE,TRUE,FALSE,30000,0,0,757,fair 13,4.05,9,0,TRUE,TRUE,TRUE,75000,0,-7,1484,fair 10,4.685,4,0,TRUE,TRUE,FALSE,300000,0,0,687,fair 8,4.339,12,0,TRUE,FALSE,FALSE,30000,0,-1,636,fair 10,4.646,12,0,TRUE,TRUE,FALSE,30000,0,-4,781,fair 3,4.182,1,0,TRUE,FALSE,FALSE,75000,0,2,161,fair 5,4.656,7,0,TRUE,TRUE,FALSE,30000,0,3,614,fair 2,4.465,2,0,TRUE,FALSE,FALSE,75000,0,1,34,fair 3,4.279,2,0,TRUE,FALSE,FALSE,750000,0,1,166,fair 3,4.677,0,0,TRUE,TRUE,FALSE,7500,0,1,77,fair 2,3.901,1,0,TRUE,FALSE,FALSE,300000,0,-3,420,fair 5,4.115,3,0,TRUE,TRUE,FALSE,750000,0,4,429,fair 1,4.429,1,0,TRUE,FALSE,FALSE,3000,0,0,69,fair 9,4.787,11,0,TRUE,TRUE,FALSE,7500,0,1,969,fair 
3,4.815,2,0,TRUE,TRUE,FALSE,7500,0,4,186,fair 11,4.147,15,0,TRUE,TRUE,TRUE,75000,0,-1,1138,fair 1,4.696,3,0,TRUE,TRUE,FALSE,7500,0,0,187,fair 7,4.408,10,0,TRUE,FALSE,FALSE,300000,0,7,346,fair 11,4.52,5,0,TRUE,TRUE,FALSE,300000,0,2,534,fair 0,4.222,0,0,TRUE,TRUE,FALSE,7500,0,0,17,fair 10,4.197,10,0,TRUE,TRUE,FALSE,30000,0,1,1044,fair 2,3.952,2,0,TRUE,TRUE,FALSE,7500,0,2,176,fair 17,4.271,14,0,TRUE,TRUE,TRUE,300000000,0,-7,1493,fair 13,3.956,11,0,TRUE,TRUE,TRUE,300000000,0,-10,1519,fair 16,4.424,14,0,TRUE,TRUE,TRUE,30000000,0,-4,1187,fair 14,4.539,17,0,TRUE,TRUE,TRUE,300000000,0,-11,1365,fair 15,3.994,11,0,TRUE,TRUE,TRUE,75000000,0,-5,1743,fair 4,4.436,5,0,TRUE,TRUE,TRUE,75000000,0,-8,1106,fair 15,4.559,26,0,TRUE,TRUE,TRUE,300000000,0,1,1639,fair 15,4.425,11,0,TRUE,TRUE,TRUE,300000000,0,-3,1070,fair 19,4.18,18,0,FALSE,TRUE,TRUE,300000000,0,-7,1221,fair 14,4.179,6,0,FALSE,TRUE,FALSE,300000000,0,-9,1122,fair 20,4.395,12,0,FALSE,TRUE,TRUE,75000000,0,-2,1457,fair 16,4.201,21,0,FALSE,TRUE,TRUE,30000000,0,-5,1638,fair 14,4.164,18,0,TRUE,TRUE,TRUE,3000000,0,-7,1255,fair 20,4.394,24,0,TRUE,TRUE,TRUE,30000000,0,-8,1469,fair 24,4.273,19,0,TRUE,TRUE,FALSE,7500000,0,-3,1717,fair 6,4.659,2,0,TRUE,TRUE,TRUE,30000000,0,2,404,fair 15,3.958,16,0,TRUE,TRUE,FALSE,7500000,0,-8,1561,fair 5,4.333,14,0,TRUE,TRUE,FALSE,75000000,0,-4,1100,fair 15,4.135,15,0,TRUE,TRUE,FALSE,30000000,0,-10,1389,fair 15,4.42,3,0,TRUE,TRUE,FALSE,30000000,0,-9,1508,fair 14,4.398,11,0,FALSE,TRUE,FALSE,75000000,0,-9,1243,fair 20,4.242,29,0,TRUE,TRUE,TRUE,3000000,0,-8,1591,fair 10,2.843,2,0,FALSE,TRUE,FALSE,30000000,0,-5,942,fair 14,4.229,11,0,TRUE,TRUE,TRUE,30000000,0,-9,1413,fair 18,3.991,16,0,TRUE,TRUE,TRUE,3000000,0,-4,1969,fair 14,4.12,6,0,TRUE,TRUE,FALSE,7500000,0,-9,1392,fair 12,4.33,26,0,TRUE,TRUE,TRUE,7500000,0,-3,1528,fair 16,4.263,13,0,FALSE,TRUE,TRUE,750000000,0,-9,1295,fair 9,3.97,26,0,TRUE,TRUE,TRUE,3000000,0,-13,1676,fair 8,4.545,14,0,TRUE,TRUE,TRUE,75000000,0,-6,1294,fair 16,3.77,20,0,TRUE,TRUE,TRUE,3000000,0,-9,1919,fair 7,4.386,37,0,TRUE,TRUE,TRUE,3000000,0,-4,908,fair 15,3.46,15,0,TRUE,TRUE,TRUE,300000000,0,2,1345,fair 18,4.289,11,0,TRUE,TRUE,FALSE,300000000,0,-8,1970,fair 14,4.364,24,0,TRUE,TRUE,FALSE,7500000,0,5,1337,fair 14,3.131,15,0,TRUE,TRUE,TRUE,3000000,0,-2,1123,fair 3,4.327,1,0,TRUE,TRUE,TRUE,3000000,0,-1,211,fair 9,3.616,17,0,TRUE,TRUE,FALSE,3000000,0,-1,1508,fair 13,3.683,18,0,TRUE,TRUE,FALSE,3000000,0,2,1252,fair 18,4.517,6,0,TRUE,TRUE,TRUE,3000000,0,-5,1590,fair 21,4.317,10,0,TRUE,TRUE,FALSE,30000000,0,-8,1358,fair 10,3.545,15,0,TRUE,TRUE,FALSE,3000000,0,-4,1350,fair 13,4.337,17,0,TRUE,TRUE,FALSE,3000000,0,-12,1577,fair 9,4.009,8,0,TRUE,FALSE,FALSE,300000,0,3,728,fair 19,3.957,24,0,TRUE,TRUE,FALSE,300000000,0,-1,1707,fair 16,3.298,13,0,TRUE,TRUE,FALSE,300000,0,-3,1817,fair 9,4.329,7,0,TRUE,TRUE,FALSE,30000000,0,-2,1058,fair 11,4.185,5,0,TRUE,TRUE,FALSE,3000000,0,-4,1217,fair 17,4.303,26,0,TRUE,TRUE,TRUE,3000000,0,-2,1447,fair 8,4.14,19,0,TRUE,TRUE,FALSE,30000000,0,-9,1574,fair 24,4.551,4,0,TRUE,TRUE,FALSE,300000,0,0,1354,fair 12,4.388,15,0,TRUE,TRUE,FALSE,30000000,0,-9,1427,fair 17,4.241,12,0,TRUE,TRUE,TRUE,3000000,0,-5,1685,fair 9,4.168,6,0,TRUE,TRUE,TRUE,750000,0,-3,946,fair 13,3.932,15,0,TRUE,TRUE,FALSE,300000,0,-1,1139,fair 9,4.541,15,0,TRUE,TRUE,TRUE,300000,0,-7,1251,fair 32,4.361,10,0,TRUE,TRUE,TRUE,7500000,0,-14,2454,fair 13,4.433,7,0,TRUE,TRUE,TRUE,3000000,0,3,677,fair 12,4.491,7,0,TRUE,TRUE,TRUE,75000,0,1,999,fair 15,4.198,10,0,TRUE,TRUE,TRUE,3000000,0,-3,1053,fair 
17,3.822,12,0,TRUE,TRUE,TRUE,3000000,0,-9,1737,fair 16,4.436,2,0,TRUE,TRUE,TRUE,7500000,0,3,1257,fair 20,4.763,10,0,TRUE,TRUE,TRUE,3000000,0,-7,1347,fair 20,3.622,12,0,TRUE,TRUE,FALSE,3000000,0,-1,1669,fair 9,4.684,10,0,TRUE,TRUE,FALSE,30000000,0,4,877,fair 15,4.448,7,0,TRUE,TRUE,FALSE,3000000,0,-4,1015,fair 5,4.199,6,0,TRUE,TRUE,FALSE,300000,0,-2,601,fair 6,4.42,10,0,TRUE,TRUE,FALSE,3000000,0,2,1059,fair 24,4.188,10,0,TRUE,TRUE,TRUE,3000000,0,-7,1868,fair 2,3.835,1,0,TRUE,TRUE,FALSE,3000000,0,3,274,fair 18,4.468,12,0,TRUE,TRUE,TRUE,750000,0,1,1271,fair 16,4.603,9,0,TRUE,TRUE,TRUE,7500000,0,5,1251,fair 11,4.253,12,0,TRUE,TRUE,TRUE,3000000,0,-3,1568,fair 17,4.578,12,0,TRUE,TRUE,TRUE,3000000,0,2,1352,fair 16,4.282,33,0,TRUE,TRUE,FALSE,3000000,0,-4,1579,fair 13,4.287,2,0,TRUE,FALSE,FALSE,750000,0,9,719,fair 20,4.494,15,0,TRUE,TRUE,TRUE,7500000,0,-2,1460,fair 16,2.845,24,0,TRUE,TRUE,FALSE,3000000,0,-12,1792,fair 16,4.458,6,0,TRUE,TRUE,TRUE,3000000,0,-3,1299,fair 12,4.264,10,0,TRUE,TRUE,FALSE,3000000,0,9,791,fair 18,4.309,15,0,TRUE,TRUE,TRUE,3000000,0,-4,1413,fair 10,4.38,16,0,FALSE,TRUE,TRUE,300000,0,3,1126,fair 8,3.035,13,0,TRUE,TRUE,FALSE,75000,0,0,1058,fair 11,4.732,9,0,TRUE,TRUE,FALSE,300000,0,-4,860,fair 5,4.325,1,0,TRUE,TRUE,TRUE,75000,0,-2,483,fair 12,4.291,1,0,TRUE,TRUE,TRUE,3000000,0,-1,572,fair 13,4.367,7,0,TRUE,TRUE,FALSE,30000000,0,-3,931,fair 13,4.612,8,0,TRUE,TRUE,TRUE,7500000,0,-5,824,fair 23,4.2,10,0,TRUE,TRUE,FALSE,3000000,0,-1,1731,fair 14,4.594,14,0,TRUE,TRUE,FALSE,750000,0,2,960,fair 23,4.845,11,0,TRUE,TRUE,TRUE,3000000,0,10,1540,fair 6,4.208,14,0,TRUE,TRUE,TRUE,750000,0,2,570,fair 13,4.391,17,0,TRUE,TRUE,FALSE,3000000,0,-4,1617,fair 10,3.965,10,0,TRUE,TRUE,TRUE,300000,0,4,1189,fair 7,4.522,11,0,TRUE,TRUE,FALSE,7500000,0,-3,914,fair 10,3.858,4,0,TRUE,TRUE,TRUE,3000000,0,4,635,fair 5,4.405,21,0,TRUE,TRUE,FALSE,3000000,0,8,584,fair 18,4.211,11,0,TRUE,TRUE,TRUE,3000000,0,-3,1693,fair 5,3.734,28,0,TRUE,TRUE,TRUE,3000000,0,0,814,fair 13,4.021,14,0,TRUE,TRUE,TRUE,3000000,0,0,694,fair 13,3.987,17,0,TRUE,TRUE,TRUE,300000,0,1,1274,fair 14,4.126,3,0,TRUE,TRUE,FALSE,750000,0,-2,1194,fair 11,3.771,7,0,TRUE,TRUE,TRUE,3000000,0,-2,1355,fair 11,3.505,10,0,TRUE,TRUE,TRUE,300000,0,0,931,fair 18,4.528,7,0,TRUE,TRUE,FALSE,7500000,0,-1,1188,fair 16,4.527,7,0,TRUE,TRUE,TRUE,3000000,0,-9,1776,fair 18,3.603,14,0,TRUE,TRUE,FALSE,750000,0,-8,1640,fair 33,4.227,17,0,TRUE,TRUE,TRUE,300000,0,3,1782,fair 12,4.038,9,0,TRUE,TRUE,FALSE,3000000,0,3,939,fair 10,4.075,3,0,TRUE,TRUE,TRUE,3000000,0,-6,1064,fair 7,4.808,3,0,TRUE,TRUE,FALSE,3000000,0,-4,859,fair 10,4.289,6,0,TRUE,TRUE,FALSE,7500000,0,6,785,fair 24,4.807,37,0,TRUE,TRUE,FALSE,3000000,0,-1,1484,fair 21,4.45,6,0,TRUE,TRUE,TRUE,3000000,0,10,1501,fair 8,3.976,7,0,FALSE,TRUE,TRUE,300000,0,-3,770,fair 20,3.844,13,0,TRUE,TRUE,FALSE,750000,0,-15,1642,fair 28,4.592,15,0,TRUE,TRUE,TRUE,750000,0,4,1941,fair 7,3.167,12,0,TRUE,TRUE,FALSE,300000,0,-6,989,fair 8,3.998,11,0,TRUE,TRUE,TRUE,3000000,0,0,721,fair 2,3.359,23,0,TRUE,FALSE,FALSE,750000,0,0,691,fair 4,3.727,20,0,TRUE,TRUE,TRUE,750000000,0,-6,673,fair 11,4.601,30,0,TRUE,TRUE,TRUE,300000000,0,-2,1294,fair 20,3.726,14,0,TRUE,TRUE,TRUE,30000000,0,-10,1219,fair 10,3.968,13,0,TRUE,TRUE,TRUE,300000000,0,-5,1177,fair 13,4.389,13,0,TRUE,TRUE,TRUE,300000000,0,-4,1222,fair 18,4.73,15,0,FALSE,TRUE,TRUE,30000000,0,-5,1431,fair 16,3.435,9,0,TRUE,TRUE,TRUE,30000000,0,-1,1567,fair 6,4.286,8,0,TRUE,TRUE,FALSE,30000000,0,-3,873,fair 16,4.225,28,0,TRUE,TRUE,TRUE,30000000,0,-5,1353,fair 
17,4.347,10,0,TRUE,TRUE,TRUE,30000000,0,-4,1390,fair 9,4.152,12,0,FALSE,TRUE,TRUE,30000000,0,-1,1112,fair 11,4.237,15,0,TRUE,TRUE,TRUE,7500000,0,-4,1553,fair 22,4.33,7,0,TRUE,TRUE,TRUE,7500000,0,-1,1380,fair 17,4.197,15,0,TRUE,TRUE,TRUE,7500000,0,-4,1156,fair 19,4.337,8,0,TRUE,TRUE,TRUE,3000000,0,5,942,fair 13,4.238,8,0,TRUE,TRUE,TRUE,7500000,0,-2,1066,fair 10,4.135,14,0,TRUE,TRUE,TRUE,7500000,0,-8,912,fair 12,4.214,20,0,TRUE,TRUE,TRUE,7500000,0,-3,1099,fair 4,4.061,6,0,TRUE,TRUE,TRUE,30000000,0,-2,672,fair 11,4.289,17,0,TRUE,TRUE,TRUE,3000000,0,-11,1376,fair 2,4.356,2,0,TRUE,TRUE,TRUE,3000000,0,-1,220,fair 18,4.527,18,0,TRUE,TRUE,TRUE,7500000,0,3,795,fair 7,4.533,7,0,TRUE,TRUE,TRUE,30000000,0,2,602,fair 3,3.724,13,0,TRUE,FALSE,FALSE,3000000,0,-5,708,fair 12,4.213,17,0,TRUE,TRUE,TRUE,30000000,0,-6,1552,fair 17,4.138,14,0,FALSE,TRUE,TRUE,300000000,0,-8,1500,fair 12,4.098,8,0,TRUE,TRUE,TRUE,30000000,0,-1,1204,fair 20,4.386,14,0,TRUE,TRUE,TRUE,7500000,0,-3,1220,fair 13,3.806,6,0,TRUE,TRUE,FALSE,3000000,0,-5,972,fair 14,4.196,10,0,TRUE,TRUE,TRUE,30000000,0,-2,968,fair 15,4.247,12,0,TRUE,TRUE,TRUE,3000000,0,-2,1534,fair 13,4.413,10,0,TRUE,TRUE,TRUE,3000000,0,0,714,fair 12,4.193,5,0,TRUE,TRUE,FALSE,7500000,0,-5,732,fair 15,4.209,8,0,TRUE,FALSE,FALSE,750000,0,-5,1102,fair 28,4.104,20,0,TRUE,TRUE,TRUE,3000000,0,-6,2239,fair 24,4.147,19,0,TRUE,TRUE,TRUE,300000,0,-13,1939,fair 5,4.405,5,0,TRUE,TRUE,TRUE,3000000,0,2,389,fair 9,4.136,5,0,TRUE,TRUE,TRUE,3000000,0,-5,598,fair 26,2.946,11,0,TRUE,TRUE,TRUE,3000000,0,-17,1904,fair 9,3.978,10,0,TRUE,TRUE,TRUE,3000000,0,-1,1029,fair 18,4.576,25,0,TRUE,TRUE,TRUE,30000000,0,1,1342,fair 13,4.548,15,0,TRUE,TRUE,TRUE,3000000,0,-1,1354,fair 17,3.099,10,0,TRUE,TRUE,TRUE,3000000,0,-2,1177,fair 12,4.025,13,0,TRUE,FALSE,FALSE,300000,0,-7,952,fair 5,4.324,10,0,TRUE,TRUE,FALSE,3000000,0,0,669,fair 15,4.335,14,0,TRUE,TRUE,TRUE,7500000,0,-9,1348,fair 3,4.381,46,0,TRUE,TRUE,TRUE,7500000,0,5,525,fair 12,3.594,12,0,TRUE,TRUE,FALSE,300000,0,-1,970,fair 2,4.403,18,0,TRUE,TRUE,TRUE,3000000,0,3,384,fair 19,4.125,46,0,TRUE,TRUE,TRUE,3000000,0,-5,1179,fair 21,3.088,9,0,TRUE,TRUE,TRUE,3000000,0,-9,1593,fair 17,4.534,11,0,TRUE,TRUE,TRUE,3000000,0,2,1085,fair 12,4.602,5,0,TRUE,TRUE,FALSE,3000000,0,-4,992,fair 9,3.977,6,0,TRUE,TRUE,TRUE,7500000,0,-2,714,fair 6,4.464,2,0,TRUE,TRUE,TRUE,3000000,0,1,433,fair 25,4.443,14,0,FALSE,TRUE,FALSE,7500000,0,-5,2242,fair 10,4.391,5,0,FALSE,TRUE,FALSE,3000000,0,-2,1176,fair 14,4.163,8,0,TRUE,TRUE,TRUE,3000000,0,-7,1554,fair 20,4.676,19,0,TRUE,TRUE,FALSE,300000,0,7,990,fair 16,4.397,109,0,TRUE,TRUE,TRUE,7500000,0,-3,1713,fair 18,3.225,9,0,TRUE,TRUE,FALSE,30000,0,-17,2087,unfair 2,1,19,0,TRUE,TRUE,FALSE,3000,0,-1,306,unfair 7,3.149,20,0,TRUE,TRUE,FALSE,30000,0,-6,588,unfair 7,1.951,4,0,TRUE,TRUE,FALSE,30000,0,-9,843,unfair 19,2.062,9,0,TRUE,TRUE,TRUE,750000,0,-7,1281,unfair 15,2.091,6,0,TRUE,TRUE,FALSE,300,0.9,-4,1002,unfair 13,4.554,12,0,FALSE,TRUE,TRUE,3000000,0,1,1492,unfair 8,4.522,98,0,TRUE,TRUE,TRUE,30000000,0,-4,1253,unfair 13,4.054,12,0,TRUE,TRUE,FALSE,750000,0,-3,1210,unfair 7,3.551,10,0,TRUE,TRUE,FALSE,750000,0,-9,855,unfair 9,4.256,9,0,TRUE,TRUE,TRUE,7500000,0,-4,955,unfair 2,4.117,2,0,TRUE,TRUE,TRUE,3000000,0,1,219,unfair 41,4.314,8,0,TRUE,TRUE,FALSE,30000,0,13,2388,unfair 17,3.987,17,0,TRUE,TRUE,TRUE,3000000,0,-10,1997,unfair 2,4.111,2,0,TRUE,TRUE,FALSE,300000,0,1,212,unfair 8,2.949,12,0,TRUE,TRUE,FALSE,300000,0,-6,969,unfair 7,3.586,14,0,TRUE,FALSE,FALSE,300000,0,-1,1054,unfair 10,4.047,1,0,TRUE,TRUE,TRUE,30000,0,1,775,unfair 
10,3.586,7,0,TRUE,FALSE,FALSE,300000,0,-9,1414,unfair 7,4.175,3,0,TRUE,FALSE,TRUE,3000000,0,-3,516,unfair 11,4.424,10,0,TRUE,TRUE,TRUE,30000000,0,4,934,unfair 12,4.535,18,0,TRUE,TRUE,TRUE,300000000,0,-9,979,unfair 3,4.554,7,0,TRUE,TRUE,TRUE,300000000,0,1,559,unfair \ No newline at end of file