-
Notifications
You must be signed in to change notification settings - Fork 170
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add DecisionTreeClassifier C examples
- Loading branch information
Darius Morawiec
committed
Oct 30, 2017
1 parent
f669aab
commit 5c9da8a
Showing
4 changed files
with
572 additions
and
0 deletions.
There are no files selected for viewing
181 changes: 181 additions & 0 deletions
181
examples/estimator/classifier/DecisionTreeClassifier/c/basics.ipynb
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,181 @@ | ||
{ | ||
"cells": [ | ||
{ | ||
"cell_type": "markdown", | ||
"metadata": {}, | ||
"source": [ | ||
"# sklearn-porter\n", | ||
"\n", | ||
"Repository: https://github.com/nok/sklearn-porter\n", | ||
"\n", | ||
"## DecisionTreeClassifier\n", | ||
"\n", | ||
"Documentation: [sklearn.tree.DecisionTreeClassifier](http://scikit-learn.org/stable/modules/generated/sklearn.tree.DecisionTreeClassifier.html)" | ||
] | ||
}, | ||
{ | ||
"cell_type": "markdown", | ||
"metadata": {}, | ||
"source": [ | ||
"### Loading data:" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 1, | ||
"metadata": {}, | ||
"outputs": [ | ||
{ | ||
"name": "stdout", | ||
"output_type": "stream", | ||
"text": [ | ||
"((150, 4), (150,))\n" | ||
] | ||
} | ||
], | ||
"source": [ | ||
"from sklearn.datasets import load_iris\n", | ||
"\n", | ||
"iris_data = load_iris()\n", | ||
"X = iris_data.data\n", | ||
"y = iris_data.target\n", | ||
"\n", | ||
"print(X.shape, y.shape)" | ||
] | ||
}, | ||
{ | ||
"cell_type": "markdown", | ||
"metadata": {}, | ||
"source": [ | ||
"### Train classifier:" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 2, | ||
"metadata": {}, | ||
"outputs": [ | ||
{ | ||
"data": { | ||
"text/plain": [ | ||
"DecisionTreeClassifier(class_weight=None, criterion='gini', max_depth=None,\n", | ||
" max_features=None, max_leaf_nodes=None,\n", | ||
" min_impurity_decrease=0.0, min_impurity_split=None,\n", | ||
" min_samples_leaf=1, min_samples_split=2,\n", | ||
" min_weight_fraction_leaf=0.0, presort=False, random_state=None,\n", | ||
" splitter='best')" | ||
] | ||
}, | ||
"execution_count": 2, | ||
"metadata": {}, | ||
"output_type": "execute_result" | ||
} | ||
], | ||
"source": [ | ||
"from sklearn.tree import tree\n", | ||
"\n", | ||
"clf = tree.DecisionTreeClassifier()\n", | ||
"clf.fit(X, y)" | ||
] | ||
}, | ||
{ | ||
"cell_type": "markdown", | ||
"metadata": {}, | ||
"source": [ | ||
"\n", | ||
"### Transpile classifier:" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": 4, | ||
"metadata": { | ||
"scrolled": false | ||
}, | ||
"outputs": [ | ||
{ | ||
"name": "stdout", | ||
"output_type": "stream", | ||
"text": [ | ||
"#include <stdlib.h>\n", | ||
"#include <stdio.h>\n", | ||
"#include <math.h>\n", | ||
"\n", | ||
"#define N_FEATURES 4\n", | ||
"#define N_CLASSES 3\n", | ||
"\n", | ||
"int lChilds[17] = {1, -1, 3, 4, 5, -1, -1, 8, -1, 10, -1, -1, 13, 14, -1, -1, -1};\n", | ||
"int rChilds[17] = {2, -1, 12, 7, 6, -1, -1, 9, -1, 11, -1, -1, 16, 15, -1, -1, -1};\n", | ||
"double thresholds[17] = {2.45000004768, -2.0, 1.75, 4.94999980927, 1.65000009537, -2.0, -2.0, 1.54999995232, -2.0, 5.44999980927, -2.0, -2.0, 4.85000038147, 5.94999980927, -2.0, -2.0, -2.0};\n", | ||
"int indices[17] = {2, 2, 3, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 0, 2, 2, 2};\n", | ||
"int classes[17][3] = {{50, 50, 50}, {50, 0, 0}, {0, 50, 50}, {0, 49, 5}, {0, 47, 1}, {0, 47, 0}, {0, 0, 1}, {0, 2, 4}, {0, 0, 3}, {0, 2, 1}, {0, 2, 0}, {0, 0, 1}, {0, 1, 45}, {0, 1, 2}, {0, 1, 0}, {0, 0, 2}, {0, 0, 43}};\n", | ||
"\n", | ||
"int findMax(int nums[N_CLASSES]) {\n", | ||
" int index = 0;\n", | ||
" for (int i = 0; i < N_CLASSES; i++) {\n", | ||
" index = nums[i] > nums[index] ? i : index;\n", | ||
" }\n", | ||
" return index;\n", | ||
"}\n", | ||
"\n", | ||
"int predict(double features[N_FEATURES], int node) {\n", | ||
" if (thresholds[node] != -2) {\n", | ||
" if (features[indices[node]] <= thresholds[node]) {\n", | ||
" return predict(features, lChilds[node]);\n", | ||
" } else {\n", | ||
" return predict(features, rChilds[node]);\n", | ||
" }\n", | ||
" }\n", | ||
" return findMax(classes[node]);\n", | ||
"}\n", | ||
"\n", | ||
"int main(int argc, const char * argv[]) {\n", | ||
"\n", | ||
" /* Features: */\n", | ||
" double features[argc-1];\n", | ||
" int i;\n", | ||
" for (i = 1; i < argc; i++) {\n", | ||
" features[i-1] = atof(argv[i]);\n", | ||
" }\n", | ||
"\n", | ||
" /* Prediction: */\n", | ||
" printf(\"%d\", predict(features, 0));\n", | ||
" return 0;\n", | ||
"\n", | ||
"}\n", | ||
"\n" | ||
] | ||
} | ||
], | ||
"source": [ | ||
"from sklearn_porter import Porter\n", | ||
"\n", | ||
"porter = Porter(clf, language='c')\n", | ||
"output = porter.export()\n", | ||
"\n", | ||
"print(output)" | ||
] | ||
} | ||
], | ||
"metadata": { | ||
"kernelspec": { | ||
"display_name": "Python 2", | ||
"language": "python", | ||
"name": "python2" | ||
}, | ||
"language_info": { | ||
"codemirror_mode": { | ||
"name": "ipython", | ||
"version": 2 | ||
}, | ||
"file_extension": ".py", | ||
"mimetype": "text/x-python", | ||
"name": "python", | ||
"nbconvert_exporter": "python", | ||
"pygments_lexer": "ipython2", | ||
"version": "2.7.13" | ||
} | ||
}, | ||
"nbformat": 4, | ||
"nbformat_minor": 2 | ||
} |
66 changes: 66 additions & 0 deletions
66
examples/estimator/classifier/DecisionTreeClassifier/c/basics.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,66 @@ | ||
# -*- coding: utf-8 -*-

"""
sklearn-porter example: transpile a fitted DecisionTreeClassifier to C.

Trains a decision tree on the Iris dataset and prints the generated,
self-contained C source produced by sklearn_porter.
"""

from sklearn.datasets import load_iris
# NOTE: `from sklearn.tree import tree` imported the private
# sklearn.tree.tree module, removed in scikit-learn >= 0.22.
# Import the public estimator class instead.
from sklearn.tree import DecisionTreeClassifier

from sklearn_porter import Porter

# Load the Iris dataset: 150 samples, 4 features, 3 classes.
iris_data = load_iris()
X = iris_data.data
y = iris_data.target

# Fit a decision tree on the full dataset (example only; no train/test split).
clf = DecisionTreeClassifier()
clf.fit(X, y)

# Transpile the fitted estimator to C and print the generated source.
porter = Porter(clf, language='c')
output = porter.export()
print(output)

# Example of the generated C output (exact thresholds vary per training run):
"""
#include <stdlib.h>
#include <stdio.h>
#include <math.h>
#define N_FEATURES 4
#define N_CLASSES 3
int lChilds[17] = {1, -1, 3, 4, 5, -1, -1, 8, -1, 10, -1, -1, 13, 14, -1, -1, -1};
int rChilds[17] = {2, -1, 12, 7, 6, -1, -1, 9, -1, 11, -1, -1, 16, 15, -1, -1, -1};
double thresholds[17] = {2.45000004768, -2.0, 1.75, 4.94999980927, 1.65000009537, -2.0, -2.0, 1.54999995232, -2.0, 6.94999980927, -2.0, -2.0, 4.85000038147, 3.09999990463, -2.0, -2.0, -2.0};
int indices[17] = {2, 2, 3, 2, 3, 2, 2, 3, 2, 0, 2, 2, 2, 1, 2, 2, 2};
int classes[17][3] = {{50, 50, 50}, {50, 0, 0}, {0, 50, 50}, {0, 49, 5}, {0, 47, 1}, {0, 47, 0}, {0, 0, 1}, {0, 2, 4}, {0, 0, 3}, {0, 2, 1}, {0, 2, 0}, {0, 0, 1}, {0, 1, 45}, {0, 1, 2}, {0, 0, 2}, {0, 1, 0}, {0, 0, 43}};
int findMax(int nums[N_CLASSES]) {
    int index = 0;
    for (int i = 0; i < N_CLASSES; i++) {
        index = nums[i] > nums[index] ? i : index;
    }
    return index;
}
int predict(double features[N_FEATURES], int node) {
    if (thresholds[node] != -2) {
        if (features[indices[node]] <= thresholds[node]) {
            return predict(features, lChilds[node]);
        } else {
            return predict(features, rChilds[node]);
        }
    }
    return findMax(classes[node]);
}
int main(int argc, const char * argv[]) {
    /* Features: */
    double features[argc-1];
    int i;
    for (i = 1; i < argc; i++) {
        features[i-1] = atof(argv[i]);
    }
    /* Prediction: */
    printf("%d", predict(features, 0));
    return 0;
}
"""
Oops, something went wrong.