Skip to content

Commit

Permalink
Update examples and apply a small fix to the docs
Browse files Browse the repository at this point in the history
  • Loading branch information
reiinakano committed Feb 18, 2017
1 parent a0254de commit dc31ac0
Show file tree
Hide file tree
Showing 8 changed files with 42 additions and 34 deletions.
2 changes: 1 addition & 1 deletion docs/Quickstart.rst
Original file line number Diff line number Diff line change
Expand Up @@ -108,4 +108,4 @@ And again, that's it! You'll notice that in this plot, all we needed to do was p
More Plots
----------

Want to know the other plots you can generate using Scikit-plot? Visit the :ref:`factoryapidocs`.
Want to know the other plots you can generate using Scikit-plot? Visit the :ref:`factoryapidocs` or the :ref:`functionsapidocs`.
4 changes: 2 additions & 2 deletions examples/plot_confusion_matrix.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,9 @@
plt.show()

# Using the more flexible functions API
from scikitplot import plotters
from scikitplot import plotters as skplt
rf = RandomForestClassifier()
rf = rf.fit(X, y)
preds = rf.predict(X)
plotters.plot_confusion_matrix(y_true=y, y_pred=preds)
skplt.plot_confusion_matrix(y_true=y, y_pred=preds)
plt.show()
6 changes: 3 additions & 3 deletions examples/plot_feature_importances.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@
plt.show()

# Using the more flexible functions API
from scikitplot import plotters
from scikitplot import plotters as skplt
rf = RandomForestClassifier()
rf = rf.fit(X, y)
plotters.plot_feature_importances(rf, feature_names=['petal length', 'petal width',
'sepal length', 'sepal width'])
skplt.plot_feature_importances(rf, feature_names=['petal length', 'petal width',
'sepal length', 'sepal width'])
plt.show()
4 changes: 2 additions & 2 deletions examples/plot_ks_statistic.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@
plt.show()

# Using the more flexible functions API
from scikitplot import plotters
from scikitplot import plotters as skplt
lr = LogisticRegression()
lr = lr.fit(X, y)
probas = lr.predict_proba(X)
plotters.plot_ks_statistic(y_true=y, y_probas=probas)
skplt.plot_ks_statistic(y_true=y, y_probas=probas)
plt.show()
4 changes: 2 additions & 2 deletions examples/plot_learning_curve.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
plt.show()

# Using the more flexible functions API
from scikitplot import plotters
from scikitplot import plotters as skplt
rf = RandomForestClassifier()
plotters.plot_learning_curve(rf, X, y)
skplt.plot_learning_curve(rf, X, y)
plt.show()
4 changes: 2 additions & 2 deletions examples/plot_precision_recall_curve.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@
plt.show()

# Using the more flexible functions API
from scikitplot import plotters
from scikitplot import plotters as skplt
nb = GaussianNB()
nb = nb.fit(X, y)
probas = nb.predict_proba(X)
plotters.plot_precision_recall_curve(y_true=y, y_probas=probas)
skplt.plot_precision_recall_curve(y_true=y, y_probas=probas)
plt.show()
4 changes: 2 additions & 2 deletions examples/plot_roc_curve.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@
plt.show()

# Using the more flexible functions API
from scikitplot import plotters
from scikitplot import plotters as skplt
nb = GaussianNB()
nb = nb.fit(X, y)
probas = nb.predict_proba(X)
plotters.plot_roc_curve(y_true=y, y_probas=probas)
skplt.plot_roc_curve(y_true=y, y_probas=probas)
plt.show()
48 changes: 28 additions & 20 deletions scikitplot/plotters.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,10 +45,11 @@ def plot_confusion_matrix(y_true, y_pred, title=None, normalize=False, ax=None):
ax (:class:`matplotlib.axes.Axes`): The axes on which the plot was drawn.
Example:
>>> import scikitplot.plotters as skplt
>>> rf = RandomForestClassifier()
>>> rf = rf.fit(X_train, y_train)
>>> y_pred = rf.predict(X_test)
>>> plot_confusion_matrix(y_test, y_pred, normalize=True)
>>> skplt.plot_confusion_matrix(y_test, y_pred, normalize=True)
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
Expand Down Expand Up @@ -112,10 +113,11 @@ def plot_roc_curve(y_true, y_probas, title='ROC Curves', ax=None):
ax (:class:`matplotlib.axes.Axes`): The axes on which the plot was drawn.
Example:
>>> import scikitplot.plotters as skplt
>>> nb = GaussianNB()
>>> nb = nb.fit(X_train, y_train)
>>> y_probas = nb.predict_proba(X_test)
>>> plot_roc_curve(y_test, y_probas)
>>> skplt.plot_roc_curve(y_test, y_probas)
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
Expand Down Expand Up @@ -215,10 +217,11 @@ def plot_ks_statistic(y_true, y_probas, title='KS Statistic Plot', ax=None):
ax (:class:`matplotlib.axes.Axes`): The axes on which the plot was drawn.
Example:
>>> import scikitplot.plotters as skplt
>>> lr = LogisticRegression()
>>> lr = lr.fit(X_train, y_train)
>>> y_probas = lr.predict_proba(X_test)
>>> plot_ks_statistic(y_test, y_probas)
>>> skplt.plot_ks_statistic(y_test, y_probas)
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
Expand Down Expand Up @@ -275,10 +278,11 @@ def plot_precision_recall_curve(y_true, y_probas, title='Precision-Recall Curve'
ax (:class:`matplotlib.axes.Axes`): The axes on which the plot was drawn.
Example:
>>> import scikitplot.plotters as skplt
>>> nb = GaussianNB()
>>> nb = nb.fit(X_train, y_train)
>>> y_probas = nb.predict_proba(X_test)
>>> plot_precision_recall_curve(y_test, y_probas)
>>> skplt.plot_precision_recall_curve(y_test, y_probas)
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
Expand Down Expand Up @@ -360,12 +364,13 @@ def plot_feature_importances(clf, title='Feature Importance', feature_names=None
ax (:class:`matplotlib.axes.Axes`): The axes on which the plot was drawn.
Example:
>>> rf = RandomForestClassifier()
>>> rf.fit(X, y)
>>> plot_feature_importances(rf, feature_names=['petal length', 'petal width',
... 'sepal length', 'sepal width'])
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
>>> import scikitplot.plotters as skplt
>>> rf = RandomForestClassifier()
>>> rf.fit(X, y)
>>> skplt.plot_feature_importances(rf, feature_names=['petal length', 'petal width',
... 'sepal length', 'sepal width'])
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
.. image:: _static/examples/plot_feature_importances.png
:align: center
Expand Down Expand Up @@ -465,8 +470,9 @@ def plot_learning_curve(clf, X, y, title='Learning Curve', cv=None, train_sizes=
ax (:class:`matplotlib.axes.Axes`): The axes on which the plot was drawn.
Example:
>>> rf = classifier_factory(RandomForestClassifier())
>>> rf.plot_learning_curve(X, y)
>>> import scikitplot.plotters as skplt
>>> rf = RandomForestClassifier()
>>> skplt.plot_learning_curve(rf, X, y)
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
Expand Down Expand Up @@ -530,10 +536,11 @@ def plot_silhouette(clf, X, title='Silhouette Analysis', metric='euclidean', cop
ax (:class:`matplotlib.axes.Axes`): The axes on which the plot was drawn.
Example:
>>> kmeans = KMeans(n_clusters=4, random_state=1)
>>> plot_silhouette(kmeans, X)
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
>>> import scikitplot.plotters as skplt
>>> kmeans = KMeans(n_clusters=4, random_state=1)
>>> skplt.plot_silhouette(kmeans, X)
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
.. image:: _static/examples/plot_silhouette.png
:align: center
Expand Down Expand Up @@ -618,10 +625,11 @@ def plot_elbow_curve(clf, X, title='Elbow Plot', cluster_ranges=None, ax=None):
ax (:class:`matplotlib.axes.Axes`): The axes on which the plot was drawn.
Example:
>>> kmeans = KMeans(random_state=1)
>>> plot_elbow_curve(kmeans, cluster_ranges=range(1, 11))
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
>>> import scikitplot.plotters as skplt
>>> kmeans = KMeans(random_state=1)
>>> skplt.plot_elbow_curve(kmeans, cluster_ranges=range(1, 11))
<matplotlib.axes._subplots.AxesSubplot object at 0x7fe967d64490>
>>> plt.show()
.. image:: _static/examples/plot_elbow_curve.png
:align: center
Expand Down

0 comments on commit dc31ac0

Please sign in to comment.