
93 samples  |  49 calls  |  0 subclass uses  |  44 imports

src/d/s/DS_GA_NYC-HEAD/DS_NYC3_sample_code/lesson_10_scikitRF.py   (DS_GA_NYC)
from sklearn.cross_validation import cross_val_score
from sklearn.datasets import make_blobs
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.tree import DecisionTreeClassifier
 
X, y = make_blobs(n_samples=10000, n_features=10, centers=100,
     random_state=0)
 
clf = DecisionTreeClassifier(max_depth=None, min_samples_split=1,
                             random_state=0)  # continuation as in the sklearn 0.14 ensemble docs this lesson mirrors
scores = cross_val_score(clf, X, y)
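
The imports suggest the lesson compares a single tree against the two forest ensembles by cross-validation, as the scikit-learn 0.14 ensemble docs do. A minimal runnable sketch of that comparison; the loop and the estimator settings beyond those shown are assumptions, and the modern import path replaces sklearn.cross_validation:

from sklearn.model_selection import cross_val_score  # sklearn.cross_validation in the 0.14-era code above
from sklearn.datasets import make_blobs
from sklearn.ensemble import RandomForestClassifier, ExtraTreesClassifier
from sklearn.tree import DecisionTreeClassifier

X, y = make_blobs(n_samples=10000, n_features=10, centers=100, random_state=0)

# min_samples_split=1, as in the snippet, was legal in 0.14; releases >= 0.18 require >= 2
for clf in [DecisionTreeClassifier(max_depth=None, random_state=0),
            RandomForestClassifier(n_estimators=10, random_state=0),
            ExtraTreesClassifier(n_estimators=10, random_state=0)]:
    print(type(clf).__name__, cross_val_score(clf, X, y).mean())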

src/s/c/scikit-learn-0.14.1/examples/ensemble/plot_adaboost_hastie_10_2.py   (scikit-learn)
import numpy as np
from sklearn import datasets
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import zero_one_loss
from sklearn.ensemble import AdaBoostClassifier
X_train, y_train = X[:2000], y[:2000]
 
dt_stump = DecisionTreeClassifier(max_depth=1, min_samples_leaf=1)
dt_stump.fit(X_train, y_train)
dt_stump_err = 1.0 - dt_stump.score(X_test, y_test)
 
dt = DecisionTreeClassifier(max_depth=9, min_samples_leaf=1)
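
X, y and the test split are elided by the excerpt; in the full example they come from make_hastie_10_2, with the first 2000 points used for training and the rest held out. A self-contained sketch of the stump-error computation shown above:

from sklearn.datasets import make_hastie_10_2
from sklearn.tree import DecisionTreeClassifier

X, y = make_hastie_10_2(n_samples=12000, random_state=1)
X_train, y_train = X[:2000], y[:2000]
X_test, y_test = X[2000:], y[2000:]

# a decision stump (depth-1 tree): the weak learner AdaBoost boosts in this example
dt_stump = DecisionTreeClassifier(max_depth=1, min_samples_leaf=1)
dt_stump.fit(X_train, y_train)
dt_stump_err = 1.0 - dt_stump.score(X_test, y_test)
print(dt_stump_err)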

src/s/c/scikit-learn-0.14.1/examples/ensemble/plot_forest_iris.py   (scikit-learn)
from sklearn.ensemble import (RandomForestClassifier, ExtraTreesClassifier,
                              AdaBoostClassifier)
from sklearn.externals.six.moves import xrange
from sklearn.tree import DecisionTreeClassifier
 
# Parameters
plot_idx = 1
 
models = [DecisionTreeClassifier(max_depth=None),
          RandomForestClassifier(n_estimators=n_estimators),
          ExtraTreesClassifier(n_estimators=n_estimators),
          AdaBoostClassifier(DecisionTreeClassifier(max_depth=3),
                             n_estimators=n_estimators)]
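
The original fits each model once per pair of iris features to draw decision-boundary panels. A compact sketch of fitting all four; n_estimators = 30 follows the original script, while the iris loading and single feature pair are simplifications:

from sklearn.datasets import load_iris
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import (RandomForestClassifier, ExtraTreesClassifier,
                              AdaBoostClassifier)

n_estimators = 30
iris = load_iris()
X, y = iris.data[:, [0, 1]], iris.target  # one feature pair, as each panel of the figure uses

models = [DecisionTreeClassifier(max_depth=None),
          RandomForestClassifier(n_estimators=n_estimators),
          ExtraTreesClassifier(n_estimators=n_estimators),
          AdaBoostClassifier(DecisionTreeClassifier(max_depth=3),
                             n_estimators=n_estimators)]

for model in models:
    print(type(model).__name__, model.fit(X, y).score(X, y))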

src/s/c/scikit-learn-0.14.1/examples/ensemble/plot_adaboost_multiclass.py   (scikit-learn)
from sklearn.externals.six.moves import xrange
from sklearn.metrics import accuracy_score
from sklearn.tree import DecisionTreeClassifier
 
 
 
bdt_real = AdaBoostClassifier(
    DecisionTreeClassifier(max_depth=2),
    n_estimators=600,
    learning_rate=1)
 
bdt_discrete = AdaBoostClassifier(
    DecisionTreeClassifier(max_depth=2),
    n_estimators=600,
    learning_rate=1.5,
    algorithm="SAMME")  # continuation as in the original example, contrasting SAMME with bdt_real's SAMME.R
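
A sketch of fitting and scoring one of the boosters above on the example's Gaussian-quantiles data; the sample counts follow the original script, and the algorithm keyword is omitted because recent scikit-learn removed SAMME.R and deprecated the parameter:

from sklearn.datasets import make_gaussian_quantiles
from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score

X, y = make_gaussian_quantiles(n_samples=13000, n_features=10,
                               n_classes=3, random_state=1)
X_train, y_train = X[:3000], y[:3000]
X_test, y_test = X[3000:], y[3000:]

bdt = AdaBoostClassifier(DecisionTreeClassifier(max_depth=2),
                         n_estimators=600, learning_rate=1)
bdt.fit(X_train, y_train)
print(accuracy_score(y_test, bdt.predict(X_test)))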

src/s/c/scikit-learn-0.14.1/examples/ensemble/plot_adaboost_twoclass.py   (scikit-learn)
 
from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.datasets import make_gaussian_quantiles
 
 
# Create and fit an AdaBoosted decision tree
bdt = AdaBoostClassifier(DecisionTreeClassifier(max_depth=1),
                         algorithm="SAMME",
                         n_estimators=200)
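
The two-class data this booster is fit on is elided by the excerpt; the original builds it from two offset Gaussian-quantile clouds. A self-contained sketch, with dataset parameters taken from the original script (algorithm="SAMME" is kept as in the snippet, though recent scikit-learn makes it the default and deprecates the keyword):

import numpy as np
from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.datasets import make_gaussian_quantiles

# two overlapping clouds; the second has its labels flipped to form one two-class problem
X1, y1 = make_gaussian_quantiles(cov=2., n_samples=200, n_features=2,
                                 n_classes=2, random_state=1)
X2, y2 = make_gaussian_quantiles(mean=(3, 3), cov=1.5, n_samples=300,
                                 n_features=2, n_classes=2, random_state=1)
X = np.concatenate((X1, X2))
y = np.concatenate((y1, -y2 + 1))

bdt = AdaBoostClassifier(DecisionTreeClassifier(max_depth=1),
                         algorithm="SAMME",
                         n_estimators=200)
bdt.fit(X, y)
print(bdt.score(X, y))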

src/s/c/scikit-learn-0.14.1/examples/plot_classifier_comparison.py   (scikit-learn)
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier
from sklearn.naive_bayes import GaussianNB
classifiers = [
    KNeighborsClassifier(3),
    SVC(kernel="linear", C=0.025),
    SVC(gamma=2, C=1),
    DecisionTreeClassifier(max_depth=5),
    RandomForestClassifier(max_depth=5, n_estimators=10, max_features=1),
    AdaBoostClassifier(),
    GaussianNB()]
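
A sketch of the comparison loop this list feeds; make_moons, the scaling, and the 60/40 split mirror the original's first synthetic dataset, and only the tree-based entries are kept here:

from sklearn.model_selection import train_test_split  # sklearn.cross_validation in the 0.14-era original
from sklearn.datasets import make_moons
from sklearn.preprocessing import StandardScaler
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier

X, y = make_moons(noise=0.3, random_state=0)
X = StandardScaler().fit_transform(X)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.4,
                                                    random_state=42)

for clf in [DecisionTreeClassifier(max_depth=5),
            RandomForestClassifier(max_depth=5, n_estimators=10, max_features=1),
            AdaBoostClassifier()]:
    clf.fit(X_train, y_train)
    print(type(clf).__name__, clf.score(X_test, y_test))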

src/a/s/astroML-0.2/book_figures/chapter9/fig_rrlyrae_treevis.py   (astroML; the astroML-HEAD copy shows the identical snippet)
from matplotlib import pyplot as plt
 
from sklearn.tree import DecisionTreeClassifier
from astroML.datasets import fetch_rrlyrae_combined
from astroML.utils import split_samples
N_plot = 5000 + N_rr
 
clf = DecisionTreeClassifier(compute_importances=True,
                             random_state=0, criterion='entropy')
clf.fit(X_train, y_train)
#--------------------------------------------------
# compute statistics for a larger training set
clf = DecisionTreeClassifier(compute_importances=True,
                             random_state=0, criterion='entropy')
clf.fit(X_train, y_train)
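
compute_importances was deprecated around scikit-learn 0.14 and later removed; feature_importances_ is now always available after fitting. A sketch of the same entropy-criterion fit on stand-in data (make_classification substitutes for astroML's fetch_rrlyrae_combined and split_samples, which are not reproduced here):

from sklearn.datasets import make_classification
from sklearn.tree import DecisionTreeClassifier

# stand-in for the four-color RR Lyrae feature matrix
X, y = make_classification(n_samples=5000, n_features=4, random_state=0)
X_train, y_train = X[:3750], y[:3750]
X_test, y_test = X[3750:], y[3750:]

clf = DecisionTreeClassifier(random_state=0, criterion='entropy')
clf.fit(X_train, y_train)
print(clf.feature_importances_)   # what compute_importances=True used to enable
print(clf.score(X_test, y_test))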

src/a/s/astroML-0.2/book_figures/chapter9/fig_rrlyrae_decisiontree.py   (astroML; the astroML-HEAD copy shows the identical snippet)
from matplotlib import pyplot as plt
 
from sklearn.tree import DecisionTreeClassifier
from astroML.datasets import fetch_rrlyrae_combined
from astroML.utils import split_samples
    predictions.append([])
    for nc in Ncolors:
        clf = DecisionTreeClassifier(random_state=0, max_depth=depth,
                                     criterion='entropy')
        clf.fit(X_train[:, :nc], y_train)
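
The loop above sweeps tree depth and the number of leading features (colors) used for training. A self-contained sketch of that sweep on stand-in data; the depth values and the dataset are assumptions, since astroML's fetch_rrlyrae_combined supplies the real X and y:

import numpy as np
from sklearn.datasets import make_classification
from sklearn.tree import DecisionTreeClassifier

X, y = make_classification(n_samples=4000, n_features=4, random_state=0)
X_train, y_train = X[:3000], y[:3000]
X_test, y_test = X[3000:], y[3000:]

Ncolors = np.arange(1, X.shape[1] + 1)   # use the first 1..4 features in turn
predictions = []
for depth in [7, 12]:                    # assumed depths; the figure compares two
    predictions.append([])
    for nc in Ncolors:
        clf = DecisionTreeClassifier(random_state=0, max_depth=depth,
                                     criterion='entropy')
        clf.fit(X_train[:, :nc], y_train)
        predictions[-1].append(clf.predict(X_test[:, :nc]))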
