
All samples: 56  |  Calls: 38  |  Derives: 0  |  Imports: 18

src/s/c/scikit-learn-0.14.1/sklearn/datasets/tests/test_samples_generator.py
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_less
 
from sklearn.datasets import make_classification
    from numpy.linalg import svd
    u, s, v = svd(X)
    assert_less(sum(s) - 5, 0.1, "X rank is not approximately 5")
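A minimal, self-contained sketch of the pattern these snippets share (not taken from scikit-learn itself): assert_less(a, b, msg) passes when a < b and otherwise raises an AssertionError carrying msg. The data and the bound below are made up purely for illustration.

import numpy as np
from sklearn.utils.testing import assert_less

rng = np.random.RandomState(0)
X = rng.randn(20, 5)

# residual of a trivial "model": subtract the column means
residual = X - X.mean(axis=0)
error = np.sqrt((residual ** 2).sum())

# passes because the error is far below the deliberately loose bound
assert_less(error, 50.0, "unexpectedly large residual norm: %r" % error)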
 
 

src/s/c/scikit-learn-0.14.1/sklearn/tree/tests/test_tree.py
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_less
 
from sklearn.tree import DecisionTreeClassifier
        reg.fit(boston.data, boston.target)
        score = mean_squared_error(boston.target, reg.predict(boston.data))
        assert_less(score, 1,
                    "Failed with {0}, criterion = {1} and score = {2}"
                    "".format(name, criterion, score))
        reg.fit(boston.data, boston.target)
        score = mean_squared_error(boston.target, reg.predict(boston.data))
        assert_less(score, 2,
                    "Failed with {0}, criterion = {1} and score = {2}"
                    "".format(name, criterion, score))
 
        X_new = clf.transform(X, threshold="mean")
        assert_less(0, X_new.shape[1], "Failed with {0}".format(name))
        assert_less(X_new.shape[1], X.shape[1], "Failed with {0}".format(name))
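For context, a hedged, self-contained variant of the decision-tree check above (assumptions: the Boston housing data and an unpruned tree, which fits its own training set almost exactly, so the training MSE comfortably clears the loose bound; this is a sketch, not the original test):

from sklearn.datasets import load_boston
from sklearn.metrics import mean_squared_error
from sklearn.tree import DecisionTreeRegressor
from sklearn.utils.testing import assert_less

boston = load_boston()
reg = DecisionTreeRegressor(random_state=0)
reg.fit(boston.data, boston.target)

# the training error of a fully grown tree is (near) zero, so this holds
score = mean_squared_error(boston.target, reg.predict(boston.data))
assert_less(score, 1, "unexpectedly high training MSE: {0}".format(score))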
 

src/s/c/scikit-learn-0.14.1/sklearn/cluster/tests/test_k_means.py
 
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_less
from sklearn.utils.fixes import unique
from sklearn.metrics.cluster import v_measure_score
        X_mb, x_mb_squared_norms, new_centers)
    assert_greater(new_inertia, 0.0)
    assert_less(new_inertia, old_inertia)
 
    # check that the incremental difference computation is matching the
        X_mb_csr, x_mb_squared_norms_csr, new_centers_csr)
    assert_greater(new_inertia_csr, 0.0)
    assert_less(new_inertia_csr, old_inertia_csr)
 
    # check that the incremental difference computation is matching the

src/s/c/scikit-learn-0.14.1/sklearn/tests/test_random_projection.py
    GaussianRandomProjection)
 
from sklearn.utils.testing import (
    assert_less,
    assert_raises,
        # contract for eps: pairwise distances are preserved according to the
        # Johnson-Lindenstrauss lemma
        assert_less(distances_ratio.max(), 1 + eps)
        assert_less(1 - eps, distances_ratio.min())
 
            assert_equal(projected.shape, (n_samples, 100))
            assert_equal(rp.components_.shape, (100, n_features))
            assert_less(rp.components_.nnz, 115)  # close to 1% density
            assert_less(85, rp.components_.nnz)  # close to 1% density
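The random-projection assertions above express the Johnson-Lindenstrauss contract: pairwise (squared) distances after projection should lie within a factor of 1 ± eps of the originals. A rough, self-contained sketch under made-up sizes follows; an explicitly generous n_components is used so the probabilistic bound holds with a large margin (this is not the original test):

import numpy as np
from sklearn.metrics.pairwise import euclidean_distances
from sklearn.random_projection import GaussianRandomProjection
from sklearn.utils.testing import assert_less

eps = 0.5
rng = np.random.RandomState(0)
X = rng.randn(50, 1000)  # 50 samples in 1000 dimensions (illustrative sizes)

rp = GaussianRandomProjection(n_components=500, random_state=0)
X_proj = rp.fit_transform(X)

original = euclidean_distances(X, squared=True)
projected = euclidean_distances(X_proj, squared=True)
nonzero = original != 0  # ignore the zero self-distances on the diagonal
distances_ratio = projected[nonzero] / original[nonzero]

assert_less(distances_ratio.max(), 1 + eps)
assert_less(1 - eps, distances_ratio.min())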
 

src/s/c/scikit-learn-0.14.1/sklearn/tests/test_cross_validation.py
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
                                                        scoring="accuracy")
 
    assert_less(score, 0.5)
    assert_greater(pvalue, 0.4)
 
    # test with deprecated interface
    with warnings.catch_warnings(record=True):
        score, scores, pvalue = cval.permutation_test_score(
            svm, X, y, score_func=accuracy_score, cv=cv)
    assert_less(score, 0.5)

src/s/c/scikit-learn-0.14.1/sklearn/decomposition/tests/test_dict_learning.py
from sklearn.utils.testing import SkipTest
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_raises
 
    code = sparse_encode(X, V, alpha=0.001)
    assert_true(not np.all(code == 0))
    assert_less(np.sqrt(np.sum((np.dot(code, V) - X) ** 2)), 0.1)
 
 
                       transform_alpha=0.001).transform(X)
    assert_true(not np.all(code == 0))
    assert_less(np.sqrt(np.sum((np.dot(code, V) - X) ** 2)), 0.1)
 

src/s/c/scikit-learn-0.14.1/sklearn/linear_model/tests/test_sgd.py
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import raises
from sklearn.utils.testing import assert_raises
        clf.fit(X_imbalanced, y_imbalanced)
        y_pred = clf.predict(X)
        assert_less(metrics.f1_score(y, y_pred), 0.96)
 
        # fit a model with auto class_weight enabled

src/s/c/scikit-learn-0.14.1/sklearn/linear_model/tests/test_least_angle.py
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_raises
    assert_true(not np.isnan(coef_path_).any())
    residual = np.dot(X, coef_path_[:, -1]) - y
    assert_less((residual ** 2).sum(), 1.)  # just make sure it's bounded
 
    n_samples = 10
        lasso_cd.fit(X, y)
        error = np.linalg.norm(c - lasso_cd.coef_)
        assert_less(error, 0.01)
 
    # similar test, with the classifiers
                                  normalize=False).fit(X, y)
        err = np.linalg.norm(clf1.coef_ - clf2.coef_)
        assert_less(err, 1e-3)
 
    # same test, with normalized data
        lasso_cd.fit(X, y)
        error = np.linalg.norm(c - lasso_cd.coef_)
        assert_less(error, 0.01)
 
 

src/s/c/scikit-learn-0.14.1/sklearn/linear_model/tests/test_sparse_coordinate_descent.py
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_greater
 
 
    # check that the coefs are sparse
    assert_less(np.sum(s_clf.coef_ != 0.0), 2 * n_informative)
 
 

src/s/c/scikit-learn-0.14.1/sklearn/linear_model/tests/test_passive_aggressive.py
import numpy as np
import scipy.sparse as sp
 
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_greater
            reg.fit(data, y_bin)
            pred = reg.predict(data)
            assert_less(np.mean((pred - y_bin) ** 2), 1.7)
 
 
                reg.partial_fit(data, y_bin)
            pred = reg.predict(data)
            assert_less(np.mean((pred - y_bin) ** 2), 1.7)
 
 
