All Samples(5)  |  Call(4)  |  Derive(0)  |  Import(1)

src/p/y/pylearn2-HEAD/pylearn2/utils/utlc.py   pylearn2
import inspect
import numpy
import theano
from pylearn2.datasets.utlc import load_ndarray_dataset, load_sparse_dataset
from pylearn2.utils import subdict, sharedX
 
    # Load as the usual ndarray
    expected = inspect.getargspec(load_ndarray_dataset)[0][1:]
    data = load_ndarray_dataset(conf['dataset'], **subdict(conf, expected))
 
    # Special case for on-the-fly normalization
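
This fragment forwards only those configuration entries that load_ndarray_dataset actually accepts as keyword arguments: inspect.getargspec lists the function's parameter names (dropping the leading dataset-name argument), and subdict restricts the conf dictionary to those keys. Below is a minimal, self-contained sketch of the pattern; the subdict and load_ndarray_dataset defined here are stand-ins written for illustration, with pylearn2.utils.subdict assumed to behave the same way (keep only the listed keys).

import inspect

def subdict(d, keys):
    # Stand-in for pylearn2.utils.subdict (assumed behaviour):
    # the sub-dictionary of d restricted to the given keys.
    return dict((k, d[k]) for k in keys if k in d)

def load_ndarray_dataset(name, normalize=False, transfer=False):
    # Hypothetical loader with a signature of the same shape as the real one.
    return name, normalize, transfer

conf = {'dataset': 'ule', 'normalize': True, 'irrelevant_option': 42}

# Parameter names of the loader, minus the dataset-name argument.
# (inspect.getargspec mirrors the snippet above; on Python 3.11+ use
# inspect.getfullargspec or inspect.signature instead.)
expected = inspect.getargspec(load_ndarray_dataset)[0][1:]

# Only 'normalize' survives the filtering; 'irrelevant_option' is dropped.
data = load_ndarray_dataset(conf['dataset'], **subdict(conf, expected))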

src/p/y/pylearn2-HEAD/pylearn2/datasets/avicenna.py   pylearn2
    def __init__(self, which_set, standardize):
        train, valid, test = utlc.load_ndarray_dataset('avicenna')
 
        if which_set == 'train':
            self.X = train
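
The fragment stops after the 'train' branch. Here is a sketch of how such a wrapper might handle the remaining splits; the class shape and the standardize behaviour are assumptions based on the fragment, not the real pylearn2 Avicenna class.

from pylearn2.datasets import utlc

class AvicennaSketch(object):
    def __init__(self, which_set, standardize):
        train, valid, test = utlc.load_ndarray_dataset('avicenna')

        splits = {'train': train, 'valid': valid, 'test': test}
        if which_set not in splits:
            raise ValueError("Unrecognized which_set: %s" % which_set)
        self.X = splits[which_set]

        if standardize:
            # Assumed behaviour: center and scale using training statistics.
            self.X = (self.X - train.mean(axis=0)) / train.std(axis=0)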

src/p/y/pylearn2-HEAD/pylearn2/datasets/tests/test_utlc.py   pylearn2
def test_ule():
    skip_if_no_data()
    # Test loading of transfer data
    train, valid, test, transfer = utlc.load_ndarray_dataset("ule", normalize=True, transfer=True)
    assert train.shape[0]==transfer.shape[0]
def test_all_utlc():
    skip_if_no_data()
    for name in ['avicenna','harry','ule']:   # not testing rita, because it requires a lot of memory and is slow
        print "Loading ", name
        train, valid, test = utlc.load_ndarray_dataset(name, normalize=True)
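
Taken together, the samples show the basic contract of utlc.load_ndarray_dataset: it returns a (train, valid, test) triple, plus a fourth transfer array when transfer=True, row-aligned with the training set. A minimal usage sketch, assuming the UTLC data files are installed where pylearn2 expects them (the tests guard for this with skip_if_no_data):

from pylearn2.datasets import utlc

# Plain load: a (train, valid, test) triple of design matrices.
train, valid, test = utlc.load_ndarray_dataset('avicenna', normalize=True)

# With transfer=True a fourth array of transfer labels is returned as well,
# aligned row-for-row with the training set (as test_ule above checks).
train, valid, test, transfer = utlc.load_ndarray_dataset(
    'ule', normalize=True, transfer=True)
assert train.shape[0] == transfer.shape[0]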