Did I find the right examples for you?  Yes / No      |      Crawl my project      |      Python Jobs

All Samples (22)  |  Calls (16)  |  Derives (0)  |  Imports (6)

src/p/y/pylearn2-HEAD/pylearn2/optimization/batch_gradient_descent.py   pylearn2(Download)
import theano.tensor as T
 
from pylearn2.utils import function
from pylearn2.utils import grad
from pylearn2.utils import safe_zip
            self._compute_grad = Accumulator(inputs, updates = updates)
        else:
            self._compute_grad = function(inputs, updates = updates,
                    mode=self.theano_function_mode,
                    name='BatchGradientDescent._compute_grad')
            self.obj = Accumulator(inputs, obj)
        else:
            self.obj = function(inputs, obj, mode=self.theano_function_mode,
                    name='BatchGradientDescent.obj')
 
            diff = cached - mul
            goto_updates[param] = diff
        self._cache_values = function([], updates = cache_updates, mode=self.theano_function_mode, name='BatchGradientDescent._cache_values')
        assert isinstance(param_constrainers, (list, tuple))
        for param_constrainer in param_constrainers:
            param_constrainer(goto_updates)
        self._goto_alpha = function([alpha], updates=goto_updates,

src/p/y/pylearn2-HEAD/pylearn2/monitor.py   pylearn2(Download)
from pylearn2.datasets.dataset import Dataset
from pylearn2.space import Space, CompositeSpace, NullSpace
from pylearn2.utils import function, sharedX, safe_zip, safe_izip
from pylearn2.utils.iteration import is_stochastic
from pylearn2.utils.data_specs import DataSpecsMapping
        with log_timing(log, "compiling begin_record_entry"):
            self.begin_record_entry = function(
                inputs=[],
                updates=updates,
                mode=self.theano_function_mode,
                # by the training algorithm, so we need to ignore the unused
                # input error
                self.accum.append(function(theano_args,
                                           givens=g,
                                           updates=u,

src/p/y/pylearn2-HEAD/pylearn2/costs/dbm.py   pylearn2(Download)
                        size=elem.shape, dtype=elem.dtype, n=1) / include_prob
 
        rval.on_load_batch = [utils.function(inputs, updates=updates)]
 
        return rval

src/p/y/pylearn2-HEAD/pylearn2/models/model.py   pylearn2(Download)
from pylearn2.model_extensions.model_extension import ModelExtension
from pylearn2.space import NullSpace
from pylearn2.utils import function
from pylearn2.utils import safe_zip
from pylearn2.utils.track_version import MetaLibVersion
        updates = OrderedDict(izip_no_length_check(params, params))
        self.modify_updates(updates)
        f = function([], updates=updates)
        f()
 

src/p/y/pylearn2-HEAD/pylearn2/models/mlp.py   pylearn2(Download)
from pylearn2.space import Space
from pylearn2.space import VectorSpace
from pylearn2.utils import function
from pylearn2.utils import is_iterable
from pylearn2.utils import py_float_types

src/p/y/pylearn2-HEAD/pylearn2/training_algorithms/tests/test_bgd.py   pylearn2(Download)
from pylearn2.utils.data_specs import DataSpecsMapping
from theano import shared
from pylearn2.utils import function
from pylearn2.costs.cost import FixedVarDescr
from pylearn2.costs.cost import SumOfCosts
            mapping = DataSpecsMapping(data_specs)
            data_tuple = mapping.flatten(data, return_tuple=True)
            theano_func = function([],
                    updates=[(unsup_counter, unsup_counter + 1)])
            def on_load(batch, mapping=mapping, theano_func=theano_func):
            rval.fixed_vars = {'sup_aux_var': sup_counter}
 
            theano_func = function([], updates=[(sup_counter,
                sup_counter + 1)])
            def on_load(data):

src/p/y/pylearn2-HEAD/pylearn2/space/tests/test_space.py   pylearn2(Download)
                            NullSpace,
                            is_symbolic_batch)
from pylearn2.utils import function, safe_zip
 
 
    C = conv.format_as(B, vec)
 
    f = function([X, A], [Z, C])
 
    X = rng.randn(*(conv.get_origin_batch(batch_size).shape)).astype(X.dtype)