All Samples (10)  |  Call (6)  |  Derive (1)  |  Import (3)

src/p/y/pylearn2-HEAD/pylearn2/models/softmax_regression.py   pylearn2
 
        super(SoftmaxRegression, self).__init__(
                layers=[mlp.Softmax(n_classes=n_classes, layer_name='y',
                    irange=irange, istdev=istdev, sparse_init=sparse_init,
                    W_lr_scale=W_lr_scale, b_lr_scale=b_lr_scale,
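
The snippet above shows that SoftmaxRegression is just an MLP whose only layer is a Softmax. A minimal sketch of building the equivalent model directly and compiling its forward pass (the 4-input / 3-class shapes are made up for illustration):

import numpy as np
import theano
from pylearn2.models.mlp import MLP, Softmax

# A single Softmax layer on nvis inputs is multinomial logistic regression.
model = MLP(nvis=4,
            layers=[Softmax(n_classes=3, layer_name='y', irange=0.05)])

X = model.get_input_space().make_theano_batch()
f = theano.function([X], model.fprop(X), allow_input_downcast=True)
probs = f(np.random.rand(5, 4))  # shape (5, 3); each row sums to 1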

src/k/a/kaggle-cifar10-HEAD/kaggle_train.py   kaggle-cifar10
                   max_col_norm=1.9)
 
output = mlp.Softmax(layer_name='y',
                     n_classes=10,
                     irange=.005,
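
Here max_col_norm caps the L2 norm of each column of a layer's weight matrix after every update, the usual constraint in dropout training recipes. A hedged sketch of a complete Softmax output layer using it (the 1.9 bound shown above may belong to the preceding hidden layer; the values below are illustrative):

from pylearn2.models import mlp

# 10-class output layer whose weight columns are renormalized to
# an L2 norm of at most 1.9 after each update.
output = mlp.Softmax(layer_name='y',
                     n_classes=10,
                     irange=.005,
                     max_col_norm=1.9)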

src/p/y/pylearn2-HEAD/pylearn2/costs/tests/test_lp_penalty_cost.py   pylearn2
import theano
from pylearn2.models.mlp import Linear
from pylearn2.models.mlp import Softmax
from pylearn2.models.mlp import MLP
from pylearn2.costs.cost import LpPenalty
def test_correctness():
    model = MLP(
        layers=[Linear(dim=10, layer_name='linear', irange=1.0),
                Softmax(n_classes=2, layer_name='softmax', irange=1.0)],
        batch_size=10,
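
LpPenalty turns an Lp norm of selected variables into a cost term. Without relying on its exact constructor signature, the same L2 term can be sketched by hand from the model's parameters:

import theano.tensor as T

# Sum of squared entries over every parameter tensor: the p=2 case.
penalty = sum(T.sqr(p).sum() for p in model.get_params())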

src/p/y/pylearn2-HEAD/pylearn2/termination_criteria/tests/test_init.py   pylearn2
 
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
from pylearn2.models.mlp import MLP, Softmax
from pylearn2.monitor import push_monitor
from pylearn2.train import Train
    def produce_train_obj(new_epochs, model=None):
        if model is None:
            model = MLP(layers=[Softmax(layer_name='y',
                                        n_classes=2,
                                        irange=0.)],
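
produce_train_obj wraps the model in a Train object so the test can stop, push_monitor, and resume. A hedged sketch of the pieces around such a Train call (dataset, targets, and hyperparameters are all made up; one-hot targets match the Softmax's output space):

import numpy as np
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
from pylearn2.models.mlp import MLP, Softmax
from pylearn2.termination_criteria import EpochCounter
from pylearn2.train import Train
from pylearn2.training_algorithms.sgd import SGD

rng = np.random.RandomState(0)
dataset = DenseDesignMatrix(X=rng.rand(20, 3).astype('float32'),
                            y=np.eye(2)[rng.randint(0, 2, 20)])
model = MLP(nvis=3,
            layers=[Softmax(layer_name='y', n_classes=2, irange=0.)])
algorithm = SGD(learning_rate=.01, batch_size=5,
                termination_criterion=EpochCounter(max_epochs=2))
Train(dataset=dataset, model=model, algorithm=algorithm).main_loop()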

src/p/y/pylearn2-HEAD/pylearn2/models/tests/test_mlp.py   pylearn2
import numpy as np
import theano
from theano import tensor
from pylearn2.models.mlp import (MLP, Linear, Softmax, Sigmoid,
                                 exhaustive_dropout_average,
                                 sampled_dropout_average)
    inp = tensor.matrix()  # symbolic input batch used by both tests below
    mlp = MLP(nvis=2, layers=[Linear(2, 'h0', irange=0.8),
                              Linear(2, 'h1', irange=0.8),
                              Softmax(3, 'out', irange=0.8)])
    out = sampled_dropout_average(mlp, inp, 5)
    f = theano.function([inp], out, allow_input_downcast=True)
    mlp = MLP(nvis=2, layers=[Linear(2, 'h0', irange=0.8),
                              Linear(2, 'h1', irange=0.8),
                              Softmax(3, 'out', irange=0.8)])
    out = exhaustive_dropout_average(mlp, inp)
    f = theano.function([inp], out, allow_input_downcast=True)
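
Both helpers return a symbolic average of the network's softmax output over dropout masks (a fixed number of sampled masks vs. all of them), so the compiled function still returns one probability distribution per row. A quick usage sketch:

import numpy as np

x = np.random.rand(4, 2)
probs = f(x)  # shape (4, 3), matching the 3-way Softmax above
assert np.allclose(probs.sum(axis=1), 1.0, atol=1e-5)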

src/p/y/pylearn2-HEAD/pylearn2/sandbox/nlp/models/mlp.py   pylearn2
class Softmax(mlp.Softmax):
    """
    An extension of the MLP's softmax layer which monitors
    the perplexity.
    """
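
The same pattern, subclassing mlp.Softmax and extending its monitoring, can be sketched like this (PerplexitySoftmax is a hypothetical name; the real sandbox class differs in detail, but get_layer_monitoring_channels is the layer hook pylearn2 provides):

from pylearn2.models import mlp
from theano import tensor as T

class PerplexitySoftmax(mlp.Softmax):

    def get_layer_monitoring_channels(self, state_below=None,
                                      state=None, targets=None):
        rval = super(PerplexitySoftmax, self).get_layer_monitoring_channels(
            state_below=state_below, state=state, targets=targets)
        if state is not None and targets is not None:
            # perplexity = exp(mean per-example cross-entropy)
            nll = -(targets * T.log(state)).sum(axis=1).mean()
            rval['perplexity'] = T.exp(nll)
        return rval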