
All Samples(80)  |  Call(49)  |  Derive(0)  |  Import(31)

src/p/y/pylearn2-HEAD/pylearn2/sandbox/lisa_rl/bandit/gaussian_bandit.py
import numpy as np
from pylearn2.sandbox.lisa_rl.bandit.environment import Environment
from pylearn2.utils import sharedX
from pylearn2.utils.rng import make_np_rng, make_theano_rng
 
 
    def __init__(self, num_arms, mean_std = 1.0, std_std = 1.0):
        self.rng = make_np_rng(None, [2013, 11, 12], which_method="randn")
        self.means = sharedX(self.rng.randn(num_arms) * mean_std)
        self.stds = sharedX(np.abs(self.rng.randn(num_arms) * std_std))
        self.theano_rng = make_theano_rng(None, self.rng.randint(2 ** 16), which_method="normal")
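The call above seeds a fresh NumPy stream from a fixed date-style list and then derives a Theano stream from it. A minimal sketch of what this usage amounts to, assuming make_np_rng(None, seed, which_method=...) simply wraps numpy.random.RandomState and checks that the named method exists (num_arms=5 is an illustrative value):

# Sketch only: the helper internals are inferred from the call site above.
import numpy as np

rng = np.random.RandomState([2013, 11, 12])   # fixed seed -> reproducible stream
assert hasattr(rng, "randn")                  # what which_method="randn" guards
means = rng.randn(5) * 1.0                    # num_arms=5, mean_std=1.0 (illustrative)
theano_seed = rng.randint(2 ** 16)            # seed handed to the Theano stream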

src/p/y/pylearn2-HEAD/pylearn2/datasets/utlc.py
import pylearn2.datasets.filetensor as ft
from pylearn2.utils.string_utils import preprocess
from pylearn2.utils.rng import make_np_rng
 
 
    test = load_filetensor(tename)
    if randomize_valid:
        rng = make_np_rng(None, [1,2,3,4], which_method='permutation')
        perm = rng.permutation(valid.shape[0])
        valid = valid[perm]
    if randomize_test:
        rng = make_np_rng(None, [1,2,3,4], which_method='permutation')
    # this type of indexing.
    if randomize_valid:
        rng = make_np_rng(None, [1,2,3,4], which_method='permutation')
        perm = rng.permutation(valid.shape[0])
        valid = valid[perm]
    if randomize_test:
        rng = make_np_rng(None, [1,2,3,4], which_method='permutation')
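Note that a fresh stream with the same fixed seed [1, 2, 3, 4] is built separately for the validation and test shuffles, so each permutation is reproducible regardless of which branches run. A small sketch of the row-shuffling pattern (the array shape is made up):

import numpy as np

valid = np.arange(10).reshape(5, 2)        # stand-in for the validation set
rng = np.random.RandomState([1, 2, 3, 4])  # same effect as make_np_rng(None, [1,2,3,4], ...)
perm = rng.permutation(valid.shape[0])     # reproducible row order
valid = valid[perm]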

src/p/y/pylearn2-HEAD/pylearn2/models/rbm.py
import theano
from pylearn2.space import VectorSpace
from pylearn2.utils import safe_union
from pylearn2.utils.rng import make_np_rng, make_theano_rng
theano.config.warn.sum_div_dimshuffle_bug = False
 
    def __init__(self, rbm, particles, rng):
        self.__dict__.update(rbm=rbm)
 
        rng = make_np_rng(rng, which_method="randn")
        seed = int(rng.randint(2 ** 30))
            init_bias_vis = 0.0
 
        rng = make_np_rng(rng, 1001, which_method="uniform")
        self.rng = rng
 
            rng=None):
 
        rng = make_np_rng(rng, 1001, which_method="rand")
 
        self.nhid = nhid
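These calls pass an existing rng (or None) together with a default seed such as 1001, so make_np_rng either reuses the caller's RandomState or builds a new, seeded one. A hypothetical re-implementation of that behaviour, inferred only from the call sites shown here:

import numpy as np

def make_np_rng_sketch(rng_or_seed=None, default_seed=None, which_method=None):
    # Reuse a caller-supplied RandomState; otherwise seed a new one.
    if rng_or_seed is None:
        rng_or_seed = default_seed
    if isinstance(rng_or_seed, np.random.RandomState):
        rng = rng_or_seed
    else:
        rng = np.random.RandomState(rng_or_seed)
    # which_method can be a single name or a list of names to check for.
    methods = [which_method] if isinstance(which_method, str) else (which_method or [])
    for name in methods:
        assert hasattr(rng, name)
    return rng

rng = make_np_rng_sketch(None, 1001, which_method="uniform")
seed = int(rng.randint(2 ** 30))               # e.g. to seed a Theano stream, as above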

src/p/y/pylearn2-HEAD/pylearn2/datasets/cos_dataset.py
import copy
from theano import config
import theano.tensor as T
from pylearn2.utils.rng import make_np_rng
 
 
        """
        self.min_x, self.max_x, self.std = min_x, max_x, std
        rng = make_np_rng(rng, [17, 2, 946], which_method=['uniform', 'randn'])
        self.default_rng = copy.copy(rng)
        self.rng = rng
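Here which_method is a list of required methods, and a copy of the freshly seeded stream is kept as default_rng so the dataset can later fall back to its initial state. A small sketch of that snapshot-and-restore pattern (the restore step is illustrative):

import copy
import numpy as np

rng = np.random.RandomState([17, 2, 946])   # stands in for make_np_rng(rng, [17, 2, 946], ...)
default_rng = copy.copy(rng)                # snapshot of the initial state

_ = rng.uniform(size=3)                     # consume some draws
rng = copy.copy(default_rng)                # restore the original state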

src/p/y/pylearn2-HEAD/pylearn2/scripts/papers/jia_huang_wkshp_11/extract_features.py
import numpy as np
from pylearn2.datasets.preprocessing import ExtractPatches, ExtractGridPatches, ReassembleGridPatches
from pylearn2.utils import serial
from pylearn2.utils.rng import make_np_rng
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix, DefaultViewConverter
from pylearn2.datasets.cifar10 import CIFAR10
        self.size = int(np.sqrt(nvis/3))
 
        rng = make_np_rng(None, [1,2,3], which_method="randint")
 
        #Generate the random pooling structure

src/p/y/pylearn2-HEAD/pylearn2/linear/local_c01b.py
 
import numpy as np
from pylearn2.utils import sharedX
from pylearn2.utils.rng import make_np_rng
from pylearn2.linear.conv2d import default_seed, default_sparse_seed
from pylearn2.linear.linear_transform import LinearTransform
    """
 
    rng = make_np_rng(rng, default_seed, which_method='uniform')
 
    def num_pos(img, stride, kwidth):
    values are sparse"""
 
    rng = make_np_rng(rng, default_sparse_seed, which_method=['randn','randint'])
 
    W = np.zeros(( output_space.num_channels, input_space.num_channels, \

src/p/y/pylearn2-HEAD/pylearn2/linear/conv2d_c01b.py
from pylearn2.utils.call_check import checked_call
from pylearn2.utils import sharedX
from pylearn2.utils.rng import make_np_rng
 
 
    """
 
    rng = make_np_rng(rng, default_seed, which_method='uniform')
 
    W = sharedX(rng.uniform(-irange, irange,
    """
 
    rng = make_np_rng(rng, default_sparse_seed,
                      which_method=['randn', 'randint'])
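Both conv2d modules follow the same pattern: a uniformly initialised weight tensor drawn from the seeded stream, plus a separate stream for sparse initialisation. A sketch of the uniform case (filter shape and irange are placeholder values; the sharedX wrapping into a Theano shared variable is omitted):

import numpy as np

rng = np.random.RandomState(12345)          # stands in for make_np_rng(rng, default_seed, ...)
irange = 0.05
W = rng.uniform(-irange, irange,
                size=(64, 3, 5, 5)).astype('float32')   # out x in x rows x cols (illustrative)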
 

src/p/y/pylearn2-HEAD/pylearn2/linear/conv2d.py
from pylearn2.linear.linear_transform import LinearTransform as P2LT
from pylearn2.utils import sharedX
from pylearn2.utils.rng import make_np_rng
 
 
    """
 
    rng = make_np_rng(rng, default_seed, which_method='uniform')
 
    W = sharedX(rng.uniform(
    )
 
    rng = make_np_rng(rng, default_sparse_seed,
                      which_method=['randn', 'randint'])
 

src/p/y/pylearn2-HEAD/pylearn2/datasets/svhn.py
from pylearn2.utils.serial import load
from pylearn2.utils.string_utils import preprocess
from pylearn2.utils.rng import make_np_rng
 
 
 
        # For consistency between experiments better to make new random stream
        rng = make_np_rng(None, 322, which_method="shuffle")
 
        def design_matrix_view(data_x, data_y):
 
        # For consistency between experiments better to make new random stream
        rng = make_np_rng(None, 322, which_method="shuffle")
 
        def design_matrix_view(data_x, data_y):
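The fixed seed 322 keeps the shuffle identical across experiments, as the comment notes. One common way to apply such a shuffle so that examples and labels stay aligned is to permute a shared index array; the arrays below are made up and this is not the SVHN loader itself:

import numpy as np

data_x = np.arange(20).reshape(10, 2)       # stand-in design matrix
data_y = np.arange(10)                      # stand-in labels

rng = np.random.RandomState(322)            # mirrors make_np_rng(None, 322, which_method="shuffle")
idx = np.arange(data_x.shape[0])
rng.shuffle(idx)                            # in-place, reproducible reordering
data_x, data_y = data_x[idx], data_y[idx]   # rows and labels stay aligned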

src/p/y/pylearn2-HEAD/pylearn2/datasets/preprocessing.py
import copy
from pylearn2.utils.insert_along_axis import insert_columns
from pylearn2.utils import sharedX
from pylearn2.utils.rng import make_np_rng
 
 
    def __init__(self, patch_shape, num_patches, rng=None):
        self.patch_shape = patch_shape
        self.num_patches = num_patches
 
        self.start_rng = make_np_rng(copy.copy(rng), [1,2,3], which_method="randint")
        start = self.start
        stop = self.stop
        rng = make_np_rng(self.seed, which_method="randint")
        X = dataset.X
        y = dataset.y
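ExtractPatches keeps a seeded start_rng, later rebuilds a stream from a stored seed, and uses randint to pick random patch locations. An illustrative sketch of drawing one random patch with a seeded stream (image and patch sizes are made up):

import numpy as np

X = np.zeros((4, 32, 32))                   # 4 stand-in images of 32x32
patch_shape = (8, 8)
rng = np.random.RandomState([1, 2, 3])      # mirrors make_np_rng(copy.copy(rng), [1, 2, 3], ...)

i = rng.randint(X.shape[0])                 # which image
r = rng.randint(X.shape[1] - patch_shape[0] + 1)   # top-left row
c = rng.randint(X.shape[2] - patch_shape[1] + 1)   # top-left column
patch = X[i, r:r + patch_shape[0], c:c + patch_shape[1]]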
