c.IntelClient usage samples: 12 in total (Call: 12, Derive: 0, Import: 0)

src/s/a/sahara-2014.1/sahara/plugins/intel/v3_0_2/installer.py   (project: sahara)
    # Fragment: cluster installation. The client is built from the manager
    # instance and the cluster name before the cluster is created.
    all_hosts = list(set([i.fqdn() for i in u.get_instances(cluster)]))

    client = c.IntelClient(mng_instance, cluster.name)

    LOG.info("Create cluster")


# Fragment: starting the Hadoop services through the client.
def start_cluster(cluster):
    client = c.IntelClient(u.get_instance(cluster, 'manager'), cluster.name)

    LOG.debug("Starting hadoop services")
    client.services.hdfs.start()


    # Fragment: scaling. Newly added hosts are registered with
    # client.nodes.add (the call is truncated in this sample).
            to_scale_nm.append(i)

    client = c.IntelClient(u.get_instance(cluster, 'manager'), cluster.name)
    rack = '/Default'
    client.nodes.add(scale_ins_hosts, rack, 'hadoop',


    # Fragment: decommissioning. Node manager hosts are collected and the
    # data nodes to decommission are gathered before removal.
    nm_hosts = [nm.fqdn() for nm in u.get_nodemanagers(cluster)]

    client = c.IntelClient(u.get_instance(cluster, 'manager'), cluster.name)

    dec_dn_hosts = []
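
Read together, the installer fragments show one recurring pattern: build an IntelClient from the cluster's 'manager' instance and the cluster name, then drive Hadoop through the client's services and nodes attributes. The sketch below restates that pattern in one place; the import paths for the client module (c) and the plugin utils (u) are assumptions, since the samples never show them.

# Minimal sketch of the call pattern above, not sahara's actual installer code.
# Both import paths are assumptions; the samples only show the aliases c and u.
from sahara.plugins.general import utils as u      # assumed location of get_instance()
from sahara.plugins.intel import client as c       # assumed location of IntelClient


def start_hdfs(cluster):
    # Every sample builds the client the same way: the instance running the
    # 'manager' node process, plus the cluster name.
    manager = u.get_instance(cluster, 'manager')
    client = c.IntelClient(manager, cluster.name)

    # Services hang off the client as attributes, e.g. HDFS.
    client.services.hdfs.start()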

src/s/a/sahara-2014.1/sahara/tests/unit/plugins/intel/v3_0_2/test_client.py   (project: sahara)
    def test_cluster_op(self, get, post):
        client = c.IntelClient(self._get_instance(), 'rty')
 
        data = {'lelik': 'bolik'}
 
    def test_nodes_op(self, get, post, delete):
        client = c.IntelClient(self._get_instance(), 'rty')
 
        # add
        post.return_value = r.make_resp(data={

    def test_params_op(self, post, put):
        client = c.IntelClient(self._get_instance(), 'rty')
        post.return_value = r.make_resp()
        put.return_value = r.make_resp()
 
    def test_base_services_op(self, get, post):
        client = c.IntelClient(self._get_instance(), 'rty')
 
        # start
        post.return_value = r.make_resp()

    def test_services_op(self, get, post, delete):
        client = c.IntelClient(self._get_instance(), 'rty')
 
        # add
        post.return_value = r.make_resp()
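
Every test fragment follows the same shape: the HTTP verbs used by IntelClient are replaced with mocks (the injected get/post/delete/put arguments), canned responses come from a helper (r.make_resp), and one client operation is exercised against them. The sketch below mirrors that shape; the mock.patch targets, the import paths and the _get_instance() stub are assumptions, since the samples do not show them.

import unittest

import mock  # the external mock package used by 2014.1-era OpenStack tests

# Assumed imports: the samples only show the aliases c and r.
from sahara.plugins.intel import client as c                  # assumption
from sahara.tests.unit.plugins.intel import response as r     # hypothetical helper


class IntelClientTest(unittest.TestCase):
    def _get_instance(self):
        # Stand-in for the helper used in the samples: a fake manager
        # instance with just enough attributes for IntelClient (assumed).
        instance = mock.Mock()
        instance.management_ip = '127.0.0.1'
        return instance

    # mock.patch decorators apply bottom-up: the decorator nearest the method
    # supplies the first argument after self, which is why the sample
    # signatures read (self, get, post). The patch targets here are guesses.
    @mock.patch('requests.post')   # injected as post
    @mock.patch('requests.get')    # injected as get
    def test_base_services_op(self, get, post):
        client = c.IntelClient(self._get_instance(), 'rty')

        # start: the manager answers with an empty success response
        post.return_value = r.make_resp()
        client.services.hdfs.start()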