
All Samples(23)  |  Call(23)  |  Derive(0)  |  Import(0)

src/q/u/quality_report-1.0/qualitylib/metric_source/birt.py   quality_report
    def has_test_design(self, product):
        ''' Return whether the product has a test design (i.e. user stories,
            logical test cases, reviews of user stories and logical test
            cases, etc.) '''
        soup = self.soup(self.__test_design_url)
    def __performance_pages(self, product, version):
        ''' Return the rows with page performance numbers. '''
        soup = self.soup(self.page_performance_url(product, version))
        inner_table = soup('table')[0]('table')[0]
        return inner_table('tr')[1:]  # Skip header row
    def __manual_test_dates(self, product, version):
        ''' Return the manual test case dates. '''
        soup = self.soup(self.__manual_test_execution_url % (product, version))
        inner_table = soup('table')[0]('table')[0]
        rows = inner_table('tr')[1:]  # Skip header row
 
        if not self.__test_design_report:
            self.__test_design_report = self.soup(self.__test_design_url)
        for row in rows(table(table_nr)):
            if row_contains_product(row, product):
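
All of the samples on this page call a shared soup(url) helper. A minimal sketch of what such a helper might look like, assuming the urllib2 and BeautifulSoup 3 APIs the snippets visibly rely on; the class name is hypothetical:

    import urllib2
    from BeautifulSoup import BeautifulSoup

    class BeautifulSoupOpener(object):
        ''' Hypothetical mix-in providing the soup() helper the samples
            call; the real implementation lives elsewhere in qualitylib. '''
        def soup(self, url):
            ''' Open the url and return its contents as a BeautifulSoup
                parse tree. May raise urllib2.HTTPError. '''
            return BeautifulSoup(urllib2.urlopen(url))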

src/q/u/quality_report-1.0/qualitylib/metric_source/performance_report.py   quality_report
        urls = self.urls(product, version)
        for url in urls:
            soup = self.soup(url)
            for row in soup('tr'):
                query_names = row('td', attrs={'class': 'name'})
    def date(self, product, version):
        ''' Return the date when performance was last measured. '''
        urls = self.urls(product, version)
        if urls:
            soup = self.soup(list(urls)[0])  # Any url is fine
    def urls(self, product, version):
        ''' Return the url(s) of the performance report for the specified
            product and version. '''
        soup = self.soup(self.__report_folder_url)
        urls = {0: set()}  # {test_run_number: set(of urls)}
    def __report_covers_product_and_version(self, url, product, version):
        ''' Return whether the performance report covers the specified product
            and version. '''
        product_query_id, product_long_id = product
        soup = self.soup(url)
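
Calling a soup or tag object, as in soup('tr') and row('td', ...) above, is BeautifulSoup 3 shorthand for findAll. A self-contained illustration of the row-filtering idiom, using made-up report HTML:

    from BeautifulSoup import BeautifulSoup

    HTML = '''<table><tr><th>Query</th><th>Time</th></tr>
              <tr><td class="name">search</td><td>120</td></tr></table>'''

    soup = BeautifulSoup(HTML)
    rows = soup('tr')[1:]  # shorthand for soup.findAll('tr'); skips the header row
    query_names = rows[0]('td', attrs={'class': 'name'})  # tags are callable too
    print [cell.string for cell in query_names]  # prints [u'search']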

src/q/u/quality_report-1.0/qualitylib/metric_source/sonar.py   quality_report
    def __init__(self, url):
        super(SonarDashboard, self).__init__()
        self.__url = url
        self.__soup = self.soup(url)
 
    def __init__(self, url):
        super(SonarViolations, self).__init__()
        self.__soup = self.soup(url)
 
    @utils.memoized
    def __analysis_exists(self, product):
        ''' Return whether a Sonar analysis for this product already exists. '''
        sonar_id = '"%s"' % str(product)
        result = sonar_id in str(self.soup(self.__sonar_url + 'api/resources'))
        logging.info('Sonar analysis for %s %s', sonar_id,
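
The __analysis_exists check above skips tree navigation entirely and simply tests whether the quoted resource id occurs anywhere in the stringified response. The same test in isolation, against a made-up api/resources payload:

    from BeautifulSoup import BeautifulSoup

    RESOURCES = '[{"id": 42, "key": "nl.example:product"}]'  # made-up payload

    sonar_id = '"%s"' % 'nl.example:product'
    print sonar_id in str(BeautifulSoup(RESOURCES))  # prints True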

src/q/u/quality_report-1.0/qualitylib/metric_source/wiki.py   quality_report
    def team_spirit(self, team):
        ''' Return the team spirit of the team. Team spirit is either :-), :-|,
            or :-( '''
        soup = self.soup(self.__wiki_url)
        try:
    def date_of_last_team_spirit_measurement(self, team):
        ''' Return the date that the team spirit of the team was last
            measured. '''
        soup = self.soup(self.__wiki_url)
        columns = len(soup('tr', id='smiley_%s' % team.id_string())[0]('td'))
    def comment(self, metric_id):
        ''' Return a comment on a metric from the Wiki. '''
        soup = self.soup(self.__wiki_url)
        try:
            metric_row = soup('table')[1]('tr', id=metric_id)[0]
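
Keyword arguments in a findAll-style call, as in soup('tr', id=...) above, filter elements on their attributes in BeautifulSoup 3. A standalone illustration with made-up wiki markup:

    from BeautifulSoup import BeautifulSoup

    HTML = '<table><tr id="smiley_team1"><td>:-)</td><td>:-|</td></tr></table>'

    soup = BeautifulSoup(HTML)
    row = soup('tr', id='smiley_team1')[0]  # match rows on their id attribute
    print len(row('td'))  # prints 2, one column per measurement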

src/q/u/quality_report-1.0/qualitylib/metric_source/coverage_report.py   quality_report
    def coverage(self, product):
        ''' Return the ART coverage for a specific product. '''
        coverage_url = self.get_coverage_url(product)
        try:
            soup = self.soup(coverage_url)
        coverage_url = self.get_coverage_date_url(product)
        try:
            soup = self.soup(coverage_url)
        except urllib2.HTTPError:
            coverage_date = now()
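
The pattern above guards soup() with urllib2.HTTPError and substitutes a default when the report is unreachable. A self-contained version of that fallback; the date-cell layout is made up, and datetime.datetime.now stands in for the snippet's now():

    import urllib2
    from datetime import datetime
    from BeautifulSoup import BeautifulSoup

    def coverage_date(url):
        ''' Return the date of the coverage report at url, or the current
            date and time when the report can't be retrieved. '''
        try:
            soup = BeautifulSoup(urllib2.urlopen(url))
        except urllib2.HTTPError:
            return datetime.now()
        date_cell = soup('td', attrs={'class': 'date'})[0]  # made-up layout
        return datetime.strptime(date_cell.string, '%Y-%m-%d')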

src/q/u/quality_report-1.0/qualitylib/metric_source/nagios.py   quality_report
            server. This assumes there is just one service for each server. '''
        url = self.service_availability_url(time_period, report_time_period)
        soup = self.soup(url)
        table = soup('table', 'data')[0]
        ok_label = 'serviceOK' if self.__availability_via_service else 'hostUP'
        availabilities = []
        for host_group in self.__host_groups:
            soup = self.soup(self.__hostgroup_availability_url % \
                             (dict(host_group=host_group, 
                                   time_period=time_period,
    def __servers_and_availability(self):
        ''' Get the servers and their availability. '''
        servers = {}
        soup = self.soup(self.availability_url(time_period='last7days'))
        table_headers = soup('div', 'dataTitle')
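
Passing a bare string as the second argument, as in soup('table', 'data') and soup('div', 'dataTitle') above, restricts the match by CSS class in BeautifulSoup 3. In isolation:

    from BeautifulSoup import BeautifulSoup

    HTML = '''<table class="layout"><tr><td>menu</td></tr></table>
              <table class="data"><tr><td>98.5%</td></tr></table>'''

    soup = BeautifulSoup(HTML)
    table = soup('table', 'data')[0]  # the string argument matches the CSS class
    print table('td')[0].string  # prints 98.5%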

src/q/u/quality_report-1.0/qualitylib/metric_source/jenkins.py   quality_report
    def unstable_arts_url(self, projects, days):
        ''' Return the urls for the ARTs that have been unstable for the
            specified number of days. '''
        soup = self.soup(self.__jenkins_url)
        arts = soup('table', id='projectstatus')[0]('a',
    def __build_date(self, url):
        ''' Return the date and time of the build. '''
        try:
            datetime_text = self.__get_build_date_time(self.soup(url))
            return self.__parse_build_date(datetime_text)

src/q/u/quality_report-1.0/qualitylib/metric_source/pom.py   quality_report
    def __get_pom_soup(self, url, log_level=logging.WARNING):
        ''' Return the soup version of the pom. '''
        pom_url = url + '/pom.xml'
        try:
            return self.soup(pom_url)
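
pom.xml is XML rather than HTML; BeautifulSoup 3 ships BeautifulStoneSoup for that, though whether __get_pom_soup uses it is not visible in this excerpt. A sketch, assuming BeautifulStoneSoup and a made-up minimal pom:

    from BeautifulSoup import BeautifulStoneSoup

    POM = '''<project><artifactId>demo</artifactId>
             <version>1.0</version></project>'''  # made-up minimal pom

    soup = BeautifulStoneSoup(POM)
    print soup('version')[0].string  # prints 1.0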

src/q/u/quality_report-1.0/qualitylib/metric_source/javamelody.py   quality_report
    def mean_request_times(self, product_id, start, end):
        ''' Return a list of the mean request times of the specified product
            in the specified period. '''
        soup = self.soup(self.url(product_id, start, end, sep='/'))
        mean_request_times = []