Class that adds Sitemap based IO to ResourceContainer

resources - an iterable of resources

count - optional explicit setting of the number of items in
    resources, which is useful when resources is an iterator/generator.
    Used instead of calling len(resources)

md - metadata information for the list (<rs:md>)
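A list built from a generator needs the explicit count, since len() cannot be taken of a generator. A minimal usage sketch, assuming the constructor keywords match the parameter names above and that md is a plain dict (as suggested by the excerpt's use of self.md.copy()); the URIs are invented for the example:

# Illustrative sketch only, not taken from the resync sources
from resync.list_base_with_index import ListBaseWithIndex
from resync.resource import Resource

def generate_resources(n):
    # A generator has no len(), so the list needs an explicit count
    for i in range(n):
        yield Resource(uri="http://example.org/res/%d" % i)

n = 100000
resource_list = ListBaseWithIndex(resources=generate_resources(n),
                                  count=n,
                                  md={'capability': 'resourcelist'})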

src/r/e/resync-0.9.5/resync/list_base_with_index.py   resync
from urllib import URLopener
 
from list_base import ListBase
from resource import Resource
from sitemap import Sitemap
class ListBaseWithIndex(ListBase):
    """Class that add handling of sitemapindexes to ListBase
 
    Splitting of a list into multiple sitemaps with a sitemapindex is currently
    handled based solely on the number of entries in the list. The configurable
    ...
    """
        if (not num_parts):
            raise ListBaseIndexError("Request for sitemapindex for list with only %d entries when max_sitemap_entries is set to %s" % (len(self),str(self.max_sitemap_entries)))
        index=ListBase()
        index.sitemapindex=True
        index.capability_name = self.capability_name
        if (stop>len(self)):
            stop=len(self)
        part = ListBase( itertools.islice(self.resources,start,stop) )
        part.capability_name = self.capability_name
        part.default_capability()
            # names of the sitemaps as we go. Copy md from self into
            # the index and use this for all chunks also
            index=ListBase(md=self.md.copy(), ln=list(self.ln))
            index.capability_name = self.capability_name
            index.default_capability()
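The fragments above carve self.resources into sitemap-sized parts with itertools.islice and build a separate ListBase to act as the index. A self-contained sketch of that chunking idea, assuming a fixed part size; the names chunk_resources and MAX_SITEMAP_ENTRIES are illustrative, not resync API:

import itertools

MAX_SITEMAP_ENTRIES = 50000  # illustrative default; the real limit is configurable

def chunk_resources(resources, size=MAX_SITEMAP_ENTRIES):
    # Yield (start, stop, part) slices covering the list, mirroring the
    # itertools.islice(self.resources, start, stop) calls in the excerpt
    total = len(resources)
    num_parts = (total + size - 1) // size  # ceiling division
    if num_parts < 2:
        # A sitemapindex is only warranted when the list splits into more than one part
        raise ValueError("List with %d entries fits in a single sitemap" % total)
    for part_num in range(num_parts):
        start = part_num * size
        stop = min(start + size, total)
        yield start, stop, list(itertools.islice(resources, start, stop))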

src/r/e/resync-HEAD/resync/list_base_with_index.py   resync (identical to the 0.9.5 excerpt above)

src/r/e/resync-0.9.5/resync/capability_list.py   resync
from resource import Resource
from resource_set import ResourceSet
from list_base import ListBase
from sitemap import Sitemap
 
class CapabilityList(ListBase):
    """Class representing a Capability List
 
    A Capability List will admit only one resource with any given
    URI. The iterator over resources is expected to return them in ...
    """
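A minimal sketch of the one-resource-per-URI rule described above; this is illustrative only and is not the ResourceSet implementation used by CapabilityList:

class UriKeyedSet(object):
    # Admit at most one resource per URI: a later add() with the same URI
    # replaces the earlier entry instead of creating a duplicate.
    def __init__(self):
        self._by_uri = {}

    def add(self, resource):
        self._by_uri[resource.uri] = resource

    def __iter__(self):
        # Iterate in URI order (one deterministic choice; the real class may differ)
        for uri in sorted(self._by_uri):
            yield self._by_uri[uri]

    def __len__(self):
        return len(self._by_uri)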

src/r/e/resync-HEAD/resync/capability_list.py   resync (identical to the 0.9.5 excerpt above)