Did I find the right examples for you? yes no

All Samples(6)  |  Call(3)  |  Derive(0)  |  Import(3)
Split a command line's arguments in a shell-like manner.

This is a modified version of the standard library's shlex.split()
function, but with a default of posix=False for splitting, so that quotes
in inputs are respected.

If strict=False, then any error that shlex.split would raise results in the
unparsed remainder being returned as the last element of the list, rather
than an exception being raised. This is because we sometimes use arg_split
to parse things other than command-line args.

        def arg_split(s, posix=False, strict=True):
    """Split a command line's arguments in a shell-like manner.

    This is a modified version of the standard library's shlex.split()
    function, but with a default of posix=False for splitting, so that quotes
    in inputs are respected.

    if strict=False, then any errors shlex.split would raise will result in the
    unparsed remainder being the last element of the list, rather than raising.
    This is because we sometimes use arg_split to parse things other than
    command-line args.
    """

    # Unfortunately, python's shlex module is buggy with unicode input:
    # http://bugs.python.org/issue1170
    # At least encoding the input when it's unicode seems to help, but there
    # may be more problems lurking.  Apparently this is fixed in python3.
    is_unicode = False
    if (not py3compat.PY3) and isinstance(s, unicode):
        is_unicode = True
        s = s.encode('utf-8')
    lex = shlex.shlex(s, posix=posix)
    lex.whitespace_split = True
    # Extract tokens, ensuring that things like leaving open quotes
    # does not cause this to raise.  This is important, because we
    # sometimes pass Python source through this (e.g. %timeit f(" ")),
    # and it shouldn't raise an exception.
    # It may be a bad idea to parse things that are not command-line args
    # through this function, but we do, so let's be safe about it.
    lex.commenters='' #fix for GH-1269
    tokens = []
    while True:
        try:
            tokens.append(next(lex))
        except StopIteration:
            break
        except ValueError:
            if strict:
                raise
            # couldn't parse, get remaining blob as last token
            tokens.append(lex.token)
            break
    
    if is_unicode:
        # Convert the tokens back to unicode.
        tokens = [x.decode('utf-8') for x in tokens]
    return tokens
        


src/a/n/antisocial-HEAD/ve/lib/python2.7/site-packages/IPython/core/completerlib.py   antisocial(Download)
from IPython.core.error import TryNext
from IPython.utils import py3compat
from IPython.utils._process_common import arg_split
 
# FIXME: this should be pulled in with the right call via the component system
def magic_run_completer(self, event):
    """Complete files that end in .py or .ipy for the %run command.
    """
    comps = arg_split(event.line, strict=False)
    relpath = (len(comps) > 1 and comps[-1] or '').strip("'\"")

src/i/p/ipython-2.0.0/IPython/core/completerlib.py   ipython(Download)
from IPython.core.completer import expand_user, compress_user
from IPython.core.error import TryNext
from IPython.utils._process_common import arg_split
from IPython.utils.py3compat import string_types
 
def magic_run_completer(self, event):
    """Complete files that end in .py or .ipy or .ipynb for the %run command.
    """
    comps = arg_split(event.line, strict=False)
    # relpath should be the current token that we need to complete.

src/i/p/ipython-HEAD/IPython/core/completerlib.py   ipython(Download)
from IPython.core.completer import expand_user, compress_user
from IPython.core.error import TryNext
from IPython.utils._process_common import arg_split
from IPython.utils.py3compat import string_types
 
def magic_run_completer(self, event):
    """Complete files that end in .py or .ipy or .ipynb for the %run command.
    """
    comps = arg_split(event.line, strict=False)
    # relpath should be the current token that we need to complete.