Did I find the right examples for you? yes no

All Samples(3)  |  Call(2)  |  Derive(0)  |  Import(1)

        def just_tokenize(*tokens):
    g = pgm.Grammar(noop, tokens)
    def meta(text):
        _tokens = g.get_tokens(text)
        assert ''.join(tok.value for tok in _tokens) == text
        return _tokens
    return meta
        


src/c/o/CodeTalker-1.1/tests/tokenize/ctokens.py   CodeTalker(Download)
#!/usr/bin/env python
 
from util import just_tokenize, make_tests, make_fails, TSTRING, STRING, SSTRING, ID, WHITE, NUMBER, INT, HEX, CCOMMENT, CMCOMMENT, PYCOMMENT, NEWLINE, ANY
 
def make_single(tok, *tests):
    """Generate passing test cases for a single token class.

    Builds a tokenizer that accepts *tok* interleaved with whitespace,
    then registers the given *tests* under the token's name.
    """
    tokenizer = just_tokenize(tok, WHITE)
    name = tok.__name__
    return make_tests(globals(), name, tokenizer, tests)
 
def fail_single(tok, *tests):
    fn = just_tokenize(tok)