Generators are cool. They are available in Python 2.2 and later (in 2.2 you need ''from __future__ import generators'' to enable them).

See also http://python.org/cgi-bin/moinmoin/Generators


Alternative Viewpoint:

You don't need generators if you have proper lexical closures, first-class functions, and lambdas.  You don't even need CallWithCurrentContinuation to implement CoRoutine''''''s (which is what generators are).  Generators add unnecessary complexity; the same effect could be achieved by enhancing existing features of the PythonLanguage.
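
A rough sketch of what the closure/class-based alternative might look like (illustrative code, not from the comment above; the names are invented). In Python 2 the closure version keeps its counter in a mutable cell, since there is no ''nonlocal'':

    # Hand-rolled iterator with explicit state: what a generator
    # would otherwise write for you (Python 2 iterator protocol).
    class Counter:
        def __init__(self):
            self.n = 0
        def __iter__(self):
            return self
        def next(self):
            n = self.n
            self.n += 1
            return n

    # Closure version: the state lives in a list captured by the
    # inner function.
    def makeCounter():
        state = [0]
        def advance():
            n = state[0]
            state[0] = n + 1
            return n
        return advance

    c = Counter()
    print c.next(), c.next(), c.next()         # 0 1 2
    nextInt = makeCounter()
    print nextInt(), nextInt(), nextInt()      # 0 1 2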

''Agreed. I think it's funny that Python code using "yield" looks so remarkably similar to equivalent Ruby code using "yield", but uses a completely different underlying mechanism.''
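
To make the "different underlying mechanism" concrete, here is an illustrative sketch (function names invented for the example): Ruby's yield calls a block that the caller passed in, so the producer drives the consumer; Python's yield suspends the producer, so the consumer drives it with next().

    from __future__ import generators

    # Ruby-style: the producer receives the consumer as a callback
    # ("block") and pushes each value at it; the producer keeps
    # control until it is done.
    def eachUptoBlock(n, block):
        for i in range(n):
            block(i)

    # Python-style: the producer is suspended at every yield and the
    # consumer pulls values out one at a time.
    def eachUptoGen(n):
        for i in range(n):
            yield i

    def show(x):
        print x

    eachUptoBlock(3, show)        # push: 0 1 2
    for i in eachUptoGen(3):      # pull: 0 1 2
        print i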

GeneratorsAreNotCoroutines. See the following excellent post by TimPeters, which explains the difference between generators, coroutines and continuations. http://mail.python.org/pipermail/python-dev/1999-July/000467.html 

----
'''Some examples of using generators...'''
----
Probably the most frequent use of generators is in lexing and parsing.

    """
    Generator Example for Python 2.2 by JuneKim
    refer to Mc''''''Millan's coroutine example at
    http://davidf.sjsoft.com/mirrors/mcmillan-inc/tutorial4.html
    and PEP-255

        The result should be:

        d = sqrt(b^2 - 4*a*c); twoa = 2*a;
        L = -b/twoa; R = d/twoa; A1 = L + R;
        A2 = L - R                                                             
    done
    """

    test = """
               d    =   sqrt(b**2  -  4*a*c)
            twoa    =   2*a
               L    =   -b/twoa
               R    =   d/twoa
              A1    =   L + R
              A2    =   L - R\0
            """
    def getline(text):
        # yield the input one line at a time
        for line in text.split('\n'):
            yield line

    def disassembler(text):
        # yield the text character by character, marking each end of
        # line with ';' and stopping at the '\0' sentinel
        getlineGen = getline(text)
        for eachline in getlineGen:
            for c in eachline:
                if c == '\0':
                    yield c
                    return  # ends the generator (same as raising Stop''''''Iteration)
                yield c
            yield ';'

    def squasher(text):
        # collapse '**' into '^' and runs of whitespace into one space
        disGen = disassembler(text)
        for c in disGen:
            if c == '*':
                c2 = disGen.next()
                if c2 == '*':
                    c = '^'
                else:
                    yield c
                    c = c2
            if c in ' \t':
                for c2 in disGen:
                    if c2 not in ' \t':
                        break
                yield ' '
                c = c2
            if c == '\0':
                yield c
                return  # ends the generator (same as raising Stop''''''Iteration)
            yield c

    def assembler(text):
        # pack the character stream into fixed-width 72-column lines
        squasherGen = squasher(text)
        line = ''
        for c in squasherGen:
            if c == '\0':
                break
            if len(line) == 72:
                yield line
                line = ''
            line += c
        line += ' ' * (72 - len(line))
        yield line

    def putline(text):
        # drive the whole pipeline and print the assembled lines
        assemblerGen = assembler(text)
        for eachline in assemblerGen:
            print eachline

    if __name__=='__main__':
        putline(test)
        print 'done'

-- JuneKim
----
You can emulate Unix-style shell piping with generators.

    from __future__ import generators

    class Gen''''''Pipe:
        # wraps a source generator; '|' feeds it through a Pipe''''''Filter
        def __init__(self, generator):
            self.gen = generator  # sanity check omitted
        def __or__(self, nextPipe):
            self.gen = nextPipe._run(self.gen)
            return self
        def __iter__(self):
            return self.gen

    class Pipe''''''Filter:
        # base class for filters; '|' chains filters together before
        # they are attached to a Gen''''''Pipe
        def __init__(self):
            self._followingPipes = []
        def __or__(self, nextPipe):
            self._followingPipes += (nextPipe,)
            return self
        def _run(self, gen):
            gen = self.run(gen)
            while self._followingPipes:
                gen = self._followingPipes[0].run(gen)
                self._followingPipes.pop(0)
            return gen
        def run(self, gen):
            raise NotImplementedE''''''rror

    class Test(Pipe''''''Filter):
        # pass through only the items for which the test succeeds
        def __init__(self, test):
            Pipe''''''Filter.__init__(self)
            self.test = test
        def run(self, gen):
            for x in gen:
                if self.test(x):
                    yield x

    class Quit(Pipe''''''Filter):
        # stop the whole pipe as soon as the test succeeds
        def __init__(self, test):
            Pipe''''''Filter.__init__(self)
            self.test = test
        def run(self, gen):
            for x in gen:
                if self.test(x):
                    raise Stop''''''Iteration  # ends the generator
                else:
                    yield x

    class Count(Pipe''''''Filter):
        # pass through at most n items
        def __init__(self, n):
            Pipe''''''Filter.__init__(self)
            self.n = n
        def run(self, gen):
            for x in gen:
                yield x
                self.n -= 1
                if self.n == 0:
                    break

    def Ints():
        # the endless stream of non-negative integers
        n = 0
        while 1:
            yield n
            n += 1

    def Files(start):
        # walk the directory tree under 'start', yielding file paths
        import os
        for file in os.listdir(start):
            file = os.path.join(start, file)
            if os.path.isfile(file):
                yield file
            elif os.path.isdir(file):
                for more in Files(file):
                    yield more

    >>> odd = Test(lambda x: x % 2)
    >>> notDivBy3 = Test(lambda x: x % 3)
    >>> oddNotDivBy3 = odd|notDivBy3
    >>> firstTen = Count(10)
    >>> result = Gen''''''Pipe(Ints())|firstTen|oddNotDivBy3
    >>> for each in result:
    ...     print each

    >>> oddLowerThanTen = odd|Quit(lambda y: y >= 10)
    >>> p3 = Gen''''''Pipe(Ints())|oddLowerThanTen|notDivBy3

    >>> isPyFile = Test(lambda s: s.split('.')[-1].lower() == 'py')
    >>> tenPyFiles = Gen''''''Pipe(Files('/'))|isPyFile|Count(10)
    >>> for each in tenPyFiles:
    ...     print each

-- JuneKim
----
Take a look at this site, which shows how to use generators to load data from a database: http://www.halfcooked.com/mt/archives/000497.html
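
For the general shape of that technique (a sketch, not taken from the linked article), assuming a DB-API 2.0 cursor and a hypothetical ''people'' table: wrap cursor.fetchmany() in a generator, so callers can iterate over rows without holding the whole result set in memory.

    from __future__ import generators

    def rows(cursor, arraysize=100):
        # fetch a batch at a time and hand rows out one by one
        while 1:
            batch = cursor.fetchmany(arraysize)
            if not batch:
                break
            for row in batch:
                yield row

    # usage, given any DB-API 2.0 connection object 'conn':
    #     cur = conn.cursor()
    #     cur.execute("select name, age from people")
    #     for name, age in rows(cur):
    #         print name, age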

----
CategoryPython