Examples

Here are several small example programs that use Eventlet. All of these examples can be found in the examples directory of a source copy of Eventlet.

Web Crawler

examples/webcrawler.py

#! /usr/bin/env python
"""
This is a simple web "crawler" that fetches a bunch of urls using a pool to 
control the number of outbound connections. It has as many simultaneously open
connections as coroutines in the pool.

The prints in the body of the fetch function are there to demonstrate that the
requests are truly made in parallel.
"""

urls = ["http://www.google.com/intl/en_ALL/images/logo.gif",
     "https://wiki.secondlife.com/w/images/secondlife.jpg",
     "http://us.i1.yimg.com/us.yimg.com/i/ww/beta/y3.gif"]

import eventlet
from eventlet.green import urllib2  

def fetch(url):
  print "opening", url
  body = urllib2.urlopen(url).read()
  print "done with", url
  return url, body

pool = eventlet.GreenPool(200)
for url, body in pool.imap(fetch, urls):
  print "got body from", url, "of length", len(body)

WSGI Server

examples/wsgi.py

"""This is a simple example of running a wsgi application with eventlet.
For a more fully-featured server which supports multiple processes,
multiple threads, and graceful code reloading, see:

http://pypi.python.org/pypi/Spawning/
"""

import eventlet
from eventlet import wsgi

def hello_world(env, start_response):
    if env['PATH_INFO'] != '/':
        start_response('404 Not Found', [('Content-Type', 'text/plain')])
        return ['Not Found\r\n']
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return ['Hello, World!\r\n']
        
wsgi.server(eventlet.listen(('', 8090)), hello_world)
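
One way to poke at the server from the same process (a sketch, not part of
examples/wsgi.py) is to run the accept loop in a background green thread and
fetch a page with the green urllib2. The eventlet.listen call binds the
socket before spawn_n returns, so the request can't race the bind:

import eventlet
from eventlet import wsgi
from eventlet.green import urllib2

def hello_world(env, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return ['Hello, World!\r\n']

# the server's accept loop runs in a background green thread
eventlet.spawn_n(wsgi.server, eventlet.listen(('', 8090)), hello_world)
print urllib2.urlopen('http://localhost:8090/').read()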

Echo Server

examples/echoserver.py

#! /usr/bin/env python
"""\
Simple server that listens on port 6000 and echoes back every input to
the client.  To try out the server, start it up by running this file.

Connect to it with:
  telnet localhost 6000

You terminate your connection by terminating telnet (typically Ctrl-]
and then 'quit')
"""

import eventlet

def handle(fd):
    print "client connected"
    while True:
        # pass through every non-eof line
        x = fd.readline()
        if not x: break
        fd.write(x)
        fd.flush()
        print "echoed", x,
    print "client disconnected"

print "server socket listening on port 6000"
server = eventlet.listen(('0.0.0.0', 6000))
pool = eventlet.GreenPool()
while True:
    try:
        new_sock, address = server.accept()
        print "accepted", address
        pool.spawn_n(handle, new_sock.makefile('rw'))
    except (SystemExit, KeyboardInterrupt):
        break
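
Besides telnet, a few lines of green-socket code make a quick scripted client
(a sketch, not part of examples/echoserver.py):

from eventlet.green import socket

c = socket.socket()
c.connect(('localhost', 6000))
c.sendall('hello\n')
print c.recv(1024),  # prints the echoed "hello"
c.close()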

Socket Connect

examples/connect.py

"""Spawn multiple workers and collect their results.

Demonstrates how to use the eventlet.green.socket module.
"""
import eventlet
from eventlet.green import socket

def geturl(url):
    c = socket.socket()
    ip = socket.gethostbyname(url)
    c.connect((ip, 80))
    print '%s connected' % url
    c.sendall('GET /\r\n\r\n')
    return c.recv(1024)

urls = ['www.google.com', 'www.yandex.ru', 'www.python.org']
pile = eventlet.GreenPile()
for x in urls:
    pile.spawn(geturl, x)

# note that the pile acts as a collection of return values from the functions
# if any exceptions are raised by the function they'll get raised here
for url, result in zip(urls, pile):
    print '%s: %s' % (url, repr(result)[:50])
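
Since iterating the pile re-raises any exception from a worker, failures can
be handled right in the collection loop. A standalone sketch with a
deliberately failing worker (hypothetical, not part of examples/connect.py):

import eventlet

def fail(n):
    raise ValueError(n)

pile = eventlet.GreenPile()
pile.spawn(fail, 1)
try:
    for result in pile:
        print result
except ValueError as e:
    print 'worker raised:', e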

Multi-User Chat Server

examples/chat_server.py

This is a little different from the echo server, in that it broadcasts the messages to all participants, not just the sender.

import eventlet

participants = []

def read_chat_forever(writer, reader):
    line = reader.readline()
    while line:
        print "Chat:", line.strip()
        for p in participants:
            if p is not writer: # Don't echo
                p.write(line)
                p.flush()
        line = reader.readline()
    participants.remove(writer)
    print "Participant left chat."

try:
    print "ChatServer starting up on port 3000"
    server = eventlet.listen(('0.0.0.0', 3000))
    while True:
        new_connection, address = server.accept()
        print "Participant joined chat."
        new_writer = new_connection.makefile('w')
        participants.append(new_writer)
        eventlet.spawn_n(read_chat_forever, 
                         new_writer, 
                         new_connection.makefile('r'))
except (KeyboardInterrupt, SystemExit):
    print "ChatServer exiting."

Feed Scraper

examples/feedscraper.py

This example requires Feedparser to be installed or on the PYTHONPATH.

"""A simple web server that accepts POSTS containing a list of feed urls,
and returns the titles of those feeds.
"""
import eventlet
feedparser = eventlet.import_patched('feedparser')

# the pool provides a safety limit on our concurrency
pool = eventlet.GreenPool()

def fetch_title(url):
    d = feedparser.parse(url)
    return d.feed.get('title', '')

def app(environ, start_response):
    if environ['REQUEST_METHOD'] != 'POST':
        start_response('403 Forbidden', [])
        return []
    
    # the pile collects the result of a concurrent operation -- in this case,
    # the collection of feed titles
    pile = eventlet.GreenPile(pool)
    for line in environ['wsgi.input'].readlines():
        url = line.strip()
        if url:
            pile.spawn(fetch_title, url)
    # since the pile is an iterator over the results, 
    # you can use it in all sorts of great Pythonic ways
    titles = '\n'.join(pile)
    start_response('200 OK', [('Content-type', 'text/plain')])
    return [titles]


if __name__ == '__main__':
    from eventlet import wsgi
    wsgi.server(eventlet.listen(('localhost', 9010)), app)
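
One way to exercise the scraper once it's running (a sketch; the feed URL is
illustrative) is a plain POST with the green urllib2. Passing a data argument
to urlopen turns the request into a POST:

from eventlet.green import urllib2

feeds = 'http://eventlet.net/feed\n'
print urllib2.urlopen('http://localhost:9010/', feeds).read()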

Port Forwarder

examples/forwarder.py
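
The example file isn't reproduced here. A minimal sketch of the idea (ports
and addresses are illustrative): accept a connection, open an upstream
connection, and run one copying loop in each direction.

import eventlet

def forward(source, dest):
    """Copy bytes from source to dest until source closes."""
    while True:
        d = source.recv(32384)
        if not d:
            break
        dest.sendall(d)

# forward local port 7000 to the local ssh port (both ends illustrative)
listener = eventlet.listen(('localhost', 7000))
while True:
    client, addr = listener.accept()
    upstream = eventlet.connect(('localhost', 22))
    # two unidirectional copies make one bidirectional forwarder
    eventlet.spawn_n(forward, client, upstream)
    eventlet.spawn_n(forward, upstream, client)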

Producer Consumer/Recursive Web Crawler

examples/producer_consumer.py

This is an example implementation of the producer/consumer pattern as well as a functional recursive web crawler.

"""This is a recursive web crawler.  Don't go pointing this at random sites;
it doesn't respect robots.txt and it is pretty brutal about how quickly it 
fetches pages.

This is a kind of "producer/consumer" example; the producer function produces 
jobs, and the GreenPool itself is the consumer, farming out work concurrently.  
It's easier to write it this way than to write a standard consumer loop;
GreenPool handles any exceptions raised and ensures that there's a fixed
number of "workers", so you don't have to write that tedious management code
yourself.
"""

from eventlet.green import urllib2
import eventlet
import re

# http://daringfireball.net/2009/11/liberal_regex_for_matching_urls
# (note: Python's re module has no POSIX classes, so the [:punct:] part of
# the original pattern is treated as a literal set of characters here)
url_regex = re.compile(r'\b(([\w-]+://?|www[.])[^\s()<>]+(?:\([\w\d]+\)|([^[:punct:]\s]|/)))')


def fetch(url, outq):
    """Fetch a url and push any urls found into a queue."""
    print "fetching", url
    data = ''
    with eventlet.Timeout(5, False):
        data = urllib2.urlopen(url).read()
    for url_match in url_regex.finditer(data):
        new_url = url_match.group(0)
        outq.put(new_url)

            
def producer(start_url):
    """Recursively crawl starting from *start_url*.  Returns a set of 
    urls that were found."""
    pool = eventlet.GreenPool()
    seen = set()
    q = eventlet.Queue()
    q.put(start_url)
    # keep looping if there are new urls, or workers that may produce more urls
    while not q.empty() or pool.running() != 0:
        url = eventlet.with_timeout(0.1, q.get, timeout_value='')
        # limit requests to eventlet.net so we don't crash all over the internet
        if url not in seen and 'eventlet.net' in url:
            seen.add(url)
            pool.spawn(fetch, url, q)
    return seen


seen = producer("http://eventlet.net")
print "I saw these urls:"
print "\n".join(seen)
