Python snippets

SQLAlchemy example

Tags: sqlalchemy, python. Language: Python.

SQLAlchemy example:

from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

# Connect to the MySQL database. echo=False suppresses SQL statement
# logging (equivalent to assigning engine.echo after construction).
engine = create_engine(
    "mysql://username:password@localhost/database_name",
    echo=False,
)

# Declarative base class that all ORM models inherit from.
Base = declarative_base()

class Entry(Base):
    """ORM model for one row of the ``entries`` table."""

    __tablename__ = "entries"

    id = Column(Integer, primary_key=True)  # surrogate primary key
    title = Column(String(255))
    # MySQL requires an explicit length for VARCHAR columns; the original
    # bare ``Column(String)`` fails at CREATE TABLE time against the
    # mysql:// engine configured above.
    url = Column(String(255))

    def __repr__(self):
        return "<Entry('%s', '%s')>" % (self.title, self.url)

# Handles to the underlying table object and schema metadata.
entry_table = Entry.__table__
metadata = Base.metadata

# Start a session bound to the engine.
Session = sessionmaker(bind=engine)
session = Session()

# Query all entries whose title is not 'Zermatt'.
entries = session.query(Entry) \
    .filter(Entry.title != 'Zermatt')

# Print all entries.
for entry in entries.all():
    # NOTE(review): the original called entry.update_named_entities(),
    # which is not defined on Entry anywhere in this file and would raise
    # AttributeError; print the entry itself (via __repr__) instead.
    print(entry)

# Fetch the first matching entry.
entry = entries.first()

# Update entry.
entry.title = 'Zermatt, Verbier'

# Commit changes — the original left this out, so the UPDATE above was
# never flushed to the database.
session.commit()
NER with Python and NLTK

Tags: ner, python. Language: Python.

Code adapted from a widely circulated NLTK named-entity recognition example (the source link was lost in extraction).

import nltk

def named_entities(text):
    """Return the set of named-entity strings found in *text*.

    Pipeline: sentence-split -> tokenize -> POS-tag -> NE-chunk, then walk
    each chunk tree collecting the 'NE' subtrees (binary=True labels every
    entity simply 'NE' rather than PERSON/ORG/etc.).

    The original snippet was garbled (empty loop bodies, missing
    recursion) and used APIs removed in NLTK 3 (``nltk.batch_ne_chunk``,
    ``Tree.node``); this reconstruction uses ``nltk.ne_chunk_sents`` and
    ``Tree.label()`` instead.
    """
    sentences = nltk.sent_tokenize(text)
    tokenized_sentences = [nltk.word_tokenize(sentence) for sentence in sentences]
    tagged_sentences = [nltk.pos_tag(sentence) for sentence in tokenized_sentences]
    chunked_sentences = nltk.ne_chunk_sents(tagged_sentences, binary=True)

    def extract_entity_names(t):
        # Recursively collect entity strings from a chunk tree.
        entity_names = []
        if hasattr(t, 'label') and t.label():
            if t.label() == 'NE':
                # Leaf children are (word, tag) pairs; join the words.
                entity_names.append(' '.join(child[0] for child in t))
            else:
                for child in t:
                    entity_names.extend(extract_entity_names(child))
        return entity_names

    entity_names = []
    for tree in chunked_sentences:
        entity_names.extend(extract_entity_names(tree))
    return set(entity_names)

How to use ElasticSearch with Python

Tags: elasticsearch, python, pyes. Language: Python.

This is a short example on how to use ElasticSearch with Python.

First install pyes (pyes documentation).

Then run this code:

from pyes import *

index_name = 'xxx'
type_name = 'car'

# Connect to ElasticSearch.
# NOTE(review): the host string is empty in the source — presumably it was
# something like '127.0.0.1:9200'; confirm before running.
conn = ES('', timeout=3.5)

# Sample documents to index; the original literal was missing its closing
# bracket, which is a syntax error.
docs = [
    {"name": "good", "id": '1'},
    {"name": "bad", "id": '2'},
    {"name": "ugly", "id": '3'},
]

# Bulk index: bulk=True buffers each document for a bulk request instead
# of indexing it immediately.
for doc in docs:
    # index(doc, index, doc_type, id=None, parent=None, force_insert=False,
    #       op_type=None, bulk=False, version=None, querystring_args=None)
    conn.index(doc, index_name, type_name, id=doc['id'], bulk=True)

# Flush pending bulk operations and make the documents searchable.
# (Original used Python-2 print statement syntax.)
print(conn.refresh())

# Search
def search(query):
    """Run an AND full-text query against the index and print each hit.

    Parameters
    ----------
    query : str
        Query string; terms are combined with AND.
    """
    q = StringQuery(query, default_operator="AND")
    # NOTE(review): the original line was garbled ("result =, indices=...");
    # reconstructed as a conn.search() call — confirm against the pyes docs.
    result = conn.search(query=q, indices=[index_name])
    for r in result:
        print(r)


You can also use CURL to verify that it works:

# NOTE(review): the URLs in all three commands below are empty in the
# source — presumably they were http://localhost:9200/<index>/... style
# endpoints; fill them in before running.

# Show index mapping
curl -vvv ""

# Delete index
curl -XDELETE -vvv ""

# Search
curl -vvv ""