SQLite is a self-contained database that requires no server. Python's sqlite3 module makes it easy to use.

Basic Usage

import sqlite3
 
# Connecting creates the database file on first use.
connection = sqlite3.connect("data.db")
cur = connection.cursor()
 
# Schema: INTEGER PRIMARY KEY autoincrements; email must be unique.
cur.execute("""
    CREATE TABLE IF NOT EXISTS users (
        id INTEGER PRIMARY KEY,
        name TEXT NOT NULL,
        email TEXT UNIQUE
    )
""")
 
# Parameterized insert — placeholders keep values out of the SQL text.
cur.execute("INSERT INTO users (name, email) VALUES (?, ?)", 
            ("Alice", "alice@example.com"))
 
# Persist the transaction, then release the connection.
connection.commit()
connection.close()

Context Manager

import sqlite3
 
# NOTE: the connection context manager wraps a TRANSACTION, not the
# connection itself — it commits on success and rolls back on an
# exception, but it does NOT close the connection.
with sqlite3.connect("data.db") as conn:
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM users")
    rows = cursor.fetchall()
    # Commits automatically on success
    # Rolls back on exception

# The connection is still open here; close it explicitly
# (or wrap the connect() call in contextlib.closing).
conn.close()

Query Results

import sqlite3
 
connection = sqlite3.connect("data.db")
cur = connection.cursor()
 
# fetchall(): every remaining row as a list
cur.execute("SELECT * FROM users")
all_rows = cur.fetchall()
 
# fetchone(): the next row, or None when the result set is exhausted
cur.execute("SELECT * FROM users WHERE id = ?", (1,))
one_row = cur.fetchone()
 
# fetchmany(n): up to n rows per call
cur.execute("SELECT * FROM users")
some_rows = cur.fetchmany(5)
 
# The cursor itself is an iterator over the result set
cur.execute("SELECT * FROM users")
for record in cur:
    print(record)

Row Factory

import sqlite3
 
connection = sqlite3.connect("data.db")
 
# sqlite3.Row yields rows addressable by column name while still
# supporting positional (tuple-style) access.
connection.row_factory = sqlite3.Row
cur = connection.cursor()
 
cur.execute("SELECT * FROM users WHERE id = ?", (1,))
record = cur.fetchone()
 
print(record["name"])   # by column name
print(record[1])        # by position, unchanged
print(dict(record))     # plain dict copy of the row

Parameterized Queries

import sqlite3
 
conn = sqlite3.connect("data.db")
cursor = conn.cursor()
 
# ALWAYS use parameterized queries (prevents SQL injection)
 
# BUG FIX: the original snippet used user_id without defining it,
# which raises NameError when run. Any example value works here.
user_id = 1
 
# Positional parameters
cursor.execute("SELECT * FROM users WHERE id = ?", (user_id,))
 
# Named parameters
cursor.execute(
    "SELECT * FROM users WHERE name = :name AND email = :email",
    {"name": "Alice", "email": "alice@example.com"}
)
 
# NEVER do this:
# cursor.execute(f"SELECT * FROM users WHERE id = {user_id}")  # SQL injection!

Insert Many

import sqlite3
 
conn = sqlite3.connect("data.db")
cursor = conn.cursor()
 
users = [
    ("Alice", "alice@example.com"),
    ("Bob", "bob@example.com"),
    ("Carol", "carol@example.com"),
]
 
# executemany() runs the INSERT once for each tuple in the sequence.
cursor.executemany(
    "INSERT INTO users (name, email) VALUES (?, ?)",
    users
)
 
conn.commit()
 
# BUG FIX: cursor.lastrowid is documented to be None after executemany();
# it is only meaningful after a single execute() of INSERT/REPLACE.
cursor.execute("INSERT INTO users (name, email) VALUES (?, ?)",
               ("Dave", "dave@example.com"))
conn.commit()
 
# Get last inserted ID (of the single-row insert above)
print(cursor.lastrowid)

Transactions

import sqlite3
 
connection = sqlite3.connect("data.db")
cur = connection.cursor()
 
# Manual transaction handling: both inserts are committed together,
# or the whole batch is rolled back and the error re-raised.
try:
    cur.execute("INSERT INTO users (name, email) VALUES (?, ?)",
                ("Alice", "alice@example.com"))
    # lastrowid of the user insert links the order row to the new user
    cur.execute("INSERT INTO orders (user_id, total) VALUES (?, ?)",
                (cur.lastrowid, 99.99))
    connection.commit()
except Exception:
    connection.rollback()
    raise
 
# Same guarantees via the connection's context-manager protocol:
# commit when the body succeeds, rollback when it raises.
with connection:
    cur.execute("INSERT INTO users (name, email) VALUES (?, ?)",
                ("Bob", "bob@example.com"))

Isolation Levels

import sqlite3
 
# NOTE(review): each connect() below rebinds `conn` without closing the
# previous connection — fine for a demo, but a leak in real code.
 
# Default: deferred transactions
# (the implicit transaction is opened just before the first
# data-modifying statement)
conn = sqlite3.connect("data.db")
 
# Immediate locking
# (BEGIN IMMEDIATE: acquires the write lock as soon as the
# transaction starts)
conn = sqlite3.connect("data.db", isolation_level="IMMEDIATE")
 
# Exclusive locking
# (BEGIN EXCLUSIVE: the strictest mode; other connections are locked
# out for the duration — exact reader behavior depends on journal mode)
conn = sqlite3.connect("data.db", isolation_level="EXCLUSIVE")
 
# Autocommit mode
# (isolation_level=None: the sqlite3 module opens no implicit
# transactions; every statement commits immediately)
conn = sqlite3.connect("data.db", isolation_level=None)

Custom Functions

import sqlite3
 
def upper_reverse(s):
    """Return *s* upper-cased and reversed, for use as a SQL function.

    SQLite passes SQL NULL to Python as None; propagate it instead of
    crashing on the attribute lookup (the original raised AttributeError
    for NULL values).
    """
    if s is None:
        return None
    return s.upper()[::-1]
 
conn = sqlite3.connect("data.db")
# Register a 1-argument SQL function named upper_reverse on this
# connection; it becomes callable from SQL text.
conn.create_function("upper_reverse", 1, upper_reverse)
 
cursor = conn.cursor()
# The Python function runs once per row on the name column.
cursor.execute("SELECT upper_reverse(name) FROM users")

Aggregate Functions

import sqlite3
 
class Concatenate:
    """SQLite aggregate: join all non-NULL values with ", ".

    SQLite instantiates this once per aggregation, calls step() for each
    row, and finalize() once at the end.
    """

    def __init__(self):
        # Collected string forms of every non-NULL value seen so far.
        self.values = []

    def step(self, value):
        # Skip only SQL NULL (None). The original `if value:` also dropped
        # legitimate falsy values such as 0 and "".
        if value is not None:
            # str() so that numeric columns don't break the join below.
            self.values.append(str(value))

    def finalize(self):
        # Returns "" for an all-NULL/empty aggregation.
        return ", ".join(self.values)
 
conn = sqlite3.connect("data.db")
# Register the class as a 1-argument aggregate named concat_all;
# sqlite3 instantiates it per aggregation and drives step()/finalize().
conn.create_aggregate("concat_all", 1, Concatenate)
 
cursor = conn.cursor()
cursor.execute("SELECT concat_all(name) FROM users")
print(cursor.fetchone()[0])  # "Alice, Bob, Carol"

Collations

import sqlite3
 
def case_insensitive(a, b):
    """SQLite collation: compare two strings without regard to case.

    Uses str.casefold() rather than lower() — casefold() is Python's
    documented tool for caseless matching and also handles Unicode case
    pairs (e.g. German 'ß' vs 'SS') that lower() misses.

    Returns -1, 0, or 1 as required by the collation protocol.
    """
    a, b = a.casefold(), b.casefold()
    # Standard cmp idiom: (a > b) - (a < b) yields -1 / 0 / 1.
    return (a > b) - (a < b)
 
conn = sqlite3.connect("data.db")
# Register the two-argument comparator under the name NOCASE.
# NOTE(review): SQLite has a built-in ASCII-only NOCASE collation; this
# registration shadows it on this connection — confirm that is intended.
conn.create_collation("NOCASE", case_insensitive)
 
cursor = conn.cursor()
cursor.execute("SELECT * FROM users ORDER BY name COLLATE NOCASE")

In-Memory Database

import sqlite3
 
# An in-memory database exists only as long as the connection does.
conn = sqlite3.connect(":memory:")
cursor = conn.cursor()
 
# Populate a throwaway table, then read it back.
for statement in (
    "CREATE TABLE temp (id INTEGER, value TEXT)",
    "INSERT INTO temp VALUES (1, 'test')",
):
    cursor.execute(statement)
 
cursor.execute("SELECT * FROM temp")
print(cursor.fetchall())

Backup

import sqlite3
 
# Optional progress callback: reports pages copied after every step.
def progress(status, remaining, total):
    print(f"Copied {total - remaining}/{total} pages")
 
# Backup to file
source = sqlite3.connect("data.db")
backup = sqlite3.connect("backup.db")
 
# Copy the whole database; pages=1 makes the copy incremental so the
# callback fires per page. (BUG FIX: the original closed both
# connections and then called backup() again on the closed handles,
# which raises sqlite3.ProgrammingError.)
source.backup(backup, pages=1, progress=progress)
 
backup.close()
source.close()

Execute Script

import sqlite3
 
conn = sqlite3.connect("data.db")
cursor = conn.cursor()
 
# Execute multiple statements
# executescript() first commits any pending transaction, then runs the
# whole string as a SQL script (statements separated by semicolons).
cursor.executescript("""
    DROP TABLE IF EXISTS users;
    CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT);
    INSERT INTO users (name) VALUES ('Alice');
    INSERT INTO users (name) VALUES ('Bob');
""")
 
conn.commit()

Type Adapters

import sqlite3
from datetime import datetime, date
import json
 
# Register adapter for custom type
def adapt_datetime(dt):
    """Adapter: serialize a datetime as ISO-8601 text for storage."""
    return dt.isoformat()
 
def convert_datetime(s):
    """Converter: parse ISO-8601 bytes from SQLite back into a datetime."""
    text = s.decode()
    return datetime.fromisoformat(text)
 
# Registration is module-global: every connection uses these.
sqlite3.register_adapter(datetime, adapt_datetime)
# The converter fires for columns whose declared type is DATETIME.
sqlite3.register_converter("DATETIME", convert_datetime)
 
# Enable type detection
# PARSE_DECLTYPES: inspect each column's declared type and apply any
# registered converter when reading rows back.
conn = sqlite3.connect("data.db", detect_types=sqlite3.PARSE_DECLTYPES)
 
cursor = conn.cursor()
# NOTE(review): CREATE TABLE without IF NOT EXISTS — re-running this
# snippet raises OperationalError once the table exists.
cursor.execute("CREATE TABLE events (id INTEGER, created DATETIME)")
cursor.execute("INSERT INTO events VALUES (?, ?)", (1, datetime.now()))
cursor.execute("SELECT * FROM events")
row = cursor.fetchone()
print(type(row[1]))  # <class 'datetime.datetime'>

JSON Storage

import sqlite3
import json
 
def adapt_json(data):
    """Adapter: serialize a dict/list to its JSON text for storage."""
    encoded = json.dumps(data)
    return encoded
 
def convert_json(s):
    """Converter: parse stored JSON bytes back into Python objects."""
    decoded = s.decode()
    return json.loads(decoded)
 
# dicts and lists are serialized through adapt_json on the way in...
sqlite3.register_adapter(dict, adapt_json)
sqlite3.register_adapter(list, adapt_json)
# ...and columns declared as JSON are parsed back via convert_json.
sqlite3.register_converter("JSON", convert_json)
 
conn = sqlite3.connect("data.db", detect_types=sqlite3.PARSE_DECLTYPES)
cursor = conn.cursor()
 
# NOTE(review): CREATE TABLE without IF NOT EXISTS fails on re-run.
cursor.execute("CREATE TABLE configs (id INTEGER, data JSON)")
cursor.execute("INSERT INTO configs VALUES (?, ?)", 
               (1, {"key": "value", "numbers": [1, 2, 3]}))
 
cursor.execute("SELECT * FROM configs")
row = cursor.fetchone()
print(row[1])  # {'key': 'value', 'numbers': [1, 2, 3]}

Common Patterns

import sqlite3
from contextlib import contextmanager
 
@contextmanager
def get_db(path="data.db"):
    """Open a sqlite3 connection with named-column rows, closing on exit.

    Usage: ``with get_db() as conn: ...`` — the connection is always
    closed, even if the body raises.
    """
    connection = sqlite3.connect(path)
    connection.row_factory = sqlite3.Row
    try:
        yield connection
    finally:
        connection.close()
 
def query(sql, params=(), db_path="data.db"):
    """Run a read-only statement and return every matching row."""
    with get_db(db_path) as conn:
        # Connection.execute is a shortcut that creates the cursor for us.
        return conn.execute(sql, params).fetchall()
 
def execute(sql, params=(), db_path="data.db"):
    """Run a data-modifying statement, commit, and return its lastrowid."""
    with get_db(db_path) as conn:
        # Connection.execute returns the cursor; keep it for lastrowid.
        cursor = conn.execute(sql, params)
        conn.commit()
        return cursor.lastrowid

Best Practices

# Always use parameterized queries
cursor.execute("SELECT * FROM users WHERE id = ?", (user_id,))
 
# Use row_factory for named access
conn.row_factory = sqlite3.Row
 
# Use context managers for transactions
# (commits on success, rolls back on exception — note it does NOT
# close the connection; that still needs an explicit close())
with conn:
    cursor.execute(...)
 
# Close connections when done
conn.close()
 
# Use in-memory DB for tests
conn = sqlite3.connect(":memory:")
 
# Enable foreign keys
# (SQLite ships with foreign-key enforcement OFF by default; it must
# be enabled per connection with this PRAGMA)
cursor.execute("PRAGMA foreign_keys = ON")

SQLite is perfect for local data storage, caching, and applications that don't need a server. For high-concurrency or distributed systems, consider PostgreSQL or MySQL.

React to this post: