Asyncpg does not convert Python dictionaries and lists to PostgreSQL JSONB (or JSON) automatically. By default, asyncpg exchanges json/jsonb values as text: you must pass a pre-serialized JSON string (e.g. from json.dumps()) and you receive a str back when fetching. To get dict/list conversion, register a per-connection type codec with Connection.set_type_codec('jsonb', encoder=json.dumps, decoder=json.loads, schema='pg_catalog').
Conversion via a registered codec:
import asyncpg
# Python dict automatically converts to JSONB
conn = await asyncpg.connect(
host='localhost',
database='mydb',
user='postgres',
password='password'
)
# INSERT: Python dict → PostgreSQL JSONB
user_data = {
'name': 'John Doe',
'age': 30,
'email': '[email protected]',
'preferences': {
'theme': 'dark',
'notifications': True
}
}
await conn.execute(
'INSERT INTO users (id, data) VALUES ($1, $2)',
1,
user_data # ← Python dict, no json.dumps() needed!
)
# SELECT: PostgreSQL JSONB → Python dict
row = await conn.fetchrow('SELECT id, data FROM users WHERE id = $1', 1)
print(row['data']) # ← Automatically a Python dict!
# Output: {'name': 'John Doe', 'age': 30, 'email': '[email protected]', ...}
print(type(row['data'])) # <class 'dict'>
Type Mappings:
| Python Type | PostgreSQL Type | Automatic |
|---|---|---|
| dict | jsonb / json | ❌ No — requires json.dumps() or a registered type codec |
| list | PostgreSQL array (e.g. text[], int[]) | ✅ Yes — but as an array, not JSON |
| None | NULL | ✅ Yes |
| str | text (and accepted as-is for json/jsonb columns) | ✅ Yes |
Complete Example:
import asyncpg
from typing import Dict, Any, List
# Table schema
"""
CREATE TABLE products (
id SERIAL PRIMARY KEY,
name TEXT NOT NULL,
metadata JSONB, -- Python dict
tags JSONB, -- Python list
created_at TIMESTAMP DEFAULT NOW()
);
"""
async def create_product(name: str, metadata: Dict[str, Any], tags: List[str]):
    """Insert a product row, storing metadata and tags as JSONB.

    asyncpg does not serialize dicts/lists to jsonb by itself (it expects a
    pre-serialized str), so a per-connection codec is registered first.

    Args:
        name: Product name (stored in the TEXT column).
        metadata: Arbitrary product attributes; encoded to JSONB by the codec.
        tags: Tag strings; encoded to a JSONB array by the codec.

    Returns:
        The new row's id.
    """
    import json  # local: only needed for the codec below

    conn = await asyncpg.connect('postgresql://user:pass@localhost/mydb')
    try:
        # Teach this connection to encode/decode jsonb as Python objects.
        await conn.set_type_codec(
            'jsonb', encoder=json.dumps, decoder=json.loads, schema='pg_catalog'
        )
        product_id = await conn.fetchval(
            """
            INSERT INTO products (name, metadata, tags)
            VALUES ($1, $2, $3)
            RETURNING id
            """,
            name,
            metadata,  # dict -> jsonb via the codec
            tags       # list -> jsonb via the codec
        )
        return product_id
    finally:
        await conn.close()
# Usage - no JSON serialization needed
product_id = await create_product(
name='Laptop',
metadata={
'brand': 'Dell',
'model': 'XPS 15',
'specs': {
'ram': '32GB',
'cpu': 'Intel i9',
'storage': '1TB SSD'
},
'price': 1999.99,
'in_stock': True
},
tags=['electronics', 'computers', 'laptops']
)
print(f"Created product ID: {product_id}")
Querying JSONB Data:
import asyncpg
conn = await asyncpg.connect('postgresql://user:pass@localhost/mydb')
# Query with JSONB operators
products = await conn.fetch(
"""
SELECT id, name, metadata, tags
FROM products
WHERE metadata->>'brand' = $1 -- JSONB text extraction
AND (metadata->>'price')::numeric < $2
AND tags @> $3 -- JSONB containment
""",
'Dell',
2000.00,
['laptops'] # ← Python list auto-converted for comparison
)
for product in products:
print(f"Product: {product['name']}")
print(f"Metadata: {product['metadata']}") # Already a dict!
print(f"Brand: {product['metadata']['brand']}") # Direct dict access
print(f"Tags: {product['tags']}") # Already a list!
Updating JSONB Fields:
# Update entire JSONB column
new_metadata = {
'brand': 'Dell',
'model': 'XPS 17', # Changed
'specs': {
'ram': '64GB', # Changed
'cpu': 'Intel i9',
'storage': '2TB SSD' # Changed
}
}
await conn.execute(
'UPDATE products SET metadata = $1 WHERE id = $2',
new_metadata, # ← Dict auto-converted
product_id
)
# Update specific JSONB field using || operator
await conn.execute(
"""
UPDATE products
SET metadata = metadata || $1
WHERE id = $2
""",
{'price': 1799.99}, # ← Merge this dict into existing JSONB
product_id
)
# Update nested JSONB field using jsonb_set()
await conn.execute(
"""
UPDATE products
SET metadata = jsonb_set(
metadata,
'{specs,ram}', -- Path to nested field
$1 -- New value (as JSONB)
)
WHERE id = $2
""",
'"128GB"', # ← Must be valid JSON string
product_id
)
Edge Cases and Known Issues:
Based on asyncpg GitHub issues and documentation, there are some edge cases; note that every dict/list example below assumes a jsonb codec has been registered on the connection (without one, asyncpg expects a pre-serialized str):
# 1. None values in dicts (Issue #440)
data_with_none = {
'name': 'Test',
'value': None # ← Works fine, becomes JSON null
}
await conn.execute(
'INSERT INTO table (data) VALUES ($1)',
data_with_none # ✅ Works correctly
)
# 2. executemany with JSONB (requires nested lists)
data_list = [
[1, {'name': 'Item 1'}], # ← Each row is a list of column values
[2, {'name': 'Item 2'}],
[3, {'name': 'Item 3'}]
]
await conn.executemany(
'INSERT INTO items (id, data) VALUES ($1, $2)',
data_list # ← List of lists
)
# 3. Raw JSON strings (if you have pre-serialized JSON)
import json
json_string = '{"key": "value"}'
# Option A: Parse to dict (recommended)
data = json.loads(json_string)
await conn.execute('INSERT INTO table (data) VALUES ($1)', data)
# Option B: Cast in SQL
await conn.execute(
'INSERT INTO table (data) VALUES ($1::jsonb)',
json_string
)
Type Hints for Clarity:
from typing import Dict, Any, List, Optional
import asyncpg
async def store_user_preferences(
    user_id: int,
    preferences: Dict[str, Any]
) -> None:
    """Store user preferences in a JSONB column.

    Registers a per-connection jsonb codec because asyncpg does not
    serialize dicts itself (by default it expects a pre-serialized str).

    Args:
        user_id: User ID.
        preferences: Preferences dict; encoded to JSONB by the codec.
    """
    import json  # local: only needed for the codec

    conn = await asyncpg.connect('postgresql://user:pass@localhost/mydb')
    try:
        await conn.set_type_codec(
            'jsonb', encoder=json.dumps, decoder=json.loads, schema='pg_catalog'
        )
        await conn.execute(
            'UPDATE users SET preferences = $1 WHERE id = $2',
            preferences,  # dict -> JSONB via the codec
            user_id
        )
    finally:
        await conn.close()
async def get_user_preferences(user_id: int) -> Optional[Dict[str, Any]]:
    """Retrieve user preferences from a JSONB column.

    Registers a decoder codec so the JSONB value comes back as a dict;
    without it, asyncpg returns the raw JSON text as a str.

    Returns:
        Preferences dict, or None when the row is missing or the column is NULL.
    """
    import json  # local: only needed for the codec

    conn = await asyncpg.connect('postgresql://user:pass@localhost/mydb')
    try:
        await conn.set_type_codec(
            'jsonb', encoder=json.dumps, decoder=json.loads, schema='pg_catalog'
        )
        preferences = await conn.fetchval(
            'SELECT preferences FROM users WHERE id = $1',
            user_id
        )
        return preferences  # dict (decoded by the codec) or None
    finally:
        await conn.close()
FastAPI Integration:
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from typing import Dict, Any

import asyncpg

app = FastAPI()


class ProductMetadata(BaseModel):
    """Request schema for product metadata, validated by Pydantic.

    Fields mirror the keys stored in the products.metadata JSONB column.
    """
    brand: str
    model: str
    specs: Dict[str, Any]  # free-form spec key/value pairs
    price: float
    in_stock: bool
@app.post("/products")
async def create_product(
    name: str,
    metadata: ProductMetadata  # Pydantic validates the payload structure
):
    """Create a product, storing the validated metadata as JSONB.

    asyncpg expects a str for jsonb parameters by default, so the model
    is serialized explicitly with json.dumps().
    """
    import json  # local: used for explicit serialization

    conn = await asyncpg.connect('postgresql://user:pass@localhost/mydb')
    try:
        # metadata.dict() -> plain dict -> JSON text -> jsonb column.
        # (On Pydantic v2, metadata.model_dump() is the preferred spelling.)
        product_id = await conn.fetchval(
            'INSERT INTO products (name, metadata) VALUES ($1, $2) RETURNING id',
            name,
            json.dumps(metadata.dict())
        )
        return {"product_id": product_id}
    finally:
        await conn.close()
@app.get("/products/{product_id}")
async def get_product(product_id: int):
    """Return one product, decoding its JSONB metadata into a dict.

    Raises:
        HTTPException: 404 when no product has the given id.
    """
    import json  # local: used to decode the JSONB text

    conn = await asyncpg.connect('postgresql://user:pass@localhost/mydb')
    try:
        product = await conn.fetchrow(
            'SELECT name, metadata FROM products WHERE id = $1',
            product_id
        )
        if not product:
            raise HTTPException(status_code=404, detail="Product not found")
        # asyncpg returns jsonb as a JSON-encoded str by default (no codec
        # is registered here), so decode it; guard against NULL metadata.
        raw_metadata = product['metadata']
        return {
            "name": product['name'],
            "metadata": json.loads(raw_metadata) if raw_metadata is not None else None
        }
    finally:
        await conn.close()
JSON vs JSONB:
# Both JSON and JSONB columns work the same with asyncpg
# JSONB (recommended): Binary format, faster queries, supports indexing
await conn.execute(
'INSERT INTO table (data_jsonb) VALUES ($1)',
{'key': 'value'} # ← Auto-converted to JSONB
)
# JSON: Text format, preserves formatting/whitespace
await conn.execute(
'INSERT INTO table (data_json) VALUES ($1)',
{'key': 'value'} # ← Auto-converted to JSON
)
# Asyncpg handles both identically from Python's perspective
Performance Note:
asyncpg's protocol and built-in codecs are implemented in Cython, which is why it benchmarks well overall — but JSON encoding/decoding is not built in. It runs through whichever encoder/decoder you register (e.g. the stdlib json module, or a faster third-party library such as orjson passed to set_type_codec).
Version Note: In every asyncpg version the default behavior is to exchange json/jsonb as text (str); dict/list conversion has always required Connection.set_type_codec() or manual json.dumps()/json.loads().