sless-primer/POSTGRES/code/pg-bulk-insert/pg_bulk_insert.py
2026-03-22 17:08:18 +04:00

39 lines
1.5 KiB
Python

# 2026-03-21 — pg-bulk-insert: bulk INSERT через execute_values.
# Тестирует: большие батчи (до 500 строк), производительность, память.
import os, time, psycopg2, psycopg2.extras
def bulk_insert(event):
    """Bulk-insert *n* generated rows into ``terraform_demo_table`` via execute_values.

    Parameters
    ----------
    event : dict
        Optional keys:
        * ``"n"`` — row count, clamped to [0, 500]; non-numeric falls back to 50.
        * ``"prefix"`` — title prefix, truncated to 50 chars; default ``"bulk"``.

    Returns
    -------
    dict
        ``{"inserted": <row count>, "first_id": <first RETURNING id or None>,
        "elapsed_sec": <insert duration, rounded to ms>}``.

    Raises
    ------
    KeyError
        If PGHOST/PGDATABASE/PGUSER/PGPASSWORD are missing from the environment.
    psycopg2.Error
        On any connection or SQL failure.
    """
    try:
        n = max(0, min(int(event.get("n", 50)), 500))  # clamp to [0, 500]
    except (TypeError, ValueError):
        n = 50  # non-numeric "n" falls back to the default batch size
    prefix = str(event.get("prefix", "bulk"))[:50]
    ts = int(time.time() * 1000)  # ms timestamp keeps generated titles unique per call
    # Edge case n == 0: answer immediately without touching PostgreSQL at all.
    if n == 0:
        return {"inserted": 0, "first_id": None, "elapsed_sec": 0.0}
    rows = [(f"{prefix}-{ts}-{i}",) for i in range(n)]
    conn = psycopg2.connect(
        host=os.environ["PGHOST"], port=int(os.environ.get("PGPORT", 5432)),
        dbname=os.environ["PGDATABASE"], user=os.environ["PGUSER"],
        password=os.environ["PGPASSWORD"], sslmode=os.environ.get("PGSSLMODE", "require"),
    )
    try:
        t0 = time.time()
        with conn.cursor() as cur:
            # BUG FIX: with page_size=100, execute_values issues one statement
            # per page, and a trailing cur.fetchall() would return only the
            # RETURNING rows of the LAST page (max 100 ids for up to 500
            # inserted rows), so "inserted" under-reported for n > 100.
            # fetch=True makes execute_values collect results from every page.
            fetched = psycopg2.extras.execute_values(
                cur,
                "INSERT INTO terraform_demo_table (title) VALUES %s RETURNING id",
                rows,
                page_size=100,
                fetch=True,
            )
            ids = [r[0] for r in fetched]
        conn.commit()
        elapsed = round(time.time() - t0, 3)
        return {"inserted": len(ids), "first_id": ids[0] if ids else None, "elapsed_sec": elapsed}
    finally:
        # Always close; an uncommitted transaction is discarded on error.
        conn.close()