mirror of
https://github.com/sqlalchemy/sqlalchemy.git
synced 2026-05-15 13:17:24 -04:00
2bcc97da42
The feature is enabled for all built-in backends when RETURNING is used, except for Oracle, which doesn't need it; on psycopg2 and mssql+pyodbc it is used for all INSERT statements, not just those that use RETURNING. Third-party dialects would need to opt in to the new feature by setting use_insertmanyvalues to True. Also adds dialect-level guards against using RETURNING with executemany where we don't have an implementation to suit it. Executing a single statement with RETURNING still defers to the server without us checking. Fixes: #6047 Fixes: #7907 Change-Id: I3936d3c00003f02e322f2e43fb949d0e6e568304
216 lines
5.4 KiB
Python
216 lines
5.4 KiB
Python
from __future__ import annotations
|
|
|
|
from sqlalchemy import bindparam
|
|
from sqlalchemy import Column
|
|
from sqlalchemy import create_engine
|
|
from sqlalchemy import Identity
|
|
from sqlalchemy import insert
|
|
from sqlalchemy import Integer
|
|
from sqlalchemy import select
|
|
from sqlalchemy import String
|
|
from sqlalchemy.orm import declarative_base
|
|
from sqlalchemy.orm import Session
|
|
from . import Profiler
|
|
|
|
"""This series of tests illustrates different ways to INSERT a large number
|
|
of rows in bulk.
|
|
|
|
|
|
"""
|
|
|
|
Base = declarative_base()
|
|
|
|
|
|
class Customer(Base):
|
|
__tablename__ = "customer"
|
|
id = Column(Integer, Identity(), primary_key=True)
|
|
name = Column(String(255))
|
|
description = Column(String(255))
|
|
|
|
|
|
Profiler.init("bulk_inserts", num=100000)
|
|
|
|
|
|
@Profiler.setup
|
|
def setup_database(dburl, echo, num):
|
|
global engine
|
|
engine = create_engine(dburl, echo=echo)
|
|
Base.metadata.drop_all(engine)
|
|
Base.metadata.create_all(engine)
|
|
|
|
|
|
@Profiler.profile
|
|
def test_flush_no_pk(n):
|
|
"""INSERT statements via the ORM (batched with RETURNING if available),
|
|
fetching generated row id"""
|
|
session = Session(bind=engine)
|
|
for chunk in range(0, n, 1000):
|
|
session.add_all(
|
|
[
|
|
Customer(
|
|
name="customer name %d" % i,
|
|
description="customer description %d" % i,
|
|
)
|
|
for i in range(chunk, chunk + 1000)
|
|
]
|
|
)
|
|
session.flush()
|
|
session.commit()
|
|
|
|
|
|
@Profiler.profile
|
|
def test_bulk_save_return_pks(n):
|
|
"""INSERT statements in "bulk" (batched with RETURNING if available),
|
|
fetching generated row id"""
|
|
session = Session(bind=engine)
|
|
session.bulk_save_objects(
|
|
[
|
|
Customer(
|
|
name="customer name %d" % i,
|
|
description="customer description %d" % i,
|
|
)
|
|
for i in range(n)
|
|
],
|
|
return_defaults=True,
|
|
)
|
|
session.commit()
|
|
|
|
|
|
@Profiler.profile
|
|
def test_flush_pk_given(n):
|
|
"""Batched INSERT statements via the ORM, PKs already defined"""
|
|
session = Session(bind=engine)
|
|
for chunk in range(0, n, 1000):
|
|
session.add_all(
|
|
[
|
|
Customer(
|
|
id=i + 1,
|
|
name="customer name %d" % i,
|
|
description="customer description %d" % i,
|
|
)
|
|
for i in range(chunk, chunk + 1000)
|
|
]
|
|
)
|
|
session.flush()
|
|
session.commit()
|
|
|
|
|
|
@Profiler.profile
|
|
def test_bulk_save(n):
|
|
"""Batched INSERT statements via the ORM in "bulk", discarding PKs."""
|
|
session = Session(bind=engine)
|
|
session.bulk_save_objects(
|
|
[
|
|
Customer(
|
|
name="customer name %d" % i,
|
|
description="customer description %d" % i,
|
|
)
|
|
for i in range(n)
|
|
]
|
|
)
|
|
session.commit()
|
|
|
|
|
|
@Profiler.profile
|
|
def test_orm_insert(n):
|
|
"""A single Core INSERT run through the Session"""
|
|
session = Session(bind=engine)
|
|
session.execute(
|
|
insert(Customer),
|
|
params=[
|
|
dict(
|
|
name="customer name %d" % i,
|
|
description="customer description %d" % i,
|
|
)
|
|
for i in range(n)
|
|
],
|
|
)
|
|
session.commit()
|
|
|
|
|
|
@Profiler.profile
|
|
def test_orm_insert_w_fetch(n):
|
|
"""A single Core INSERT w executemany run through the Session, fetching
|
|
back new Customer objects into a list"""
|
|
session = Session(bind=engine)
|
|
result = session.execute(
|
|
select(Customer).from_statement(insert(Customer).returning(Customer)),
|
|
params=[
|
|
dict(
|
|
name="customer name %d" % i,
|
|
description="customer description %d" % i,
|
|
)
|
|
for i in range(n)
|
|
],
|
|
)
|
|
customers = result.scalars().all() # noqa: F841
|
|
session.commit()
|
|
|
|
|
|
@Profiler.profile
|
|
def test_bulk_insert_mappings(n):
|
|
"""Batched INSERT statements via the ORM "bulk", using dictionaries."""
|
|
session = Session(bind=engine)
|
|
session.bulk_insert_mappings(
|
|
Customer,
|
|
[
|
|
dict(
|
|
name="customer name %d" % i,
|
|
description="customer description %d" % i,
|
|
)
|
|
for i in range(n)
|
|
],
|
|
)
|
|
session.commit()
|
|
|
|
|
|
@Profiler.profile
|
|
def test_core_insert(n):
|
|
"""A single Core INSERT construct inserting mappings in bulk."""
|
|
with engine.begin() as conn:
|
|
conn.execute(
|
|
Customer.__table__.insert(),
|
|
[
|
|
dict(
|
|
name="customer name %d" % i,
|
|
description="customer description %d" % i,
|
|
)
|
|
for i in range(n)
|
|
],
|
|
)
|
|
|
|
|
|
@Profiler.profile
|
|
def test_dbapi_raw(n):
|
|
"""The DBAPI's API inserting rows in bulk."""
|
|
|
|
conn = engine.pool._creator()
|
|
cursor = conn.cursor()
|
|
compiled = (
|
|
Customer.__table__.insert()
|
|
.values(name=bindparam("name"), description=bindparam("description"))
|
|
.compile(dialect=engine.dialect)
|
|
)
|
|
|
|
if compiled.positional:
|
|
args = (
|
|
("customer name %d" % i, "customer description %d" % i)
|
|
for i in range(n)
|
|
)
|
|
else:
|
|
args = (
|
|
dict(
|
|
name="customer name %d" % i,
|
|
description="customer description %d" % i,
|
|
)
|
|
for i in range(n)
|
|
)
|
|
|
|
cursor.executemany(str(compiled), list(args))
|
|
conn.commit()
|
|
conn.close()
|
|
|
|
|
|
if __name__ == "__main__":
|
|
Profiler.main()
|