diff --git a/doc/source/whatsnew/v1.5.0.rst b/doc/source/whatsnew/v1.5.0.rst
index 7e470a51858ce..d340e82776274 100644
--- a/doc/source/whatsnew/v1.5.0.rst
+++ b/doc/source/whatsnew/v1.5.0.rst
@@ -259,7 +259,7 @@ Performance improvements
 - Performance improvement in :func:`merge` when left and/or right are empty (:issue:`45838`)
 - Performance improvement in :meth:`DataFrame.join` when left and/or right are empty (:issue:`46015`)
 - Performance improvement in :class:`DataFrame` and :class:`Series` constructors for extension dtype scalars (:issue:`45854`)
--
+- Performance improvement in :meth:`io.sql.SQLDatabase.execute` which streams results when chunking is enabled (:issue:`40847`)
 
 .. ---------------------------------------------------------------------------
 
 .. _whatsnew_150.bug_fixes:
diff --git a/pandas/io/sql.py b/pandas/io/sql.py
index e004e9c1ecbcc..0f56180f276e7 100644
--- a/pandas/io/sql.py
+++ b/pandas/io/sql.py
@@ -1382,9 +1382,27 @@ def run_transaction(self):
         else:
             yield self.connectable
 
-    def execute(self, *args, **kwargs):
-        """Simple passthrough to SQLAlchemy connectable"""
-        return self.connectable.execution_options().execute(*args, **kwargs)
+    def execute(self, *args, chunksize: int = 0, **kwargs):
+        """
+        Simple passthrough to SQLAlchemy connectable
+
+        Parameters
+        ----------
+        chunksize : int, default 0
+            Number of rows to fetch per batch using a streaming (server-side)
+            cursor. By default, the full result set is fetched at once.
+
+        Returns
+        -------
+        Iterable of query results
+        """
+        if chunksize > 0:
+            # See: https://pythonspeed.com/articles/pandas-sql-chunking/
+            return self.connectable.execution_options(stream_results=True).execute(
+                *args, **kwargs
+            )
+        else:
+            return self.connectable.execution_options().execute(*args, **kwargs)
 
     def read_table(
         self,
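
For context, a minimal sketch of what the `stream_results=True` execution option (the option this change enables on the chunked branch) does at the SQLAlchemy level. The connection URL and table name are hypothetical placeholders, not part of this change:

```python
from sqlalchemy import create_engine, text

# Hypothetical connection URL and table name, used only for illustration.
engine = create_engine("postgresql://user:password@localhost:5432/exampledb")

with engine.connect() as conn:
    # stream_results=True asks the DBAPI driver for a server-side cursor, so
    # rows are fetched in batches as the result is iterated rather than being
    # loaded into memory all at once.
    result = conn.execution_options(stream_results=True).execute(
        text("SELECT * FROM example_table")
    )
    row_count = sum(1 for _ in result)  # iterate without buffering all rows

print(row_count)
```

Keeping the default `chunksize=0` on the existing non-streaming branch leaves current callers unchanged; streaming is opt-in per call.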