Bugfix/sql param data memory leak #703

Merged
merged 12 commits on May 13, 2022
1 change: 1 addition & 0 deletions .gitignore
@@ -54,3 +54,4 @@ tags
# The Access unit tests copy empty.accdb and empty.mdb to these names and use them.
test.accdb
test.mdb

6 changes: 3 additions & 3 deletions src/cursor.cpp
@@ -386,8 +386,8 @@ static void closeimpl(Cursor* cur)

free_results(cur, FREE_STATEMENT | FREE_PREPARED);

FreeParameterInfo(cur);
FreeParameterData(cur);
FreeParameterInfo(cur);

if (StatementIsValid(cur))
{
@@ -1061,9 +1061,9 @@ static PyObject* Cursor_executemany(PyObject* self, PyObject* args)
if (cursor->fastexecmany)
{
free_results(cursor, FREE_STATEMENT | KEEP_PREPARED);
if (!ExecuteMulti(cursor, pSql, param_seq))
if (!ExecuteMulti(cursor, pSql, param_seq))
return 0;
}
}
else
{
for (Py_ssize_t i = 0; i < c; i++)
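
Why the closeimpl change above swaps the two calls: a plausible reading is that the parameter data has to be released while the cached parameter info (in particular the parameter count) is still intact, otherwise the per-parameter objects become unreachable and leak. The following is a minimal, hypothetical illustration of that ordering hazard; the field names (paramInfos, paramcount) and the simplified cleanup bodies are illustrative only, not pyodbc's actual implementation.

// Hypothetical sketch of the ordering hazard (not pyodbc's actual code).
#include <cstdlib>

struct ParamData { void* pObject; };       // per-parameter object that must be released

struct Cur {
    ParamData* paramInfos;                 // illustrative field names
    int        paramcount;
};

static void FreeParameterData_(Cur* cur)   // needs a valid paramcount to do its job
{
    if (cur->paramInfos) {
        for (int i = 0; i < cur->paramcount; i++)
            std::free(cur->paramInfos[i].pObject);
        std::free(cur->paramInfos);
        cur->paramInfos = nullptr;
    }
}

static void FreeParameterInfo_(Cur* cur)   // drops the cached parameter info
{
    cur->paramcount = 0;
}

// Calling FreeParameterInfo_ first would zero paramcount, so a later
// FreeParameterData_ would free the array but none of the pObject entries.
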
95 changes: 95 additions & 0 deletions tests3/dbapi_SQLParamData_memory__test.py
@@ -0,0 +1,95 @@
"""
This tests ensures that there is no memory leakage
after using SQLParamData that returns -1.

One scenario where SQLParamData function will be used is when there is a parameterized
INSERT INTO query with at least one parameter's length is too big.
Note that In my case, 'too big' means pGetLen(pInfo->pObject) was more than 4000.

In order to execute the INSERT INTO query, SQLExecute is used.
SQLExecute will return SQL_NEED_DATA (SQL_NEED_DATA = 99),
then SQLParamData will be used to create a SQL parameter and will return SQL_NEED_DATA.
This call will create the pObject (PyObject) that should be freed.
After that SQLPutData will be used in a loop to save the data in this SQL parameter.
Then SQLParamData is called again, and if there is an error (-1), the data of newly
created SQL Parameter should be freed.

This test should be tested against a table that has no space at all or no space in the
transaction log in order to get -1 value on the second call to SQLParamData.
The name of the table is stored in `TABLE_NAME`, change it to be your table's name.
"""
import gc
import math
import os
import unittest

import psutil

from tests3.testutils import add_to_path, load_setup_connection_string

add_to_path()
import pyodbc

KB = 1024
MB = KB * 1024

ACCEPTABLE_MEMORY_DIFF = 500 * KB

TABLE_NAME = "FullTable"

CONNECTION_STRING = None

CONNECTION_STRING_ERROR_MESSAGE = (
    r"Please create a tmp\setup.cfg file or set a valid value for CONNECTION_STRING."
)


def current_total_memory_usage():
    """
    :return: Current total memory usage in bytes.
    """
    process = psutil.Process(os.getpid())
    return process.memory_info().rss


class MemoryLeakSQLParamDataTestCase(unittest.TestCase):
    driver = pyodbc

    @classmethod
    def setUpClass(cls):
        filename = os.path.splitext(os.path.basename(__file__))[0]
        cls.connection_string = (
            load_setup_connection_string(filename) or CONNECTION_STRING
        )

        if not cls.connection_string:
            raise ValueError(CONNECTION_STRING_ERROR_MESSAGE)

    def test_memory_leak(self):
        query = "INSERT INTO {table_name} VALUES (?)".format(table_name=TABLE_NAME)

        with pyodbc.connect(self.connection_string) as conn:
            cursor = conn.cursor()

            current_memory_usage = current_total_memory_usage()

            try:
                cursor.execute(query, "a" * 10 * MB)
            except self.driver.ProgrammingError as e:
                self.assertEqual("42000", e.args[0])
                self.assertIn("SQLParamData", e.args[1])
            finally:
                cursor.close()

            after_exception_memory_usage = current_total_memory_usage()

            diff = math.fabs(after_exception_memory_usage - current_memory_usage)
            self.assertLess(diff, ACCEPTABLE_MEMORY_DIFF)


def main():
    unittest.main()


if __name__ == "__main__":
    main()
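
For readers unfamiliar with ODBC's data-at-execution protocol, below is a minimal sketch of the loop the test's docstring describes. It is not pyodbc's actual code: put_chunks stands in for the SQLPutData loop that streams the oversized value (and creates the temporary pObject), and release_param_data stands in for the cleanup this PR adds on the error path.

#include <sql.h>
#include <sqlext.h>

// Hypothetical data-at-execution (SQL_NEED_DATA) execute loop.
static SQLRETURN execute_with_dae(SQLHSTMT hstmt,
                                  SQLRETURN (*put_chunks)(SQLHSTMT, SQLPOINTER),
                                  void (*release_param_data)(void))
{
    SQLRETURN ret = SQLExecute(hstmt);

    while (ret == SQL_NEED_DATA)
    {
        SQLPOINTER token = 0;
        ret = SQLParamData(hstmt, &token);   // SQL_NEED_DATA, SQL_SUCCESS, or an error

        if (ret == SQL_NEED_DATA)
        {
            // Stream this parameter's value with SQLPutData; the temporary
            // object holding the encoded data is created around here.
            if (!SQL_SUCCEEDED(put_chunks(hstmt, token)))
                break;
            // Loop back: SQLParamData is called again, and a failure there
            // (e.g. a full transaction log) comes back as an error code.
        }
    }

    if (!SQL_SUCCEEDED(ret))
        release_param_data();                // without this, the temporary data leaks

    return ret;
}

The cursor.cpp change above serves the same purpose on the close path: the parameter data is released before the cached parameter info is dropped.
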
1 change: 1 addition & 0 deletions tests3/dbapi_SQLParamData_memory__test__requirements.txt
@@ -0,0 +1 @@
psutil