Details
- Type: Bug
- Status: Stalled
- Priority: Major
- Resolution: Unresolved
- Affects Version/s: 1.1.10, 1.1.12
- Fix Version/s: None
- Environment: Ubuntu 20.04.6 LTS, Python 3.10
Description
The following script raises the error `mariadb.OperationalError: Can't create more than max_prepared_stmt_count statements (current value: 16382)`, which arises when the number of prepared statements held by the backend database exceeds the configured limit.
I could reproduce this error, and would expect it, if I kept a list of cursor objects, one per call, and explicitly did not close them.
However, since I am using a `with` block to manage the cursor object, it should be closed automatically (and indeed the error persists even when I explicitly add `cursor.close()` inside the for loop).
(The error occurs after 16382 calls to `cursor.executemany`.)
{code:python}
import mariadb
import json

test_connection_params = {
    'user': MARIADB_USERNAME,
    'password': MARIADB_PASSWORD,
    'host': MARIADB_HOST_ADDRESS,
    'port': 3306,
    'database': MARIADB_DATABASE_NAME
}

def main():
    conn = mariadb.connect(**test_connection_params)

    with conn.cursor() as cursor:
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS my_table (
                id INT AUTO_INCREMENT PRIMARY KEY,
                name VARCHAR(128),
                description VARCHAR(2000),
                embedding TEXT NOT NULL
            );
        """)
        conn.commit()

    data = [(f"Product {i}", f"Description for product {i}", json.dumps([i*0.01, i*0.02, i*0.03, i*0.04])) for i in range(1_000_000)]
    query = """
        INSERT INTO my_table (name, description, embedding)
        VALUES (?, ?, ?)
        """

    chunk_size = 10

    for i in range(0, len(data), chunk_size):
        with conn.cursor() as cursor:
            cursor.executemany(query, data[i:i + chunk_size])
            conn.commit()

    conn.close()

if __name__ == "__main__":
    main()
{code}
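While the insert loop runs, the build-up can be observed on the server side. A minimal sketch, assuming the same `test_connection_params` as above (`max_prepared_stmt_count` and `Prepared_stmt_count` are standard MariaDB server variables, not connector settings):
{code:python}
# Sketch: inspect the server-side prepared-statement counters.
import mariadb

def show_prepared_stmt_counters(params):
    conn = mariadb.connect(**params)
    with conn.cursor() as cursor:
        cursor.execute("SHOW GLOBAL VARIABLES LIKE 'max_prepared_stmt_count'")
        print(cursor.fetchone())  # configured limit, default 16382
        cursor.execute("SHOW GLOBAL STATUS LIKE 'Prepared_stmt_count'")
        print(cursor.fetchone())  # statements currently held open on the server
    conn.close()
{code}
If the statements are not being freed, `Prepared_stmt_count` would be expected to climb toward `max_prepared_stmt_count` as the loop runs.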
This error does not occur when using the PyMySQL connector with similar logic.
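(For reference, the "similar logic" looks roughly like the sketch below; it is not the exact script, and PyMySQL uses %s placeholders and, as far as I know, does not create server-side prepared statements by default.)
{code:python}
# Rough PyMySQL equivalent of the insert loop (sketch only, same placeholder
# credentials as above; PyMySQL uses %s-style parameters).
import json
import pymysql

conn = pymysql.connect(user=MARIADB_USERNAME, password=MARIADB_PASSWORD,
                       host=MARIADB_HOST_ADDRESS, port=3306,
                       database=MARIADB_DATABASE_NAME)

data = [(f"Product {i}", f"Description for product {i}",
         json.dumps([i*0.01, i*0.02, i*0.03, i*0.04])) for i in range(1_000_000)]
query = "INSERT INTO my_table (name, description, embedding) VALUES (%s, %s, %s)"

chunk_size = 10
for i in range(0, len(data), chunk_size):
    with conn.cursor() as cursor:
        cursor.executemany(query, data[i:i + chunk_size])
    conn.commit()

conn.close()
{code}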
The error also does not occur when I swap the TEXT column for the new VECTOR type introduced in MariaDB 11.7.1-rc:
{code:python}
def main():
    conn = mariadb.connect(**test_connection_params)

    with conn.cursor() as cursor:
        query = """
        CREATE TABLE IF NOT EXISTS products (
            id INT AUTO_INCREMENT PRIMARY KEY,
            name VARCHAR(128),
            description VARCHAR(2000),
            embedding VECTOR(4) NOT NULL
        );
        """
        cursor.execute(query)
        conn.commit()

    data = [(f"Product {i}", f"Description for product {i}", json.dumps([i*0.01, i*0.02, i*0.03, i*0.04])) for i in range(1_000_000)]
    query = "INSERT INTO products (name, description, embedding) VALUES (?, ?, VEC_FromText(?))"

    chunk_size = 10
    for i in range(0, len(data), chunk_size):
        with conn.cursor() as cursor:
            cursor.executemany(query, data[i:i + chunk_size])
            conn.commit()

    conn.close()
{code}
Oddly, however, the error returns when I wrap the MariaDB part of the code in a Python class, using the same logic that was just working:
{code:python}
class DatabaseManager:
    def __init__(self, config):
        self.connection = mariadb.connect(**config)

    def create_table_if_not_exists(self):
        query = """
        CREATE TABLE IF NOT EXISTS products (
            id INT AUTO_INCREMENT PRIMARY KEY,
            name VARCHAR(128),
            description VARCHAR(2000),
            embedding VECTOR(4) NOT NULL
        );
        """
        with self.connection.cursor() as cursor:
            cursor.execute(query)
            self.connection.commit()

    def insert_rows(self, chunk):
        query = """
        INSERT INTO products (name, description, embedding)
        VALUES (?, ?, VEC_FromText(?))
        """
        with self.connection.cursor() as cursor:
            cursor.executemany(query, chunk)

    def close_connection(self):
        if self.connection:
            self.connection.close()


def main():
    db_manager = DatabaseManager(test_connection_params)
    db_manager.create_table_if_not_exists()

    data = [(f"Product {i}", f"Description for product {i}", json.dumps([i * 0.01, i * 0.02, i * 0.03, i * 0.04])) for i in range(1_000_000)]

    chunk_size = 10
    for i in range(0, len(data), chunk_size):
        db_manager.insert_rows(data[i:i + chunk_size])

    db_manager.close_connection()
{code}
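In case it helps narrow this down, below is a sketch of a variation on the first script that keeps a single cursor for the entire loop. The assumption (not verified against the connector documentation) is that re-running the same query on the same cursor keeps one server-side prepared statement alive instead of preparing a new one per chunk; I have not tested whether this avoids the limit, but it may be a useful data point for isolating where statements leak.
{code:python}
# Variation on the first script: one cursor reused for every chunk.
# Assumption on my part (not confirmed in the docs): re-running the same
# query on the same cursor keeps a single server-side prepared statement
# alive instead of preparing a new one per chunk.
import json
import mariadb

def main():
    conn = mariadb.connect(**test_connection_params)  # same params as above

    data = [(f"Product {i}", f"Description for product {i}",
             json.dumps([i*0.01, i*0.02, i*0.03, i*0.04])) for i in range(1_000_000)]
    query = "INSERT INTO my_table (name, description, embedding) VALUES (?, ?, ?)"

    chunk_size = 10
    with conn.cursor() as cursor:
        for i in range(0, len(data), chunk_size):
            cursor.executemany(query, data[i:i + chunk_size])
            conn.commit()

    conn.close()
{code}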