From 45866f94ec549ac77150da0af131ff1d108c0036 Mon Sep 17 00:00:00 2001 From: Jahnvi Thakkar Date: Mon, 2 Mar 2026 15:50:23 +0530 Subject: [PATCH 1/6] feat: add native_uuid support with pyodbc-compatible defaults - Add module-level native_uuid property (default=False, matching pyodbc) - Add per-connection native_uuid override via connect(native_uuid=True/False) - Connection-level setting takes precedence over module-level - Snapshot native_uuid at execute() time for consistency within result sets - Return uppercase UUID strings when native_uuid=False (pyodbc compat) - Extract _compute_uuid_str_indices() helper to eliminate code duplication - Move uuid import to module-level in row.py (avoid per-row lazy import) - Fix Row.__init__ signature in .pyi stub to match implementation - Remove duplicate DDBCSQLDescribeCol call in execute() - Add comprehensive tests for both module-level and per-connection control - Thread-safe settings via _settings_lock --- mssql_python/__init__.py | 19 + mssql_python/connection.py | 13 + mssql_python/cursor.py | 71 +- mssql_python/db_connection.py | 22 +- mssql_python/helpers.py | 6 +- mssql_python/mssql_python.pyi | 20 +- mssql_python/row.py | 34 +- tests/test_001_globals.py | 141 +++- tests/test_004_cursor.py | 1461 ++++++++++++++++++++++++++------- 9 files changed, 1443 insertions(+), 344 deletions(-) diff --git a/mssql_python/__init__.py b/mssql_python/__init__.py index 88801aac0..3d4675f1e 100644 --- a/mssql_python/__init__.py +++ b/mssql_python/__init__.py @@ -339,6 +339,24 @@ def lowercase(self, value: bool) -> None: with _settings_lock: _settings.lowercase = value + @property + def native_uuid(self) -> bool: + """Get the native_uuid setting. + + Controls whether UNIQUEIDENTIFIER columns return uuid.UUID objects (True) + or str (False). Default is False (matching pyodbc behavior). + Set to True to return native uuid.UUID objects. 
+ """ + return _settings.native_uuid + + @native_uuid.setter + def native_uuid(self, value: bool) -> None: + """Set the native_uuid setting.""" + if not isinstance(value, bool): + raise ValueError("native_uuid must be a boolean value") + with _settings_lock: + _settings.native_uuid = value + # Replace the current module with our custom module class old_module: types.ModuleType = sys.modules[__name__] @@ -357,3 +375,4 @@ def lowercase(self, value: bool) -> None: # Initialize property values lowercase: bool = _settings.lowercase +native_uuid: bool = _settings.native_uuid diff --git a/mssql_python/connection.py b/mssql_python/connection.py index c6c4944de..b9fdc2182 100644 --- a/mssql_python/connection.py +++ b/mssql_python/connection.py @@ -203,6 +203,7 @@ def __init__( autocommit: bool = False, attrs_before: Optional[Dict[int, Union[int, str, bytes]]] = None, timeout: int = 0, + native_uuid: Optional[bool] = None, **kwargs: Any, ) -> None: """ @@ -219,6 +220,9 @@ def __init__( connecting, such as SQL_ATTR_LOGIN_TIMEOUT, SQL_ATTR_ODBC_CURSORS, and SQL_ATTR_PACKET_SIZE. timeout (int): Login timeout in seconds. 0 means no timeout. + native_uuid (bool, optional): Controls whether UNIQUEIDENTIFIER columns return + uuid.UUID objects (True) or str (False) for cursors created from this connection. + None (default) defers to the module-level ``mssql_python.native_uuid`` setting. **kwargs: Additional key/value pairs for the connection string. Returns: @@ -236,7 +240,16 @@ def __init__( >>> import mssql_python as ms >>> conn = ms.connect("Server=myserver;Database=mydb", ... attrs_before={ms.SQL_ATTR_LOGIN_TIMEOUT: 30}) + + >>> # Return native uuid.UUID objects instead of strings + >>> conn = ms.connect("Server=myserver;Database=mydb", native_uuid=True) """ + # Store per-connection native_uuid override. + # None means "use module-level mssql_python.native_uuid". 
+ if native_uuid is not None and not isinstance(native_uuid, bool): + raise ValueError("native_uuid must be a boolean value or None") + self._native_uuid = native_uuid + self.connection_str = self._construct_connection_string(connection_str, **kwargs) self._attrs_before = attrs_before or {} diff --git a/mssql_python/cursor.py b/mssql_python/cursor.py index 316d8ed9f..df6779a41 100644 --- a/mssql_python/cursor.py +++ b/mssql_python/cursor.py @@ -135,6 +135,10 @@ def __init__(self, connection: "Connection", timeout: int = 0) -> None: self._cached_column_map = None self._cached_converter_map = None + self._uuid_str_indices = None # Pre-computed UUID column indices for str conversion + # Cache the effective native_uuid setting for this cursor's connection. + # Resolution order: connection._native_uuid (if not None) → module-level setting. + self._conn_native_uuid = getattr(self.connection, "_native_uuid", None) self._next_row_index = 0 # internal: index of the next row the driver will return (0-based) self._has_result_set = False # Track if we have an active result set self._skip_increment_for_next_fetch = ( @@ -1009,6 +1013,32 @@ def _build_converter_map(self): return converter_map + def _compute_uuid_str_indices(self): + """ + Compute the tuple of column indices whose uuid.UUID values should be + stringified (as uppercase), based on the effective native_uuid setting. + + Resolution order: connection-level (if set) → module-level (fallback). + + Returns: + tuple of int or None: Column indices to stringify, or None when + native_uuid is True — meaning zero per-row overhead. 
+ """ + if not self.description: + return None + + effective_native_uuid = ( + self._conn_native_uuid + if self._conn_native_uuid is not None + else get_settings().native_uuid + ) + if not effective_native_uuid: + indices = tuple( + i for i, desc in enumerate(self.description) if desc and desc[1] is uuid.UUID + ) + return indices if indices else None + return None + def _get_column_and_converter_maps(self): """ Get column map and converter map for Row construction (thread-safe). @@ -1429,20 +1459,13 @@ def execute( # pylint: disable=too-many-locals,too-many-branches,too-many-state col_desc[0]: i for i, col_desc in enumerate(self.description) } self._cached_converter_map = self._build_converter_map() + self._uuid_str_indices = self._compute_uuid_str_indices() else: self.rowcount = ddbc_bindings.DDBCSQLRowCount(self.hstmt) self._clear_rownumber() self._cached_column_map = None self._cached_converter_map = None - - # After successful execution, initialize description if there are results - column_metadata = [] - try: - ddbc_bindings.DDBCSQLDescribeCol(self.hstmt, column_metadata) - self._initialize_description(column_metadata) - except Exception as e: - # If describe fails, it's likely there are no results (e.g., for INSERT) - self.description = None + self._uuid_str_indices = None self._reset_inputsizes() # Reset input sizes after execution # Return self for method chaining @@ -2278,9 +2301,11 @@ def executemany( # pylint: disable=too-many-locals,too-many-branches,too-many-s if self.description: self.rowcount = -1 self._reset_rownumber() + self._uuid_str_indices = self._compute_uuid_str_indices() else: self.rowcount = ddbc_bindings.DDBCSQLRowCount(self.hstmt) self._clear_rownumber() + self._uuid_str_indices = None finally: # Reset input sizes after execution self._reset_inputsizes() @@ -2328,7 +2353,13 @@ def fetchone(self) -> Union[None, Row]: # Get column and converter maps column_map, converter_map = self._get_column_and_converter_maps() - return Row(row_data, 
column_map, cursor=self, converter_map=converter_map) + return Row( + row_data, + column_map, + cursor=self, + converter_map=converter_map, + uuid_str_indices=self._uuid_str_indices, + ) except Exception as e: # On error, don't increment rownumber - rethrow the error raise e @@ -2386,8 +2417,15 @@ def fetchmany(self, size: Optional[int] = None) -> List[Row]: column_map, converter_map = self._get_column_and_converter_maps() # Convert raw data to Row objects + uuid_idx = self._uuid_str_indices return [ - Row(row_data, column_map, cursor=self, converter_map=converter_map) + Row( + row_data, + column_map, + cursor=self, + converter_map=converter_map, + uuid_str_indices=uuid_idx, + ) for row_data in rows_data ] except Exception as e: @@ -2439,8 +2477,15 @@ def fetchall(self) -> List[Row]: column_map, converter_map = self._get_column_and_converter_maps() # Convert raw data to Row objects + uuid_idx = self._uuid_str_indices return [ - Row(row_data, column_map, cursor=self, converter_map=converter_map) + Row( + row_data, + column_map, + cursor=self, + converter_map=converter_map, + uuid_str_indices=uuid_idx, + ) for row_data in rows_data ] except Exception as e: @@ -2466,6 +2511,7 @@ def nextset(self) -> Union[bool, None]: # Clear cached column and converter maps for the new result set self._cached_column_map = None self._cached_converter_map = None + self._uuid_str_indices = None # Skip to the next result set ret = ddbc_bindings.DDBCSQLMoreResults(self.hstmt) @@ -2491,6 +2537,7 @@ def nextset(self) -> Union[bool, None]: col_desc[0]: i for i, col_desc in enumerate(self.description) } self._cached_converter_map = self._build_converter_map() + self._uuid_str_indices = self._compute_uuid_str_indices() except Exception as e: # pylint: disable=broad-exception-caught # If describe fails, there might be no results in this result set self.description = None diff --git a/mssql_python/db_connection.py b/mssql_python/db_connection.py index a6b8c614e..765be9415 100644 --- 
a/mssql_python/db_connection.py +++ b/mssql_python/db_connection.py @@ -14,6 +14,7 @@ def connect( autocommit: bool = False, attrs_before: Optional[Dict[int, Union[int, str, bytes]]] = None, timeout: int = 0, + native_uuid: Optional[bool] = None, **kwargs: Any, ) -> Connection: """ @@ -22,10 +23,18 @@ def connect( Args: connection_str (str): The connection string to connect to. autocommit (bool): If True, causes a commit to be performed after each SQL statement. - TODO: Add the following parameters to the function signature: + attrs_before (dict, optional): A dictionary of connection attributes to set before + connecting. timeout (int): The timeout for the connection attempt, in seconds. - readonly (bool): If True, the connection is set to read-only. - attrs_before (dict): A dictionary of connection attributes to set before connecting. + native_uuid (bool, optional): Controls whether UNIQUEIDENTIFIER columns return + uuid.UUID objects (True) or str (False) for this connection. + - True: UNIQUEIDENTIFIER columns return uuid.UUID objects. + - False: UNIQUEIDENTIFIER columns return str (pyodbc-compatible). + - None (default): Uses the module-level ``mssql_python.native_uuid`` setting (False). + + This per-connection override is useful for incremental adoption of native UUIDs: + connections that are ready can pass native_uuid=True, while the default (False) + preserves pyodbc-compatible string behavior. Keyword Args: **kwargs: Additional key/value pairs for the connection string. Below attributes are not implemented in the internal driver: @@ -44,6 +53,11 @@ def connect( transactions, and closing the connection. 
""" conn = Connection( - connection_str, autocommit=autocommit, attrs_before=attrs_before, timeout=timeout, **kwargs + connection_str, + autocommit=autocommit, + attrs_before=attrs_before, + timeout=timeout, + native_uuid=native_uuid, + **kwargs, ) return conn diff --git a/mssql_python/helpers.py b/mssql_python/helpers.py index 8c7b90602..18619539c 100644 --- a/mssql_python/helpers.py +++ b/mssql_python/helpers.py @@ -360,13 +360,17 @@ class Settings: Settings class for mssql_python package configuration. This class holds global settings that affect the behavior of the package, - including lowercase column names, decimal separator. + including lowercase column names, decimal separator, and UUID handling. """ def __init__(self) -> None: self.lowercase: bool = False # Use the pre-determined separator - no locale access here self.decimal_separator: str = _default_decimal_separator + # Controls whether UNIQUEIDENTIFIER columns return uuid.UUID (True) + # or str (False). Default False matches pyodbc behavior for seamless migration. + # Set to True to return native uuid.UUID objects. + self.native_uuid: bool = False # Global settings instance diff --git a/mssql_python/mssql_python.pyi b/mssql_python/mssql_python.pyi index dd3fd96a0..ba5dcad98 100644 --- a/mssql_python/mssql_python.pyi +++ b/mssql_python/mssql_python.pyi @@ -129,21 +129,11 @@ class Row: def __init__( self, - cursor: "Cursor", - description: List[ - Tuple[ - str, - Any, - Optional[int], - Optional[int], - Optional[int], - Optional[int], - Optional[bool], - ] - ], values: List[Any], - column_map: Optional[Dict[str, int]] = None, - settings_snapshot: Optional[Dict[str, Any]] = None, + column_map: Dict[str, int], + cursor: Optional["Cursor"] = None, + converter_map: Optional[List[Any]] = None, + uuid_str_indices: Optional[Tuple[int, ...]] = None, ) -> None: ... def __getitem__(self, index: int) -> Any: ... def __getattr__(self, name: str) -> Any: ... 
@@ -247,6 +237,7 @@ class Connection: autocommit: bool = False, attrs_before: Optional[Dict[int, Union[int, str, bytes]]] = None, timeout: int = 0, + native_uuid: Optional[bool] = None, **kwargs: Any, ) -> None: ... @@ -289,6 +280,7 @@ def connect( autocommit: bool = False, attrs_before: Optional[Dict[int, Union[int, str, bytes]]] = None, timeout: int = 0, + native_uuid: Optional[bool] = None, **kwargs: Any, ) -> Connection: ... diff --git a/mssql_python/row.py b/mssql_python/row.py index 57072e6d3..a703db33a 100644 --- a/mssql_python/row.py +++ b/mssql_python/row.py @@ -6,6 +6,7 @@ """ import decimal +import uuid as _uuid from typing import Any from mssql_python.helpers import get_settings from mssql_python.logging import logger @@ -26,7 +27,7 @@ class Row: print(row.column_name) # Access by column name (case sensitivity varies) """ - def __init__(self, values, column_map, cursor=None, converter_map=None): + def __init__(self, values, column_map, cursor=None, converter_map=None, uuid_str_indices=None): """ Initialize a Row object with values and pre-built column map. Args: @@ -34,6 +35,11 @@ def __init__(self, values, column_map, cursor=None, converter_map=None): column_map: Pre-built column name to index mapping (shared across rows) cursor: Optional cursor reference (for backward compatibility and lowercase access) converter_map: Pre-computed converter map (shared across rows for performance) + uuid_str_indices: Tuple of column indices whose uuid.UUID values should be + converted to uppercase str. Pre-computed once per result set when + native_uuid=False (the default). The uppercase format matches pyodbc + and SQL Server's native text representation. + None means no conversion (native_uuid=True). 
""" # Apply output converters if available using pre-computed converter map if converter_map: @@ -48,9 +54,35 @@ def __init__(self, values, column_map, cursor=None, converter_map=None): else: self._values = values + # Convert UUID columns to str when native_uuid=False (the default). + # uuid_str_indices is pre-computed once at execute() time, so this is + # O(num_uuid_columns) per row — zero cost when native_uuid=True. + if uuid_str_indices: + self._stringify_uuids(uuid_str_indices) + self._column_map = column_map self._cursor = cursor + def _stringify_uuids(self, indices): + """ + Convert uuid.UUID values at the given column indices to uppercase str in-place. + + This is only called when native_uuid=False. The uppercase format matches + the behavior of pyodbc and SQL Server's native UNIQUEIDENTIFIER text + representation, ensuring seamless migration. It operates directly on + self._values to avoid creating an extra list copy. + """ + vals = self._values + # If values are still the original list (no converters), we need a mutable copy + if not isinstance(vals, list): + vals = list(vals) + self._values = vals + + for i in indices: + v = vals[i] + if v is not None and isinstance(v, _uuid.UUID): + vals[i] = str(v).upper() + def _apply_output_converters(self, values, cursor): """ Apply output converters to raw values. 
diff --git a/tests/test_001_globals.py b/tests/test_001_globals.py index 7c004a136..cb7b6012b 100644 --- a/tests/test_001_globals.py +++ b/tests/test_001_globals.py @@ -21,6 +21,7 @@ lowercase, getDecimalSeparator, setDecimalSeparator, + native_uuid, ) @@ -388,7 +389,8 @@ def test_decimal_separator_with_db_operations(db_connection): try: # Create a test table with decimal values cursor = db_connection.cursor() - cursor.execute(""" + cursor.execute( + """ DROP TABLE IF EXISTS #decimal_separator_test; CREATE TABLE #decimal_separator_test ( id INT, @@ -399,7 +401,8 @@ def test_decimal_separator_with_db_operations(db_connection): (2, 678.90), (3, 0.01), (4, 999.99); - """) + """ + ) cursor.close() # Test 1: Fetch with default separator @@ -467,7 +470,8 @@ def test_decimal_separator_batch_operations(db_connection): try: # Create test data cursor = db_connection.cursor() - cursor.execute(""" + cursor.execute( + """ DROP TABLE IF EXISTS #decimal_batch_test; CREATE TABLE #decimal_batch_test ( id INT, @@ -478,7 +482,8 @@ def test_decimal_separator_batch_operations(db_connection): (1, 123.456, 12345.67890), (2, 0.001, 0.00001), (3, 999.999, 9999.99999); - """) + """ + ) cursor.close() # Test 1: Fetch results with default separator @@ -740,3 +745,131 @@ def separator_reader_worker(): # Always make sure to clean up stop_event.set() setDecimalSeparator(original_separator) + + +def test_native_uuid_default(): + """Test that native_uuid defaults to False (matching pyodbc).""" + assert ( + mssql_python.native_uuid is False + ), "native_uuid should default to False (matching pyodbc)" + + +def test_native_uuid_type_validation(): + """Test that native_uuid only accepts boolean values.""" + original = mssql_python.native_uuid + + try: + # Test valid boolean values + mssql_python.native_uuid = True + assert mssql_python.native_uuid is True + + mssql_python.native_uuid = False + assert mssql_python.native_uuid is False + + # Test invalid types — all should raise ValueError + 
invalid_values = [1, 0, "True", "False", None, [], {}, "yes", "no", "t", "f"] + + for value in invalid_values: + with pytest.raises(ValueError, match="native_uuid must be a boolean value"): + mssql_python.native_uuid = value + + finally: + # Always restore original value + mssql_python.native_uuid = original + + +def test_native_uuid_settings_consistency(): + """Test that native_uuid is consistent between module property and Settings object.""" + from mssql_python import get_settings + + original = mssql_python.native_uuid + + try: + mssql_python.native_uuid = False + settings = get_settings() + assert settings.native_uuid is False, "Settings should reflect module-level change" + + mssql_python.native_uuid = True + settings = get_settings() + assert settings.native_uuid is True, "Settings should reflect module-level change" + + finally: + mssql_python.native_uuid = original + + +def test_native_uuid_thread_safety(): + """Test that native_uuid is thread-safe under concurrent access.""" + import queue + + original = mssql_python.native_uuid + results_queue = queue.Queue() + stop_event = threading.Event() + errors = [] + + def writer_thread(): + """Toggle native_uuid between True and False.""" + try: + while not stop_event.is_set(): + mssql_python.native_uuid = True + mssql_python.native_uuid = False + results_queue.put(("write", True)) + except Exception as e: + errors.append(str(e)) + + def reader_thread(): + """Read native_uuid and verify it's a boolean.""" + try: + while not stop_event.is_set(): + val = mssql_python.native_uuid + assert isinstance(val, bool), f"Expected bool, got {type(val)}" + results_queue.put(("read", val)) + except Exception as e: + errors.append(str(e)) + + threads = [] + for _ in range(3): + threads.append(threading.Thread(target=writer_thread)) + threads.append(threading.Thread(target=reader_thread)) + + for t in threads: + t.start() + + try: + time.sleep(1) # Let threads run for 1 second + stop_event.set() + + for t in threads: + 
t.join(timeout=1) + + assert not errors, f"Thread errors detected: {errors}" + + finally: + stop_event.set() + mssql_python.native_uuid = original + + +def test_connect_native_uuid_parameter_signature(): + """Test that connect() accepts the native_uuid parameter without errors.""" + import inspect + + sig = inspect.signature(mssql_python.connect) + params = sig.parameters + + assert "native_uuid" in params, "connect() should accept native_uuid parameter" + param = params["native_uuid"] + assert param.default is None, "native_uuid default should be None" + + +def test_connection_native_uuid_attribute(): + """Test that Connection class stores the _native_uuid attribute.""" + from mssql_python.connection import Connection + + # Connection.__init__ should accept native_uuid; we can't fully construct + # a Connection without a valid connection string, but we can verify the + # parameter is accepted by inspecting the signature. + import inspect + + sig = inspect.signature(Connection.__init__) + params = sig.parameters + assert "native_uuid" in params, "Connection.__init__ should accept native_uuid parameter" + assert params["native_uuid"].default is None diff --git a/tests/test_004_cursor.py b/tests/test_004_cursor.py index 575496299..21145dda3 100644 --- a/tests/test_004_cursor.py +++ b/tests/test_004_cursor.py @@ -182,13 +182,15 @@ def test_mixed_empty_and_null_values(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_empty_vs_null") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_empty_vs_null ( id INT, text_col NVARCHAR(100), binary_col VARBINARY(100) ) - """) + """ + ) db_connection.commit() # Insert mix of empty and NULL values @@ -886,13 +888,15 @@ def test_rowcount(cursor, db_connection): cursor.execute("INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe3');") assert cursor.rowcount == 1, "Rowcount should be 1 after third insert" - cursor.execute(""" + cursor.execute( + """ INSERT INTO 
#pytest_test_rowcount (name) VALUES ('JohnDoe4'), ('JohnDoe5'), ('JohnDoe6'); - """) + """ + ) assert cursor.rowcount == 3, "Rowcount should be 3 after inserting multiple rows" cursor.execute("SELECT * FROM #pytest_test_rowcount;") @@ -988,12 +992,14 @@ def test_fetchmany_size_zero_lob(cursor, db_connection): """Test fetchmany with size=0 for LOB columns""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_lob") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_fetchmany_lob ( id INT PRIMARY KEY, lob_data NVARCHAR(MAX) ) - """) + """ + ) # Insert test data test_data = [(1, "First LOB data"), (2, "Second LOB data"), (3, "Third LOB data")] @@ -1018,12 +1024,14 @@ def test_fetchmany_more_than_exist_lob(cursor, db_connection): """Test fetchmany requesting more rows than exist with LOB columns""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_lob_more") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_fetchmany_lob_more ( id INT PRIMARY KEY, lob_data NVARCHAR(MAX) ) - """) + """ + ) # Insert only 3 rows test_data = [(1, "First LOB data"), (2, "Second LOB data"), (3, "Third LOB data")] @@ -1057,12 +1065,14 @@ def test_fetchmany_empty_result_lob(cursor, db_connection): """Test fetchmany on empty result set with LOB columns""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_lob_empty") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_fetchmany_lob_empty ( id INT PRIMARY KEY, lob_data NVARCHAR(MAX) ) - """) + """ + ) db_connection.commit() # Query empty table @@ -1085,12 +1095,14 @@ def test_fetchmany_very_large_lob(cursor, db_connection): """Test fetchmany with very large LOB column data""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_large_lob") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_fetchmany_large_lob ( id INT PRIMARY KEY, large_lob NVARCHAR(MAX) ) - """) + """ + ) # Create very large data (10000 characters) large_data = "x" * 10000 @@ -1140,12 +1152,14 @@ def 
test_fetchmany_mixed_lob_sizes(cursor, db_connection): """Test fetchmany with mixed LOB sizes including empty and NULL""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_mixed_lob") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_fetchmany_mixed_lob ( id INT PRIMARY KEY, mixed_lob NVARCHAR(MAX) ) - """) + """ + ) # Mix of sizes: empty, NULL, small, medium, large test_data = [ @@ -1273,12 +1287,14 @@ def test_executemany_empty_strings(cursor, db_connection): """Test executemany with empty strings - regression test for Unix UTF-16 conversion issue""" try: # Create test table for empty string testing - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_empty_batch ( id INT, data NVARCHAR(50) ) - """) + """ + ) # Clear any existing data cursor.execute("DELETE FROM #pytest_empty_batch") @@ -1319,7 +1335,8 @@ def test_executemany_empty_strings_various_types(cursor, db_connection): """Test executemany with empty strings in different column types""" try: # Create test table with different string types - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_string_types ( id INT, varchar_col VARCHAR(50), @@ -1327,7 +1344,8 @@ def test_executemany_empty_strings_various_types(cursor, db_connection): text_col TEXT, ntext_col NTEXT ) - """) + """ + ) # Clear any existing data cursor.execute("DELETE FROM #pytest_string_types") @@ -1368,12 +1386,14 @@ def test_executemany_unicode_and_empty_strings(cursor, db_connection): """Test executemany with mix of Unicode characters and empty strings""" try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_unicode_test ( id INT, data NVARCHAR(100) ) - """) + """ + ) # Clear any existing data cursor.execute("DELETE FROM #pytest_unicode_test") @@ -1418,12 +1438,14 @@ def test_executemany_large_batch_with_empty_strings(cursor, db_connection): """Test executemany with large batch containing empty strings""" try: # Create test table - cursor.execute(""" + 
cursor.execute( + """ CREATE TABLE #pytest_large_batch ( id INT, data NVARCHAR(50) ) - """) + """ + ) # Clear any existing data cursor.execute("DELETE FROM #pytest_large_batch") @@ -1476,12 +1498,14 @@ def test_executemany_compare_with_execute(cursor, db_connection): """Test that executemany produces same results as individual execute calls""" try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_compare_test ( id INT, data NVARCHAR(50) ) - """) + """ + ) # Test data with empty strings test_data = [ @@ -1534,13 +1558,15 @@ def test_executemany_edge_cases_empty_strings(cursor, db_connection): """Test executemany edge cases with empty strings and special characters""" try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_edge_cases ( id INT, varchar_data VARCHAR(100), nvarchar_data NVARCHAR(100) ) - """) + """ + ) # Clear any existing data cursor.execute("DELETE FROM #pytest_edge_cases") @@ -1594,12 +1620,14 @@ def test_executemany_null_vs_empty_string(cursor, db_connection): """Test that executemany correctly distinguishes between NULL and empty string""" try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_null_vs_empty ( id INT, data NVARCHAR(50) ) - """) + """ + ) # Clear any existing data cursor.execute("DELETE FROM #pytest_null_vs_empty") @@ -1664,12 +1692,14 @@ def test_executemany_binary_data_edge_cases(cursor, db_connection): """Test executemany with binary data and empty byte arrays""" try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_binary_test ( id INT, binary_data VARBINARY(100) ) - """) + """ + ) # Clear any existing data cursor.execute("DELETE FROM #pytest_binary_test") @@ -1831,7 +1861,8 @@ def test_executemany_mixed_null_and_typed_values(cursor, db_connection): """Test executemany with randomly mixed NULL and non-NULL values across multiple columns and rows (50 rows, 10 columns).""" try: # Create 
table with 10 columns of various types - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_empty_params ( col1 INT, col2 VARCHAR(50), @@ -1844,7 +1875,8 @@ def test_executemany_mixed_null_and_typed_values(cursor, db_connection): col9 DATE, col10 REAL ) - """) + """ + ) # Generate 50 rows with randomly mixed NULL and non-NULL values across 10 columns data = [] @@ -1908,7 +1940,8 @@ def test_executemany_multi_column_null_arrays(cursor, db_connection): """Test executemany with multi-column NULL arrays (50 records, 8 columns).""" try: # Create table with 8 columns of various types - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_null_arrays ( col1 INT, col2 VARCHAR(100), @@ -1919,7 +1952,8 @@ def test_executemany_multi_column_null_arrays(cursor, db_connection): col7 BIGINT, col8 DATE ) - """) + """ + ) # Generate 50 rows with all NULL values across 8 columns data = [(None, None, None, None, None, None, None, None) for _ in range(50)] @@ -1939,12 +1973,14 @@ def test_executemany_multi_column_null_arrays(cursor, db_connection): assert null_count == 50, f"Expected 50 NULLs in col{col_num}, got {null_count}" # Verify no non-NULL values exist - cursor.execute(""" + cursor.execute( + """ SELECT COUNT(*) FROM #pytest_null_arrays WHERE col1 IS NOT NULL OR col2 IS NOT NULL OR col3 IS NOT NULL OR col4 IS NOT NULL OR col5 IS NOT NULL OR col6 IS NOT NULL OR col7 IS NOT NULL OR col8 IS NOT NULL - """) + """ + ) non_null_count = cursor.fetchone()[0] assert non_null_count == 0, f"Expected 0 non-NULL values, got {non_null_count}" @@ -1983,7 +2019,8 @@ def test_executemany_concurrent_null_parameters(db_connection): # Create table with db_connection.cursor() as cursor: - cursor.execute(f""" + cursor.execute( + f""" IF OBJECT_ID('{table_name}', 'U') IS NOT NULL DROP TABLE {table_name} @@ -1995,7 +2032,8 @@ def test_executemany_concurrent_null_parameters(db_connection): col3 FLOAT, col4 DATETIME ) - """) + """ + ) db_connection.commit() # Execute multiple 
sequential insert operations @@ -2250,12 +2288,14 @@ def test_insert_data_for_join(cursor, db_connection): def test_join_operations(cursor): """Test join operations""" try: - cursor.execute(""" + cursor.execute( + """ SELECT e.name, d.department_name, p.project_name FROM #pytest_employees e JOIN #pytest_departments d ON e.department_id = d.department_id JOIN #pytest_projects p ON e.employee_id = p.employee_id - """) + """ + ) rows = cursor.fetchall() assert len(rows) == 3, "Join operation returned incorrect number of rows" assert rows[0] == [ @@ -2345,10 +2385,12 @@ def test_execute_stored_procedure_with_parameters(cursor): def test_execute_stored_procedure_without_parameters(cursor): """Test executing stored procedure without parameters""" try: - cursor.execute(""" + cursor.execute( + """ DECLARE @EmployeeID INT = 2 EXEC dbo.GetEmployeeProjects @EmployeeID - """) + """ + ) rows = cursor.fetchall() assert ( len(rows) == 1 @@ -2568,21 +2610,25 @@ def test_row_attribute_access(cursor, db_connection): """Test accessing row values by column name as attributes""" try: # Create test table with multiple columns - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_row_attr_test ( id INT PRIMARY KEY, name VARCHAR(50), email VARCHAR(100), age INT ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_row_attr_test (id, name, email, age) VALUES (1, 'John Doe', 'john@example.com', 30) - """) + """ + ) db_connection.commit() # Test attribute access @@ -2678,13 +2724,15 @@ def test_row_comparison_with_list(cursor, db_connection): def test_row_string_representation(cursor, db_connection): """Test Row string and repr representations""" try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_row_test ( id INT PRIMARY KEY, text_col NVARCHAR(50), null_col INT ) - """) + """ + ) db_connection.commit() cursor.execute( @@ -2717,13 +2765,15 @@ def test_row_string_representation(cursor, 
db_connection): def test_row_column_mapping(cursor, db_connection): """Test Row column name mapping""" try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_row_test ( FirstColumn INT PRIMARY KEY, Second_Column NVARCHAR(50), [Complex Name!] INT ) - """) + """ + ) db_connection.commit() cursor.execute( @@ -3206,10 +3256,12 @@ def test_execute_rowcount_chaining(cursor, db_connection): assert count == 1, "INSERT should affect 1 row" # Test multiple INSERT rowcount chaining - count = cursor.execute(""" + count = cursor.execute( + """ INSERT INTO #test_chaining (id, value) VALUES (2, 'test2'), (3, 'test3'), (4, 'test4') - """).rowcount + """ + ).rowcount assert count == 3, "Multiple INSERT should affect 3 rows" # Test UPDATE rowcount chaining @@ -3444,7 +3496,8 @@ def test_cursor_next_with_different_data_types(cursor, db_connection): """Test next() functionality with various data types""" try: # Create test table with various data types - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_next_types ( id INT, name NVARCHAR(50), @@ -3453,7 +3506,8 @@ def test_cursor_next_with_different_data_types(cursor, db_connection): created_date DATE, created_time DATETIME ) - """) + """ + ) db_connection.commit() # Insert test data with different types @@ -3645,14 +3699,16 @@ def test_execute_chaining_compatibility_examples(cursor, db_connection): """Test real-world chaining examples""" try: # Create users table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #users ( user_id INT IDENTITY(1,1) PRIMARY KEY, user_name NVARCHAR(50), last_logon DATETIME, status NVARCHAR(20) ) - """) + """ + ) db_connection.commit() # Insert test users @@ -4351,7 +4407,8 @@ def test_fetchval_different_data_types(cursor, db_connection): try: # Create test table with different data types drop_table_if_exists(cursor, "#pytest_fetchval_types") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_fetchval_types ( int_col INTEGER, float_col FLOAT, @@ -4363,14 
+4420,17 @@ def test_fetchval_different_data_types(cursor, db_connection): date_col DATE, time_col TIME ) - """) + """ + ) # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_fetchval_types VALUES (123, 45.67, 89.12, 'ASCII text', N'Unicode text', 1, '2024-05-20 12:34:56', '2024-05-20', '12:34:56') - """) + """ + ) db_connection.commit() # Test different data types @@ -5668,21 +5728,25 @@ def test_cursor_rollback_data_consistency(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_rollback_orders") drop_table_if_exists(cursor, "#pytest_rollback_customers") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_rollback_customers ( id INTEGER PRIMARY KEY, name VARCHAR(50) ) - """) + """ + ) - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_rollback_orders ( id INTEGER PRIMARY KEY, customer_id INTEGER, amount DECIMAL(10,2), FOREIGN KEY (customer_id) REFERENCES #pytest_rollback_customers(id) ) - """) + """ + ) cursor.commit() # Insert initial data @@ -6164,26 +6228,32 @@ def test_tables_setup(cursor, db_connection): cursor.execute("DROP VIEW IF EXISTS pytest_tables_schema.test_view") # Create regular table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_tables_schema.regular_table ( id INT PRIMARY KEY, name VARCHAR(100) ) - """) + """ + ) # Create another table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_tables_schema.another_table ( id INT PRIMARY KEY, description VARCHAR(200) ) - """) + """ + ) # Create a view - cursor.execute(""" + cursor.execute( + """ CREATE VIEW pytest_tables_schema.test_view AS SELECT id, name FROM pytest_tables_schema.regular_table - """) + """ + ) db_connection.commit() except Exception as e: @@ -6535,12 +6605,14 @@ def test_emoji_round_trip(cursor, db_connection): "1🚀' OR '1'='1", ] - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_emoji_test ( id INT IDENTITY PRIMARY KEY, content NVARCHAR(MAX) ); - """) + """ + ) 
db_connection.commit() for text in test_inputs: @@ -6692,14 +6764,16 @@ def test_empty_values_fetchmany(cursor, db_connection): try: # Create comprehensive test table drop_table_if_exists(cursor, "#pytest_fetchmany_empty") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_fetchmany_empty ( id INT, varchar_col VARCHAR(50), nvarchar_col NVARCHAR(50), binary_col VARBINARY(50) ) - """) + """ + ) db_connection.commit() # Insert multiple rows with empty values @@ -6824,7 +6898,8 @@ def test_batch_fetch_empty_values_no_assertion_failure(cursor, db_connection): try: # Create comprehensive test table drop_table_if_exists(cursor, "#pytest_batch_empty_assertions") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_batch_empty_assertions ( id INT, empty_varchar VARCHAR(100), @@ -6834,24 +6909,29 @@ def test_batch_fetch_empty_values_no_assertion_failure(cursor, db_connection): null_nvarchar NVARCHAR(100), null_binary VARBINARY(100) ) - """) + """ + ) db_connection.commit() # Insert rows with mix of empty and NULL values - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_batch_empty_assertions VALUES (1, '', '', 0x, NULL, NULL, NULL), (2, '', '', 0x, NULL, NULL, NULL), (3, '', '', 0x, NULL, NULL, NULL) - """) + """ + ) db_connection.commit() # Test fetchall - should not trigger any assertions about dataLen - cursor.execute(""" + cursor.execute( + """ SELECT empty_varchar, empty_nvarchar, empty_binary, null_varchar, null_nvarchar, null_binary FROM #pytest_batch_empty_assertions ORDER BY id - """) + """ + ) rows = cursor.fetchall() assert len(rows) == 3, "Should return 3 rows" @@ -6868,10 +6948,12 @@ def test_batch_fetch_empty_values_no_assertion_failure(cursor, db_connection): assert row[5] is None, f"Row {i+1} null_binary should be None" # Test fetchmany - should also not trigger assertions - cursor.execute(""" + cursor.execute( + """ SELECT empty_nvarchar, empty_binary FROM #pytest_batch_empty_assertions ORDER BY id - """) + """ + ) 
# Fetch in batches first_batch = cursor.fetchmany(2) @@ -6911,13 +6993,15 @@ def test_executemany_utf16_length_validation(cursor, db_connection): try: # Create test table with small column size to trigger validation drop_table_if_exists(cursor, "#pytest_utf16_validation") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_utf16_validation ( id INT, short_text NVARCHAR(5), -- Small column to test length validation medium_text NVARCHAR(10) -- Medium column for edge cases ) - """) + """ + ) db_connection.commit() # Test 1: Valid strings that should work on all platforms @@ -7063,12 +7147,14 @@ def test_binary_data_over_8000_bytes(cursor, db_connection): try: # Create test table with VARBINARY(MAX) to handle large data drop_table_if_exists(cursor, "#pytest_small_binary") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_small_binary ( id INT, large_binary VARBINARY(MAX) ) - """) + """ + ) # Test data that fits within both parameter and fetch limits (< 4096 bytes) medium_data = b"B" * 3000 # 3,000 bytes - under both limits @@ -7102,12 +7188,14 @@ def test_varbinarymax_insert_fetch(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_varbinarymax") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_varbinarymax ( id INT, binary_data VARBINARY(MAX) ) - """) + """ + ) # Prepare test data - use moderate sizes to guarantee LOB fetch path (line 867-868) efficiently test_data = [ @@ -7174,12 +7262,14 @@ def test_all_empty_binaries(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_all_empty_binary") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_all_empty_binary ( id INT, empty_binary VARBINARY(100) ) - """) + """ + ) # Insert multiple rows with only empty binary data test_data = [ @@ -7218,12 +7308,14 @@ def test_mixed_bytes_and_bytearray_types(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, 
"#pytest_mixed_binary_types") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_mixed_binary_types ( id INT, binary_data VARBINARY(100) ) - """) + """ + ) # Test data mixing bytes and bytearray for the same column test_data = [ @@ -7278,12 +7370,14 @@ def test_binary_mostly_small_one_large(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_mixed_size_binary") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_mixed_size_binary ( id INT, binary_data VARBINARY(MAX) ) - """) + """ + ) # Create large binary value within both parameter and fetch limits (< 4096 bytes) large_binary = b"X" * 3500 # 3,500 bytes - under both limits @@ -7343,12 +7437,14 @@ def test_varbinarymax_insert_fetch_null(cursor, db_connection): """Test insertion and retrieval of NULL value in VARBINARY(MAX) column.""" try: drop_table_if_exists(cursor, "#pytest_varbinarymax_null") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_varbinarymax_null ( id INT, binary_data VARBINARY(MAX) ) - """) + """ + ) # Insert a row with NULL for binary_data cursor.execute( @@ -7378,13 +7474,15 @@ def test_sql_double_type(cursor, db_connection): """Test SQL_DOUBLE type (FLOAT(53)) to cover line 3213 in dispatcher.""" try: drop_table_if_exists(cursor, "#pytest_double_type") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_double_type ( id INT PRIMARY KEY, double_col FLOAT(53), float_col FLOAT ) - """) + """ + ) # Insert test data with various double precision values test_data = [ @@ -7430,15 +7528,19 @@ def test_sql_double_type(cursor, db_connection): def test_null_guid_type(cursor, db_connection): """Test NULL UNIQUEIDENTIFIER (GUID) to cover lines 3376-3377.""" + original_native_uuid = mssql_python.native_uuid try: + mssql_python.native_uuid = True drop_table_if_exists(cursor, "#pytest_null_guid") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_null_guid ( id INT PRIMARY KEY, guid_col 
UNIQUEIDENTIFIER, guid_nullable UNIQUEIDENTIFIER NULL ) - """) + """ + ) # Insert test data with NULL and non-NULL GUIDs test_guid = uuid.uuid4() @@ -7481,6 +7583,7 @@ def test_null_guid_type(cursor, db_connection): pytest.fail(f"NULL GUID type test failed: {e}") finally: + mssql_python.native_uuid = original_native_uuid drop_table_if_exists(cursor, "#pytest_null_guid") db_connection.commit() @@ -7490,12 +7593,14 @@ def test_only_null_and_empty_binary(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_null_empty_binary") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_null_empty_binary ( id INT, binary_data VARBINARY(100) ) - """) + """ + ) # Test data with only NULL and empty values test_data = [ @@ -7818,7 +7923,8 @@ def test_money_smallmoney_insert_fetch(cursor, db_connection): """Test inserting and retrieving valid MONEY and SMALLMONEY values including boundaries and typical data""" try: drop_table_if_exists(cursor, "#pytest_money_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, @@ -7826,7 +7932,8 @@ def test_money_smallmoney_insert_fetch(cursor, db_connection): d DECIMAL(19,4), n NUMERIC(10,4) ) - """) + """ + ) db_connection.commit() # Max values @@ -7916,13 +8023,15 @@ def test_money_smallmoney_insert_fetch(cursor, db_connection): def test_money_smallmoney_null_handling(cursor, db_connection): """Test that NULL values for MONEY and SMALLMONEY are stored and retrieved correctly""" try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """) + """ + ) db_connection.commit() # Row with both NULLs @@ -7972,13 +8081,15 @@ def test_money_smallmoney_null_handling(cursor, db_connection): def test_money_smallmoney_roundtrip(cursor, db_connection): """Test inserting and retrieving MONEY and SMALLMONEY using decimal.Decimal roundtrip""" try: - 
cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """) + """ + ) db_connection.commit() values = (decimal.Decimal("12345.6789"), decimal.Decimal("987.6543")) @@ -8002,13 +8113,15 @@ def test_money_smallmoney_boundaries(cursor, db_connection): """Test boundary values for MONEY and SMALLMONEY types are handled correctly""" try: drop_table_if_exists(cursor, "#pytest_money_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """) + """ + ) db_connection.commit() # Insert max boundary @@ -8048,13 +8161,15 @@ def test_money_smallmoney_boundaries(cursor, db_connection): def test_money_smallmoney_invalid_values(cursor, db_connection): """Test that invalid or out-of-range MONEY and SMALLMONEY values raise errors""" try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """) + """ + ) db_connection.commit() # Out of range MONEY @@ -8085,13 +8200,15 @@ def test_money_smallmoney_invalid_values(cursor, db_connection): def test_money_smallmoney_roundtrip_executemany(cursor, db_connection): """Test inserting and retrieving MONEY and SMALLMONEY using executemany with decimal.Decimal""" try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """) + """ + ) db_connection.commit() test_data = [ @@ -8125,13 +8242,15 @@ def test_money_smallmoney_roundtrip_executemany(cursor, db_connection): def test_money_smallmoney_executemany_null_handling(cursor, db_connection): """Test inserting NULLs into MONEY and SMALLMONEY using executemany""" try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """) + """ + ) db_connection.commit() rows = [ @@ -8189,12 +8308,14 @@ def 
test_uuid_insert_and_select_none(cursor, db_connection): table_name = "#pytest_uuid_nullable" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, name NVARCHAR(50) ) - """) + """ + ) db_connection.commit() # Insert a row with None for the UUID @@ -8215,15 +8336,19 @@ def test_uuid_insert_and_select_none(cursor, db_connection): def test_insert_multiple_uuids(cursor, db_connection): """Test inserting multiple UUIDs and verifying retrieval.""" + original_native_uuid = mssql_python.native_uuid table_name = "#pytest_uuid_multiple" try: + mssql_python.native_uuid = True cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER PRIMARY KEY, description NVARCHAR(50) ) - """) + """ + ) db_connection.commit() # Prepare test data @@ -8250,21 +8375,26 @@ def test_insert_multiple_uuids(cursor, db_connection): retrieved_uuid == expected_uuid ), f"UUID mismatch for '{retrieved_desc}': expected {expected_uuid}, got {retrieved_uuid}" finally: + mssql_python.native_uuid = original_native_uuid cursor.execute(f"DROP TABLE IF EXISTS {table_name}") db_connection.commit() def test_fetchmany_uuids(cursor, db_connection): """Test fetching multiple UUID rows with fetchmany().""" + original_native_uuid = mssql_python.native_uuid table_name = "#pytest_uuid_fetchmany" try: + mssql_python.native_uuid = True cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER PRIMARY KEY, description NVARCHAR(50) ) - """) + """ + ) db_connection.commit() uuids_to_insert = {f"Item {i}": uuid.uuid4() for i in range(10)} @@ -8291,6 +8421,7 @@ def test_fetchmany_uuids(cursor, db_connection): expected_uuid = uuids_to_insert[retrieved_desc] assert retrieved_uuid == expected_uuid finally: + mssql_python.native_uuid = 
original_native_uuid cursor.execute(f"DROP TABLE IF EXISTS {table_name}") db_connection.commit() @@ -8300,12 +8431,14 @@ def test_uuid_insert_with_none(cursor, db_connection): table_name = "#pytest_uuid_none" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, name NVARCHAR(50) ) - """) + """ + ) db_connection.commit() cursor.execute(f"INSERT INTO {table_name} (id, name) VALUES (?, ?)", [None, "Alice"]) @@ -8368,8 +8501,10 @@ def test_duplicate_uuid_inserts(cursor, db_connection): def test_extreme_uuids(cursor, db_connection): """Test inserting extreme but valid UUIDs.""" + original_native_uuid = mssql_python.native_uuid table_name = "#pytest_uuid_extreme" try: + mssql_python.native_uuid = True cursor.execute(f"DROP TABLE IF EXISTS {table_name}") cursor.execute(f"CREATE TABLE {table_name} (id UNIQUEIDENTIFIER)") db_connection.commit() @@ -8390,6 +8525,7 @@ def test_extreme_uuids(cursor, db_connection): for uid in extreme_uuids: assert uid in fetched_uuids, f"Extreme UUID {uid} not retrieved correctly" finally: + mssql_python.native_uuid = original_native_uuid cursor.execute(f"DROP TABLE IF EXISTS {table_name}") db_connection.commit() @@ -8401,12 +8537,14 @@ def test_executemany_uuid_insert_and_select(cursor, db_connection): try: # Drop and create a temporary table for the test cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER PRIMARY KEY, description NVARCHAR(50) ) - """) + """ + ) db_connection.commit() # Generate data for insertion @@ -8456,12 +8594,14 @@ def test_executemany_uuid_roundtrip_fixed_value(cursor, db_connection): table_name = "#pytest_uuid_fixed" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, description NVARCHAR(50) ) - """) + """ + ) 
db_connection.commit() fixed_uuid = uuid.UUID("12345678-1234-5678-1234-567812345678") @@ -8502,7 +8642,8 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -8510,14 +8651,17 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """) - db_connection.commit() + """ + ) + db_connection.commit() # Test with default separator first cursor.execute("SELECT * FROM #pytest_decimal_multi_test") @@ -8553,19 +8697,23 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """) + """ + ) db_connection.commit() # Test with default separator @@ -8604,12 +8752,14 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test values with default separator (.) 
@@ -8694,21 +8844,25 @@ def test_lowercase_attribute(cursor, db_connection): try: # Create a test table with mixed-case column names - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lowercase_test ( ID INT PRIMARY KEY, UserName VARCHAR(50), EMAIL_ADDRESS VARCHAR(100), PhoneNumber VARCHAR(20) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_lowercase_test (ID, UserName, EMAIL_ADDRESS, PhoneNumber) VALUES (1, 'JohnDoe', 'john@example.com', '555-1234') - """) + """ + ) db_connection.commit() # First test with lowercase=False (default) @@ -8763,12 +8917,14 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test values with default separator (.) @@ -8850,7 +9006,8 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -8858,13 +9015,16 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """) + """ + ) db_connection.commit() # Test with default separator first @@ -8901,19 +9061,23 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + 
cursor.execute( + """ INSERT INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """) + """ + ) db_connection.commit() # Test with default separator @@ -9426,21 +9590,25 @@ def test_lowercase_attribute(cursor, db_connection): try: # Create a test table with mixed-case column names - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lowercase_test ( ID INT PRIMARY KEY, UserName VARCHAR(50), EMAIL_ADDRESS VARCHAR(100), PhoneNumber VARCHAR(20) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_lowercase_test (ID, UserName, EMAIL_ADDRESS, PhoneNumber) VALUES (1, 'JohnDoe', 'john@example.com', '555-1234') - """) + """ + ) db_connection.commit() # First test with lowercase=False (default) @@ -9495,12 +9663,14 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test values with default separator (.) 
@@ -9582,7 +9752,8 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -9590,13 +9761,16 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """) + """ + ) db_connection.commit() # Test with default separator first @@ -9633,19 +9807,23 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """) + """ + ) db_connection.commit() # Test with default separator @@ -9684,12 +9862,14 @@ def test_cursor_setinputsizes_basic(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_inputsizes ( string_col NVARCHAR(100), int_col INT ) - """) + """ + ) # Set input sizes for parameters cursor.setinputsizes([(mssql_python.SQL_WVARCHAR, 100, 0), (mssql_python.SQL_INTEGER, 0, 0)]) @@ -9715,13 +9895,15 @@ def test_cursor_setinputsizes_with_executemany_float(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_float") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_inputsizes_float ( id INT, name NVARCHAR(50), price REAL /* Use REAL instead of DECIMAL */ ) - """) + """ + ) # Prepare data with float values data = [(1, "Item 1", 10.99), 
(2, "Item 2", 20.50), (3, "Item 3", 30.75)] @@ -9758,12 +9940,14 @@ def test_cursor_setinputsizes_reset(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_reset") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_inputsizes_reset ( col1 NVARCHAR(100), col2 INT ) - """) + """ + ) # Set input sizes for parameters cursor.setinputsizes([(mssql_python.SQL_WVARCHAR, 100, 0), (mssql_python.SQL_INTEGER, 0, 0)]) @@ -9798,12 +9982,14 @@ def test_cursor_setinputsizes_override_inference(db_connection): # Create a test table with specific types cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_override") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_inputsizes_override ( small_int SMALLINT, big_text NVARCHAR(MAX) ) - """) + """ + ) # Set input sizes that override the default inference # For SMALLINT, use a valid precision value (5 is typical for SMALLINT) @@ -9859,13 +10045,15 @@ def test_setinputsizes_parameter_count_mismatch_fewer(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_mismatch") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_inputsizes_mismatch ( col1 INT, col2 NVARCHAR(100), col3 FLOAT ) - """) + """ + ) # Set fewer input sizes than parameters cursor.setinputsizes( @@ -9908,12 +10096,14 @@ def test_setinputsizes_parameter_count_mismatch_more(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_mismatch") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_inputsizes_mismatch ( col1 INT, col2 NVARCHAR(100) ) - """) + """ + ) # Set more input sizes than parameters cursor.setinputsizes( @@ -9948,7 +10138,8 @@ def test_setinputsizes_with_null_values(db_connection): # Create a test table with multiple data types cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_null") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_inputsizes_null ( int_col INT, string_col 
NVARCHAR(100), @@ -9956,7 +10147,8 @@ def test_setinputsizes_with_null_values(db_connection): date_col DATE, binary_col VARBINARY(100) ) - """) + """ + ) # Set input sizes for all columns cursor.setinputsizes( @@ -10259,15 +10451,18 @@ def test_procedures_setup(cursor, db_connection): ) # Create test stored procedures - cursor.execute(""" + cursor.execute( + """ CREATE OR ALTER PROCEDURE pytest_proc_schema.test_proc1 AS BEGIN SELECT 1 AS result END - """) + """ + ) - cursor.execute(""" + cursor.execute( + """ CREATE OR ALTER PROCEDURE pytest_proc_schema.test_proc2 @param1 INT, @param2 VARCHAR(50) OUTPUT @@ -10276,7 +10471,8 @@ def test_procedures_setup(cursor, db_connection): SELECT @param2 = 'Output ' + CAST(@param1 AS VARCHAR(10)) RETURN @param1 END - """) + """ + ) db_connection.commit() except Exception as e: @@ -10394,7 +10590,8 @@ def test_procedures_with_parameters(cursor, db_connection): """Test that procedures() correctly reports parameter information""" try: # Create a simpler procedure with basic parameters - cursor.execute(""" + cursor.execute( + """ CREATE OR ALTER PROCEDURE pytest_proc_schema.test_params_proc @in1 INT, @in2 VARCHAR(50) @@ -10402,7 +10599,8 @@ def test_procedures_with_parameters(cursor, db_connection): BEGIN SELECT @in1 AS value1, @in2 AS value2 END - """) + """ + ) db_connection.commit() # Get procedure info @@ -10436,23 +10634,28 @@ def test_procedures_result_set_info(cursor, db_connection): """Test that procedures() reports information about result sets""" try: # Create procedures with different result set patterns - cursor.execute(""" + cursor.execute( + """ CREATE OR ALTER PROCEDURE pytest_proc_schema.test_no_results AS BEGIN DECLARE @x INT = 1 END - """) + """ + ) - cursor.execute(""" + cursor.execute( + """ CREATE OR ALTER PROCEDURE pytest_proc_schema.test_one_result AS BEGIN SELECT 1 AS col1, 'test' AS col2 END - """) + """ + ) - cursor.execute(""" + cursor.execute( + """ CREATE OR ALTER PROCEDURE 
pytest_proc_schema.test_multiple_results AS BEGIN @@ -10460,7 +10663,8 @@ def test_procedures_result_set_info(cursor, db_connection): SELECT 'test' AS result2 SELECT GETDATE() AS result3 END - """) + """ + ) db_connection.commit() # Get procedure info for all test procedures @@ -10542,15 +10746,18 @@ def test_foreignkeys_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_fk_schema.customers") # Create parent table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_fk_schema.customers ( customer_id INT PRIMARY KEY, customer_name VARCHAR(100) NOT NULL ) - """) + """ + ) # Create child table with foreign key - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_fk_schema.orders ( order_id INT PRIMARY KEY, order_date DATETIME NOT NULL, @@ -10559,18 +10766,23 @@ def test_foreignkeys_setup(cursor, db_connection): CONSTRAINT FK_Orders_Customers FOREIGN KEY (customer_id) REFERENCES pytest_fk_schema.customers (customer_id) ) - """) + """ + ) # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO pytest_fk_schema.customers (customer_id, customer_name) VALUES (1, 'Test Customer 1'), (2, 'Test Customer 2') - """) + """ + ) - cursor.execute(""" + cursor.execute( + """ INSERT INTO pytest_fk_schema.orders (order_id, order_date, customer_id, total_amount) VALUES (101, GETDATE(), 1, 150.00), (102, GETDATE(), 2, 250.50) - """) + """ + ) db_connection.commit() except Exception as e: @@ -10798,17 +11010,20 @@ def test_foreignkeys_multiple_column_fk(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_fk_schema.product_variants") # Create parent table with composite primary key - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_fk_schema.product_variants ( product_id INT NOT NULL, variant_id INT NOT NULL, variant_name VARCHAR(100) NOT NULL, PRIMARY KEY (product_id, variant_id) ) - """) + """ + ) # Create child table with composite foreign key - cursor.execute(""" + cursor.execute( + """ 
CREATE TABLE pytest_fk_schema.order_details ( order_id INT NOT NULL, product_id INT NOT NULL, @@ -10818,7 +11033,8 @@ def test_foreignkeys_multiple_column_fk(cursor, db_connection): CONSTRAINT FK_OrderDetails_ProductVariants FOREIGN KEY (product_id, variant_id) REFERENCES pytest_fk_schema.product_variants (product_id, variant_id) ) - """) + """ + ) db_connection.commit() @@ -10883,23 +11099,27 @@ def test_primarykeys_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_pk_schema.composite_pk_test") # Create table with simple primary key - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_pk_schema.single_pk_test ( id INT PRIMARY KEY, name VARCHAR(100) NOT NULL, description VARCHAR(200) NULL ) - """) + """ + ) # Create table with composite primary key - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_pk_schema.composite_pk_test ( dept_id INT NOT NULL, emp_id INT NOT NULL, hire_date DATE NOT NULL, CONSTRAINT PK_composite_test PRIMARY KEY (dept_id, emp_id) ) - """) + """ + ) db_connection.commit() except Exception as e: @@ -11210,13 +11430,15 @@ def test_rowcount(cursor, db_connection): cursor.execute("INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe3');") assert cursor.rowcount == 1, "Rowcount should be 1 after third insert" - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe4'), ('JohnDoe5'), ('JohnDoe6'); - """) + """ + ) assert cursor.rowcount == 3, "Rowcount should be 3 after inserting multiple rows" cursor.execute("SELECT * FROM #pytest_test_rowcount;") @@ -11251,26 +11473,31 @@ def test_specialcolumns_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_special_schema.identity_test") # Create table with primary key (for rowIdColumns) - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_special_schema.rowid_test ( id INT PRIMARY KEY, name NVARCHAR(100) NOT NULL, unique_col NVARCHAR(100) UNIQUE, non_unique_col NVARCHAR(100) 
) - """) + """ + ) # Create table with rowversion column (for rowVerColumns) - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_special_schema.timestamp_test ( id INT PRIMARY KEY, name NVARCHAR(100) NOT NULL, last_updated ROWVERSION ) - """) + """ + ) # Create table with multiple unique identifiers - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_special_schema.multiple_unique_test ( id INT NOT NULL, code VARCHAR(10) NOT NULL, @@ -11278,16 +11505,19 @@ def test_specialcolumns_setup(cursor, db_connection): order_number VARCHAR(20) UNIQUE, CONSTRAINT PK_multiple_unique_test PRIMARY KEY (id, code) ) - """) + """ + ) # Create table with identity column - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_special_schema.identity_test ( id INT IDENTITY(1,1) PRIMARY KEY, name NVARCHAR(100) NOT NULL, last_modified DATETIME DEFAULT GETDATE() ) - """) + """ + ) db_connection.commit() except Exception as e: @@ -11406,12 +11636,14 @@ def test_rowid_columns_nullable(cursor, db_connection): """Test rowIdColumns with nullable parameter""" try: # First create a table with nullable unique column and non-nullable PK - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_special_schema.nullable_test ( id INT PRIMARY KEY, -- PK can't be nullable in SQL Server data NVARCHAR(100) NULL ) - """) + """ + ) db_connection.commit() # Test with nullable=True (default) @@ -11504,12 +11736,14 @@ def test_rowver_columns_nullable(cursor, db_connection): """Test rowVerColumns with nullable parameter (not expected to have effect)""" try: # First create a table with rowversion column - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_special_schema.nullable_rowver_test ( id INT PRIMARY KEY, ts ROWVERSION ) - """) + """ + ) db_connection.commit() # Test with nullable=True (default) @@ -11618,7 +11852,8 @@ def test_statistics_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_stats_schema.empty_stats_test") # 
Create test table with various indexes - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_stats_schema.stats_test ( id INT PRIMARY KEY, name VARCHAR(100) NOT NULL, @@ -11627,25 +11862,32 @@ def test_statistics_setup(cursor, db_connection): salary DECIMAL(10, 2) NULL, hire_date DATE NOT NULL ) - """) + """ + ) # Create a non-unique index - cursor.execute(""" + cursor.execute( + """ CREATE INDEX IX_stats_test_dept_date ON pytest_stats_schema.stats_test (department, hire_date) - """) + """ + ) # Create a unique index on multiple columns - cursor.execute(""" + cursor.execute( + """ CREATE UNIQUE INDEX UX_stats_test_name_dept ON pytest_stats_schema.stats_test (name, department) - """) + """ + ) # Create an empty table for testing - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_stats_schema.empty_stats_test ( id INT PRIMARY KEY, data VARCHAR(100) NULL ) - """) + """ + ) db_connection.commit() except Exception as e: @@ -11910,7 +12152,8 @@ def test_columns_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_cols_schema.columns_special_test") # Create test table with various column types - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_cols_schema.columns_test ( id INT PRIMARY KEY, name NVARCHAR(100) NOT NULL, @@ -11922,10 +12165,12 @@ def test_columns_setup(cursor, db_connection): notes TEXT NULL, [computed_col] AS (name + ' - ' + CAST(id AS VARCHAR(10))) ) - """) + """ + ) # Create table with special column names and edge cases - fix the problematic column name - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_cols_schema.columns_special_test ( [ID] INT PRIMARY KEY, [User Name] NVARCHAR(100) NULL, @@ -11937,7 +12182,8 @@ def test_columns_setup(cursor, db_connection): [Column/With/Slashes] VARCHAR(20) NULL, [Column_With_Underscores] VARCHAR(20) NULL -- Changed from problematic nested brackets ) - """) + """ + ) db_connection.commit() except Exception as e: @@ -12401,21 +12647,25 @@ def 
test_lowercase_attribute(cursor, db_connection): try: # Create a test table with mixed-case column names - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lowercase_test ( ID INT PRIMARY KEY, UserName VARCHAR(50), EMAIL_ADDRESS VARCHAR(100), PhoneNumber VARCHAR(20) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_lowercase_test (ID, UserName, EMAIL_ADDRESS, PhoneNumber) VALUES (1, 'JohnDoe', 'john@example.com', '555-1234') - """) + """ + ) db_connection.commit() # First test with lowercase=False (default) @@ -12470,12 +12720,14 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test values with default separator (.) @@ -12557,7 +12809,8 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -12565,13 +12818,16 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """) + """ + ) db_connection.commit() # Test with default separator first @@ -12608,19 +12864,23 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT 
INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """) + """ + ) db_connection.commit() # Test with default separator @@ -12657,12 +12917,14 @@ def test_executemany_with_uuids(cursor, db_connection): table_name = "#pytest_uuid_batch" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, description NVARCHAR(50) ) - """) + """ + ) db_connection.commit() # Prepare test data: mix of UUIDs and None @@ -12810,11 +13072,13 @@ def test_date_string_parameter_binding(cursor, db_connection): table_name = "#pytest_date_string" try: drop_table_if_exists(cursor, table_name) - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( a_column VARCHAR(20) ) - """) + """ + ) cursor.execute(f"INSERT INTO {table_name} (a_column) VALUES ('string1'), ('string2')") db_connection.commit() @@ -12841,11 +13105,13 @@ def test_time_string_parameter_binding(cursor, db_connection): table_name = "#pytest_time_string" try: drop_table_if_exists(cursor, table_name) - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( time_col VARCHAR(22) ) - """) + """ + ) cursor.execute(f"INSERT INTO {table_name} (time_col) VALUES ('prefix_14:30:45_suffix')") db_connection.commit() @@ -12870,11 +13136,13 @@ def test_datetime_string_parameter_binding(cursor, db_connection): table_name = "#pytest_datetime_string" try: drop_table_if_exists(cursor, table_name) - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( datetime_col VARCHAR(33) ) - """) + """ + ) cursor.execute( f"INSERT INTO {table_name} (datetime_col) VALUES ('prefix_2025-08-12T14:30:45_suffix')" ) @@ -13738,12 +14006,14 @@ def test_column_metadata_error_handling(cursor): """Test column metadata retrieval error handling (Lines 1156-1167).""" # Execute a complex query that might stress metadata retrieval - cursor.execute(""" + cursor.execute( + """ SELECT CAST(1 as INT) as int_col, CAST('test' 
as NVARCHAR(100)) as nvarchar_col, CAST(NEWID() as UNIQUEIDENTIFIER) as guid_col - """) + """ + ) # This should exercise the metadata retrieval code paths # If there are any errors, they should be logged but not crash @@ -13859,12 +14129,14 @@ def test_row_uuid_processing_with_braces(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_uuid_braces") # Create table with UNIQUEIDENTIFIER column - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_uuid_braces ( id INT IDENTITY(1,1), guid_col UNIQUEIDENTIFIER ) - """) + """ + ) # Insert a GUID with braces (this is how SQL Server often returns them) test_guid = "12345678-1234-5678-9ABC-123456789ABC" @@ -13908,13 +14180,15 @@ def test_row_uuid_processing_sql_guid_type(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_sql_guid_type") # Create table with UNIQUEIDENTIFIER column - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_sql_guid_type ( id INT, guid_col UNIQUEIDENTIFIER ) - """) - + """ + ) + # Insert test data test_guid = "ABCDEF12-3456-7890-ABCD-1234567890AB" cursor.execute( @@ -13959,12 +14233,14 @@ def test_row_output_converter_overflow_error(cursor, db_connection): try: # Create a table with integer column drop_table_if_exists(cursor, "#pytest_overflow_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_overflow_test ( id INT, small_int TINYINT -- TINYINT can only hold 0-255 ) - """) + """ + ) # Insert a valid value first cursor.execute("INSERT INTO #pytest_overflow_test (id, small_int) VALUES (?, ?)", [1, 100]) @@ -14014,12 +14290,14 @@ def test_row_output_converter_general_exception(cursor, db_connection): try: # Create a table with string column drop_table_if_exists(cursor, "#pytest_exception_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_exception_test ( id INT, text_col VARCHAR(50) ) - """) + """ + ) # Insert test data cursor.execute( @@ -14070,12 +14348,14 @@ def test_row_cursor_log_method_availability(cursor, 
db_connection): try: # Create test data drop_table_if_exists(cursor, "#pytest_log_check") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_log_check ( id INT, value_col INT ) - """) + """ + ) cursor.execute("INSERT INTO #pytest_log_check (id, value_col) VALUES (?, ?)", [1, 42]) db_connection.commit() @@ -14103,7 +14383,8 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): """Test NULL handling for all numeric types to ensure processor functions handle NULLs correctly""" try: drop_table_if_exists(cursor, "#pytest_all_numeric_nulls") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_all_numeric_nulls ( int_col INT, bigint_col BIGINT, @@ -14113,7 +14394,8 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): real_col REAL, float_col FLOAT ) - """) + """ + ) db_connection.commit() # Insert row with all NULLs @@ -14155,14 +14437,16 @@ def test_lob_data_types(cursor, db_connection): """Test LOB (Large Object) data types to ensure LOB fallback paths are exercised""" try: drop_table_if_exists(cursor, "#pytest_lob_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lob_test ( id INT, text_lob VARCHAR(MAX), ntext_lob NVARCHAR(MAX), binary_lob VARBINARY(MAX) ) - """) + """ + ) db_connection.commit() # Create large data that will trigger LOB handling @@ -14195,12 +14479,14 @@ def test_lob_char_column_types(cursor, db_connection): """Test LOB fetching specifically for CHAR/VARCHAR columns (covers lines 3313-3314)""" try: drop_table_if_exists(cursor, "#pytest_lob_char") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lob_char ( id INT, char_lob VARCHAR(MAX) ) - """) + """ + ) db_connection.commit() # Create data large enough to trigger LOB path (>8000 bytes) @@ -14227,12 +14513,14 @@ def test_lob_wchar_column_types(cursor, db_connection): """Test LOB fetching specifically for WCHAR/NVARCHAR columns (covers lines 3358-3359)""" try: drop_table_if_exists(cursor, "#pytest_lob_wchar") - 
cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lob_wchar ( id INT, wchar_lob NVARCHAR(MAX) ) - """) + """ + ) db_connection.commit() # Create unicode data large enough to trigger LOB path (>4000 characters for NVARCHAR) @@ -14259,12 +14547,14 @@ def test_lob_binary_column_types(cursor, db_connection): """Test LOB fetching specifically for BINARY/VARBINARY columns (covers lines 3384-3385)""" try: drop_table_if_exists(cursor, "#pytest_lob_binary") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lob_binary ( id INT, binary_lob VARBINARY(MAX) ) - """) + """ + ) db_connection.commit() # Create binary data large enough to trigger LOB path (>8000 bytes) @@ -14291,14 +14581,16 @@ def test_zero_length_complex_types(cursor, db_connection): """Test zero-length data for complex types (covers lines 3531-3533)""" try: drop_table_if_exists(cursor, "#pytest_zero_length") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_zero_length ( id INT, empty_varchar VARCHAR(100), empty_nvarchar NVARCHAR(100), empty_binary VARBINARY(100) ) - """) + """ + ) db_connection.commit() # Insert empty (non-NULL) values @@ -14326,12 +14618,14 @@ def test_guid_with_nulls(cursor, db_connection): """Test GUID type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_guid_nulls") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_guid_nulls ( id INT, guid_col UNIQUEIDENTIFIER ) - """) + """ + ) db_connection.commit() # Insert NULL GUID @@ -14358,12 +14652,14 @@ def test_datetimeoffset_with_nulls(cursor, db_connection): """Test DATETIMEOFFSET type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_dto_nulls") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_dto_nulls ( id INT, dto_col DATETIMEOFFSET ) - """) + """ + ) db_connection.commit() # Insert NULL DATETIMEOFFSET @@ -14390,12 +14686,14 @@ def test_decimal_conversion_edge_cases(cursor, db_connection): """Test DECIMAL/NUMERIC type conversion 
including edge cases""" try: drop_table_if_exists(cursor, "#pytest_decimal_edge") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_edge ( id INT, dec_col DECIMAL(18, 4) ) - """) + """ + ) db_connection.commit() # Insert various decimal values including edge cases @@ -14516,7 +14814,8 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): """Test NULL handling for all numeric types to ensure processor functions handle NULLs correctly""" try: drop_table_if_exists(cursor, "#pytest_all_numeric_nulls") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_all_numeric_nulls ( int_col INT, bigint_col BIGINT, @@ -14526,7 +14825,8 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): real_col REAL, float_col FLOAT ) - """) + """ + ) db_connection.commit() # Insert row with all NULLs @@ -14568,14 +14868,16 @@ def test_lob_data_types(cursor, db_connection): """Test LOB (Large Object) data types to ensure LOB fallback paths are exercised""" try: drop_table_if_exists(cursor, "#pytest_lob_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lob_test ( id INT, text_lob VARCHAR(MAX), ntext_lob NVARCHAR(MAX), binary_lob VARBINARY(MAX) ) - """) + """ + ) db_connection.commit() # Create large data that will trigger LOB handling @@ -14608,12 +14910,14 @@ def test_lob_char_column_types(cursor, db_connection): """Test LOB fetching specifically for CHAR/VARCHAR columns (covers lines 3313-3314)""" try: drop_table_if_exists(cursor, "#pytest_lob_char") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lob_char ( id INT, char_lob VARCHAR(MAX) ) - """) + """ + ) db_connection.commit() # Create data large enough to trigger LOB path (>8000 bytes) @@ -14640,12 +14944,14 @@ def test_lob_wchar_column_types(cursor, db_connection): """Test LOB fetching specifically for WCHAR/NVARCHAR columns (covers lines 3358-3359)""" try: drop_table_if_exists(cursor, "#pytest_lob_wchar") - cursor.execute(""" + 
cursor.execute( + """ CREATE TABLE #pytest_lob_wchar ( id INT, wchar_lob NVARCHAR(MAX) ) - """) + """ + ) db_connection.commit() # Create unicode data large enough to trigger LOB path (>4000 characters for NVARCHAR) @@ -14672,12 +14978,14 @@ def test_lob_binary_column_types(cursor, db_connection): """Test LOB fetching specifically for BINARY/VARBINARY columns (covers lines 3384-3385)""" try: drop_table_if_exists(cursor, "#pytest_lob_binary") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lob_binary ( id INT, binary_lob VARBINARY(MAX) ) - """) + """ + ) db_connection.commit() # Create binary data large enough to trigger LOB path (>8000 bytes) @@ -14704,14 +15012,16 @@ def test_zero_length_complex_types(cursor, db_connection): """Test zero-length data for complex types (covers lines 3531-3533)""" try: drop_table_if_exists(cursor, "#pytest_zero_length") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_zero_length ( id INT, empty_varchar VARCHAR(100), empty_nvarchar NVARCHAR(100), empty_binary VARBINARY(100) ) - """) + """ + ) db_connection.commit() # Insert empty (non-NULL) values @@ -14739,12 +15049,14 @@ def test_guid_with_nulls(cursor, db_connection): """Test GUID type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_guid_nulls") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_guid_nulls ( id INT, guid_col UNIQUEIDENTIFIER ) - """) + """ + ) db_connection.commit() # Insert NULL GUID @@ -14771,12 +15083,14 @@ def test_datetimeoffset_with_nulls(cursor, db_connection): """Test DATETIMEOFFSET type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_dto_nulls") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_dto_nulls ( id INT, dto_col DATETIMEOFFSET ) - """) + """ + ) db_connection.commit() # Insert NULL DATETIMEOFFSET @@ -14803,12 +15117,14 @@ def test_decimal_conversion_edge_cases(cursor, db_connection): """Test DECIMAL/NUMERIC type conversion including edge cases""" try: 
drop_table_if_exists(cursor, "#pytest_decimal_edge") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_edge ( id INT, dec_col DECIMAL(18, 4) ) - """) + """ + ) db_connection.commit() # Insert various decimal values including edge cases @@ -14929,14 +15245,16 @@ def test_fetchall_with_integrity_constraint(cursor, db_connection): try: # Setup table with unique constraint cursor.execute("DROP TABLE IF EXISTS #uniq_cons_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #uniq_cons_test ( id INTEGER NOT NULL IDENTITY, data VARCHAR(50) NULL, PRIMARY KEY (id), UNIQUE (data) ) - """) + """ + ) # Insert initial row - should work cursor.execute( @@ -15019,3 +15337,530 @@ def test_close(db_connection): pytest.fail(f"Cursor close test failed: {e}") finally: cursor = db_connection.cursor() + + +# ───────────────────────────────────────────────────────────────────── +# native_uuid tests +# ───────────────────────────────────────────────────────────────────── + + +def test_native_uuid_true_returns_uuid_objects(db_connection): + """Test that with native_uuid=True, UNIQUEIDENTIFIER columns return uuid.UUID.""" + import uuid + + original_value = mssql_python.native_uuid + cursor = db_connection.cursor() + + try: + mssql_python.native_uuid = True + + drop_table_if_exists(cursor, "#test_native_uuid_true") + cursor.execute( + "CREATE TABLE #test_native_uuid_true (id UNIQUEIDENTIFIER, name NVARCHAR(50))" + ) + test_uuid = uuid.uuid4() + cursor.execute("INSERT INTO #test_native_uuid_true VALUES (?, ?)", [test_uuid, "test"]) + + # fetchone + cursor.execute("SELECT id, name FROM #test_native_uuid_true") + row = cursor.fetchone() + assert isinstance(row[0], uuid.UUID), f"Expected uuid.UUID, got {type(row[0])}" + assert row[0] == test_uuid + + # fetchall + cursor.execute("SELECT id, name FROM #test_native_uuid_true") + rows = cursor.fetchall() + assert isinstance(rows[0][0], uuid.UUID), f"Expected uuid.UUID, got {type(rows[0][0])}" + assert rows[0][0] == 
test_uuid + + # fetchmany + cursor.execute("SELECT id, name FROM #test_native_uuid_true") + rows = cursor.fetchmany(1) + assert isinstance(rows[0][0], uuid.UUID), f"Expected uuid.UUID, got {type(rows[0][0])}" + assert rows[0][0] == test_uuid + + finally: + mssql_python.native_uuid = original_value + drop_table_if_exists(cursor, "#test_native_uuid_true") + db_connection.commit() + + +def test_native_uuid_false_returns_strings(db_connection): + """Test that with native_uuid=False, UNIQUEIDENTIFIER columns return str.""" + import uuid + + original_value = mssql_python.native_uuid + cursor = db_connection.cursor() + + try: + mssql_python.native_uuid = False + + drop_table_if_exists(cursor, "#test_native_uuid_false") + cursor.execute( + "CREATE TABLE #test_native_uuid_false (id UNIQUEIDENTIFIER, name NVARCHAR(50))" + ) + test_uuid = uuid.uuid4() + cursor.execute("INSERT INTO #test_native_uuid_false VALUES (?, ?)", [test_uuid, "test"]) + + # fetchone + cursor.execute("SELECT id, name FROM #test_native_uuid_false") + row = cursor.fetchone() + assert isinstance(row[0], str), f"With native_uuid=False, expected str, got {type(row[0])}" + assert row[0] == str(test_uuid).upper(), f"UUID string mismatch: {row[0]} != {str(test_uuid).upper()}" + + # fetchall + cursor.execute("SELECT id, name FROM #test_native_uuid_false") + rows = cursor.fetchall() + assert isinstance( + rows[0][0], str + ), f"With native_uuid=False, expected str, got {type(rows[0][0])}" + assert rows[0][0] == str(test_uuid).upper() + + # fetchmany + cursor.execute("SELECT id, name FROM #test_native_uuid_false") + rows = cursor.fetchmany(1) + assert isinstance( + rows[0][0], str + ), f"With native_uuid=False, expected str, got {type(rows[0][0])}" + assert rows[0][0] == str(test_uuid).upper() + + finally: + mssql_python.native_uuid = original_value + drop_table_if_exists(cursor, "#test_native_uuid_false") + db_connection.commit() + + +def test_native_uuid_false_string_operations(db_connection): + """Test that with 
native_uuid=False, string operations work on returned UUIDs.""" + import uuid + import json + + original_value = mssql_python.native_uuid + cursor = db_connection.cursor() + + try: + mssql_python.native_uuid = False + + drop_table_if_exists(cursor, "#test_uuid_str_ops") + cursor.execute("CREATE TABLE #test_uuid_str_ops (id UNIQUEIDENTIFIER)") + test_uuid = uuid.uuid4() + cursor.execute("INSERT INTO #test_uuid_str_ops VALUES (?)", [test_uuid]) + + cursor.execute("SELECT id FROM #test_uuid_str_ops") + row = cursor.fetchone() + guid_value = row[0] + + # These operations should all work without errors when native_uuid=False + assert isinstance(guid_value, str) + assert guid_value == str(test_uuid).upper() + assert guid_value.replace("-", "") == str(test_uuid).upper().replace("-", "") + assert guid_value.strip() == str(test_uuid).upper() + + # JSON serialization should work + payload = json.dumps({"id": guid_value}) + assert str(test_uuid).upper() in payload + + # String equality should work (pyodbc-compatible uppercase) + assert guid_value == str(test_uuid).upper() + + finally: + mssql_python.native_uuid = original_value + drop_table_if_exists(cursor, "#test_uuid_str_ops") + db_connection.commit() + + +def test_native_uuid_null_handling(db_connection): + """Test that NULL UNIQUEIDENTIFIER values remain None regardless of native_uuid setting.""" + original_value = mssql_python.native_uuid + cursor = db_connection.cursor() + + try: + drop_table_if_exists(cursor, "#test_uuid_null") + cursor.execute("CREATE TABLE #test_uuid_null (id INT, uuid_col UNIQUEIDENTIFIER)") + cursor.execute("INSERT INTO #test_uuid_null VALUES (1, NULL)") + + # Test with native_uuid=True + mssql_python.native_uuid = True + cursor.execute("SELECT * FROM #test_uuid_null") + row = cursor.fetchone() + assert row[1] is None, "NULL UUID should remain None with native_uuid=True" + + # Test with native_uuid=False + mssql_python.native_uuid = False + cursor.execute("SELECT * FROM #test_uuid_null") + row = 
cursor.fetchone() + assert row[1] is None, "NULL UUID should remain None with native_uuid=False" + + finally: + mssql_python.native_uuid = original_value + drop_table_if_exists(cursor, "#test_uuid_null") + db_connection.commit() + + +def test_native_uuid_multiple_rows(db_connection): + """Test native_uuid=False with multiple UUID rows via fetchall and fetchmany.""" + import uuid + + original_value = mssql_python.native_uuid + cursor = db_connection.cursor() + + try: + mssql_python.native_uuid = False + + drop_table_if_exists(cursor, "#test_uuid_multi") + cursor.execute("CREATE TABLE #test_uuid_multi (id UNIQUEIDENTIFIER, seq INT)") + + test_uuids = [uuid.uuid4() for _ in range(10)] + for i, uid in enumerate(test_uuids): + cursor.execute("INSERT INTO #test_uuid_multi VALUES (?, ?)", [uid, i]) + db_connection.commit() + + # fetchall — all should be strings + cursor.execute("SELECT id, seq FROM #test_uuid_multi ORDER BY seq") + rows = cursor.fetchall() + assert len(rows) == 10 + for i, row in enumerate(rows): + assert isinstance(row[0], str), f"Row {i}: expected str, got {type(row[0])}" + assert row[0] == str(test_uuids[i]).upper() + + # fetchmany — should also return strings + cursor.execute("SELECT id, seq FROM #test_uuid_multi ORDER BY seq") + rows = cursor.fetchmany(5) + assert len(rows) == 5 + for i, row in enumerate(rows): + assert isinstance(row[0], str), f"Row {i}: expected str, got {type(row[0])}" + + finally: + mssql_python.native_uuid = original_value + drop_table_if_exists(cursor, "#test_uuid_multi") + db_connection.commit() + + +def test_native_uuid_non_uuid_columns_unaffected(db_connection): + """Test that native_uuid=False does not affect non-UUID columns.""" + import uuid + + original_value = mssql_python.native_uuid + cursor = db_connection.cursor() + + try: + mssql_python.native_uuid = False + + drop_table_if_exists(cursor, "#test_uuid_other_cols") + cursor.execute( + """ + CREATE TABLE #test_uuid_other_cols ( + id UNIQUEIDENTIFIER, + int_col INT, + 
str_col NVARCHAR(50), + float_col FLOAT, + bit_col BIT + ) + """ + ) + test_uuid = uuid.uuid4() + cursor.execute( + "INSERT INTO #test_uuid_other_cols VALUES (?, ?, ?, ?, ?)", + [test_uuid, 42, "hello", 3.14, True], + ) + + cursor.execute("SELECT * FROM #test_uuid_other_cols") + row = cursor.fetchone() + + # UUID column should be str + assert isinstance(row[0], str), f"UUID col: expected str, got {type(row[0])}" + # Other columns should retain their types + assert isinstance(row[1], int), f"INT col: expected int, got {type(row[1])}" + assert isinstance(row[2], str), f"NVARCHAR col: expected str, got {type(row[2])}" + assert isinstance(row[3], float), f"FLOAT col: expected float, got {type(row[3])}" + assert isinstance(row[4], bool), f"BIT col: expected bool, got {type(row[4])}" + + finally: + mssql_python.native_uuid = original_value + drop_table_if_exists(cursor, "#test_uuid_other_cols") + db_connection.commit() + + +def test_native_uuid_setting_snapshot_at_execute(db_connection): + """Test that native_uuid is snapshotted at execute() time, not fetch() time.""" + import uuid + + original_value = mssql_python.native_uuid + cursor = db_connection.cursor() + + try: + drop_table_if_exists(cursor, "#test_uuid_snapshot") + cursor.execute("CREATE TABLE #test_uuid_snapshot (id UNIQUEIDENTIFIER)") + test_uuid = uuid.uuid4() + cursor.execute("INSERT INTO #test_uuid_snapshot VALUES (?)", [test_uuid]) + + # Execute with native_uuid=False + mssql_python.native_uuid = False + cursor.execute("SELECT id FROM #test_uuid_snapshot") + + # Change setting AFTER execute but BEFORE fetch + mssql_python.native_uuid = True + + # Should still return str because setting was snapshotted at execute() + row = cursor.fetchone() + assert isinstance(row[0], str), ( + "Setting should be snapshotted at execute() time. 
" f"Expected str, got {type(row[0])}" + ) + + finally: + mssql_python.native_uuid = original_value + drop_table_if_exists(cursor, "#test_uuid_snapshot") + db_connection.commit() + + +def test_native_uuid_wide_result_set_performance(db_connection): + """Test UUID handling in wide result sets — only UUID columns should be affected.""" + import uuid + + original_value = mssql_python.native_uuid + cursor = db_connection.cursor() + + try: + mssql_python.native_uuid = False + + drop_table_if_exists(cursor, "#test_uuid_wide") + # Create a wide table: 1 UUID column + 30 varchar columns + cols = ", ".join([f"col{i} VARCHAR(50)" for i in range(1, 31)]) + cursor.execute(f"CREATE TABLE #test_uuid_wide (id UNIQUEIDENTIFIER, {cols})") + + test_uuid = uuid.uuid4() + values = [test_uuid] + [f"Value {i}" for i in range(1, 31)] + placeholders = ", ".join(["?"] * 31) + cursor.execute(f"INSERT INTO #test_uuid_wide VALUES ({placeholders})", values) + + cursor.execute("SELECT * FROM #test_uuid_wide") + row = cursor.fetchone() + + # UUID column should be str (uppercase, matching pyodbc / SQL Server format) + assert isinstance(row[0], str), f"UUID col: expected str, got {type(row[0])}" + assert row[0] == str(test_uuid).upper() + + # All other columns should remain str (unaffected) + for i in range(1, 31): + assert isinstance(row[i], str), f"Column {i}: expected str, got {type(row[i])}" + assert row[i] == f"Value {i}" + + finally: + mssql_python.native_uuid = original_value + drop_table_if_exists(cursor, "#test_uuid_wide") + db_connection.commit() + + +def test_native_uuid_input_parameter_accepts_uuid_objects(db_connection): + """Test that uuid.UUID objects are still accepted as input parameters regardless of native_uuid.""" + import uuid + + original_value = mssql_python.native_uuid + cursor = db_connection.cursor() + + try: + drop_table_if_exists(cursor, "#test_uuid_input") + cursor.execute("CREATE TABLE #test_uuid_input (id UNIQUEIDENTIFIER)") + test_uuid = uuid.uuid4() + + # Insert 
with native_uuid=False — uuid.UUID input should still work + mssql_python.native_uuid = False + cursor.execute("INSERT INTO #test_uuid_input VALUES (?)", [test_uuid]) + + cursor.execute("SELECT id FROM #test_uuid_input") + row = cursor.fetchone() + assert isinstance(row[0], str) + assert row[0] == str(test_uuid).upper() + + # Query with UUID parameter — should also work + cursor.execute("SELECT id FROM #test_uuid_input WHERE id = ?", [test_uuid]) + row = cursor.fetchone() + assert row is not None + assert row[0] == str(test_uuid).upper() + + finally: + mssql_python.native_uuid = original_value + drop_table_if_exists(cursor, "#test_uuid_input") + db_connection.commit() + + +# ────────────────────────────────────────────────────────────────────────────── +# Per-connection native_uuid tests +# ────────────────────────────────────────────────────────────────────────────── + + +def test_per_connection_native_uuid_false(conn_str): + """Test that connect(native_uuid=False) returns UUID columns as strings.""" + import uuid + + conn = mssql_python.connect(conn_str, native_uuid=False) + cursor = conn.cursor() + try: + drop_table_if_exists(cursor, "#test_conn_uuid_false") + cursor.execute( + "CREATE TABLE #test_conn_uuid_false (id UNIQUEIDENTIFIER, name NVARCHAR(50))" + ) + test_uuid = uuid.uuid4() + cursor.execute("INSERT INTO #test_conn_uuid_false VALUES (?, ?)", [test_uuid, "test"]) + + cursor.execute("SELECT id, name FROM #test_conn_uuid_false") + row = cursor.fetchone() + assert isinstance(row[0], str), f"Expected str, got {type(row[0])}" + assert row[0] == str(test_uuid).upper() + finally: + drop_table_if_exists(cursor, "#test_conn_uuid_false") + conn.close() + + +def test_per_connection_native_uuid_true(conn_str): + """Test that connect(native_uuid=True) returns UUID columns as uuid.UUID objects.""" + import uuid + + conn = mssql_python.connect(conn_str, native_uuid=True) + cursor = conn.cursor() + try: + drop_table_if_exists(cursor, "#test_conn_uuid_true") + 
cursor.execute("CREATE TABLE #test_conn_uuid_true (id UNIQUEIDENTIFIER, name NVARCHAR(50))") + test_uuid = uuid.uuid4() + cursor.execute("INSERT INTO #test_conn_uuid_true VALUES (?, ?)", [test_uuid, "test"]) + + cursor.execute("SELECT id, name FROM #test_conn_uuid_true") + row = cursor.fetchone() + assert isinstance(row[0], uuid.UUID), f"Expected uuid.UUID, got {type(row[0])}" + assert row[0] == test_uuid + finally: + drop_table_if_exists(cursor, "#test_conn_uuid_true") + conn.close() + + +def test_per_connection_native_uuid_none_uses_module_default(conn_str): + """Test that connect(native_uuid=None) defers to module-level setting.""" + import uuid + + original_value = mssql_python.native_uuid + conn = mssql_python.connect(conn_str, native_uuid=None) + cursor = conn.cursor() + try: + # Module-level = True, connection = None → should return uuid.UUID + mssql_python.native_uuid = True + drop_table_if_exists(cursor, "##test_conn_uuid_none") + cursor.execute("CREATE TABLE ##test_conn_uuid_none (id UNIQUEIDENTIFIER)") + test_uuid = uuid.uuid4() + cursor.execute("INSERT INTO ##test_conn_uuid_none VALUES (?)", [test_uuid]) + cursor.execute("SELECT id FROM ##test_conn_uuid_none") + row = cursor.fetchone() + assert isinstance(row[0], uuid.UUID), "None should defer to module-level True" + + # Now change module-level to False → new cursor on same connection should return str + mssql_python.native_uuid = False + cursor2 = conn.cursor() + cursor2.execute("SELECT id FROM ##test_conn_uuid_none") + row2 = cursor2.fetchone() + assert isinstance(row2[0], str), "None should defer to module-level False" + + finally: + drop_table_if_exists(cursor, "##test_conn_uuid_none") + conn.close() + mssql_python.native_uuid = original_value + + +def test_per_connection_overrides_module_level(conn_str): + """Test that per-connection native_uuid overrides the module-level setting.""" + import uuid + + original_value = mssql_python.native_uuid + conn = None + conn2 = None + try: + # Module-level = 
True, but connection says False → strings + mssql_python.native_uuid = True + conn = mssql_python.connect(conn_str, native_uuid=False) + cursor = conn.cursor() + + drop_table_if_exists(cursor, "#test_conn_override_a") + cursor.execute("CREATE TABLE #test_conn_override_a (id UNIQUEIDENTIFIER)") + test_uuid = uuid.uuid4() + cursor.execute("INSERT INTO #test_conn_override_a VALUES (?)", [test_uuid]) + + cursor.execute("SELECT id FROM #test_conn_override_a") + row = cursor.fetchone() + assert isinstance( + row[0], str + ), f"Connection native_uuid=False should override module True, got {type(row[0])}" + + # Module-level = False, but connection says True → uuid.UUID + mssql_python.native_uuid = False + conn2 = mssql_python.connect(conn_str, native_uuid=True) + cursor2 = conn2.cursor() + drop_table_if_exists(cursor2, "#test_conn_override_b") + cursor2.execute("CREATE TABLE #test_conn_override_b (id UNIQUEIDENTIFIER)") + cursor2.execute("INSERT INTO #test_conn_override_b VALUES (?)", [test_uuid]) + + cursor2.execute("SELECT id FROM #test_conn_override_b") + row2 = cursor2.fetchone() + assert isinstance( + row2[0], uuid.UUID + ), f"Connection native_uuid=True should override module False, got {type(row2[0])}" + + drop_table_if_exists(cursor, "#test_conn_override_a") + drop_table_if_exists(cursor2, "#test_conn_override_b") + finally: + mssql_python.native_uuid = original_value + if conn: + conn.close() + if conn2: + conn2.close() + + +def test_two_connections_different_native_uuid(conn_str): + """Test that two simultaneous connections can have different native_uuid settings.""" + import uuid + + original_value = mssql_python.native_uuid + try: + conn_str_mode = conn_str + conn_uuid = mssql_python.connect(conn_str_mode, native_uuid=True) + conn_str_mode2 = conn_str + conn_string = mssql_python.connect(conn_str_mode2, native_uuid=False) + + cursor_uuid = conn_uuid.cursor() + cursor_string = conn_string.cursor() + + drop_table_if_exists(cursor_uuid, "#test_dual_conn") + 
cursor_uuid.execute("CREATE TABLE #test_dual_conn (id UNIQUEIDENTIFIER)")
+        test_uuid = uuid.uuid4()
+        cursor_uuid.execute("INSERT INTO #test_dual_conn VALUES (?)", [test_uuid])
+
+        # Same query, different connections → different types
+        cursor_uuid.execute("SELECT id FROM #test_dual_conn")
+        row_uuid = cursor_uuid.fetchone()
+
+        # Need a separate temp table for the second connection since temp tables
+        # are connection-scoped. Use a global temp table instead.
+        drop_table_if_exists(cursor_string, "##test_dual_conn_shared")
+        cursor_string.execute("CREATE TABLE ##test_dual_conn_shared (id UNIQUEIDENTIFIER)")
+        cursor_string.execute("INSERT INTO ##test_dual_conn_shared VALUES (?)", [test_uuid])
+        cursor_string.execute("SELECT id FROM ##test_dual_conn_shared")
+        row_string = cursor_string.fetchone()
+
+        assert isinstance(row_uuid[0], uuid.UUID), f"Expected uuid.UUID, got {type(row_uuid[0])}"
+        assert isinstance(row_string[0], str), f"Expected str, got {type(row_string[0])}"
+        assert str(row_uuid[0]).upper() == row_string[0], "Values should be equal as uppercase strings"
+
+        drop_table_if_exists(cursor_uuid, "#test_dual_conn")
+        drop_table_if_exists(cursor_string, "##test_dual_conn_shared")
+        conn_uuid.close()
+        conn_string.close()
+    finally:
+        mssql_python.native_uuid = original_value
+
+
+def test_per_connection_native_uuid_invalid_type(conn_str):
+    """Test that connect() with a non-boolean native_uuid raises ValueError."""
+    import pytest
+
+    with pytest.raises(ValueError, match="native_uuid must be a boolean"):
+        mssql_python.connect(conn_str, native_uuid="false")
+
+    with pytest.raises(ValueError, match="native_uuid must be a boolean"):
+        mssql_python.connect(conn_str, native_uuid=1)
From de3c2e8e0f51434176661078b9a4f36a21bc3d2c Mon Sep 17 00:00:00 2001
From: Jahnvi Thakkar
Date: Mon, 2 Mar 2026 16:02:24 +0530
Subject: [PATCH 2/6] Formatting tests files using black formatter

---
 tests/test_003_connection.py | 12 ++-
 tests/test_004_cursor.py | 8 +-
tests/test_011_performance_stress.py | 36 ++++--- tests/test_013_SqlHandle_free_shutdown.py | 96 +++++++++++------ tests/test_013_encoding_decoding.py | 108 +++++++++++++------- tests/test_017_spatial_types.py | 48 ++++++--- tests/test_017_varchar_cp1252_boundary.py | 78 +++++++++----- tests/test_018_polars_pandas_integration.py | 18 ++-- tests/test_019_bulkcopy.py | 12 ++- tests/test_cache_invalidation.py | 108 +++++++++++++------- 10 files changed, 350 insertions(+), 174 deletions(-) diff --git a/tests/test_003_connection.py b/tests/test_003_connection.py index c6141ea77..e2f0cce0d 100644 --- a/tests/test_003_connection.py +++ b/tests/test_003_connection.py @@ -992,14 +992,16 @@ def test_execute_with_large_parameters(db_connection, conn_str): pytest.skip("Skipping for Azure SQL - large parameter tests may cause timeouts") # Test with a temporary table for large data - cursor = db_connection.execute(""" + cursor = db_connection.execute( + """ DROP TABLE IF EXISTS #large_params_test; CREATE TABLE #large_params_test ( id INT, large_text NVARCHAR(MAX), large_binary VARBINARY(MAX) ) - """) + """ + ) cursor.close() try: @@ -2124,10 +2126,12 @@ def test_timeout_long_query(db_connection): while retry_count < max_retries: start_time = time.perf_counter() try: - cursor.execute(""" + cursor.execute( + """ SELECT COUNT(*) FROM sys.objects a, sys.objects b, sys.objects c WHERE a.object_id = b.object_id * c.object_id - """) + """ + ) cursor.fetchall() elapsed_time = time.perf_counter() - start_time break # Success, exit retry loop diff --git a/tests/test_004_cursor.py b/tests/test_004_cursor.py index 21145dda3..6b9365af4 100644 --- a/tests/test_004_cursor.py +++ b/tests/test_004_cursor.py @@ -15406,7 +15406,9 @@ def test_native_uuid_false_returns_strings(db_connection): cursor.execute("SELECT id, name FROM #test_native_uuid_false") row = cursor.fetchone() assert isinstance(row[0], str), f"With native_uuid=False, expected str, got {type(row[0])}" - assert row[0] == 
str(test_uuid).upper(), f"UUID string mismatch: {row[0]} != {str(test_uuid).upper()}" + assert ( + row[0] == str(test_uuid).upper() + ), f"UUID string mismatch: {row[0]} != {str(test_uuid).upper()}" # fetchall cursor.execute("SELECT id, name FROM #test_native_uuid_false") @@ -15845,7 +15847,9 @@ def test_two_connections_different_native_uuid(conn_str): assert isinstance(row_uuid[0], uuid.UUID), f"Expected uuid.UUID, got {type(row_uuid[0])}" assert isinstance(row_string[0], str), f"Expected str, got {type(row_string[0])}" - assert str(row_uuid[0]).upper() == row_string[0], "Values should be equal as uppercase strings" + assert ( + str(row_uuid[0]).upper() == row_string[0] + ), "Values should be equal as uppercase strings" drop_table_if_exists(cursor_uuid, "#test_dual_conn") drop_table_if_exists(cursor_string, "##test_dual_conn_shared") diff --git a/tests/test_011_performance_stress.py b/tests/test_011_performance_stress.py index 7750fee52..9f9636321 100644 --- a/tests/test_011_performance_stress.py +++ b/tests/test_011_performance_stress.py @@ -53,13 +53,15 @@ def test_exception_mid_batch_no_corrupt_data(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_mid_batch_exception") # Create simple table to test batch processing integrity - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_mid_batch_exception ( id INT, value NVARCHAR(50), amount FLOAT ) - """) + """ + ) db_connection.commit() # Insert 1000 rows using individual inserts to avoid executemany complications @@ -119,14 +121,16 @@ def test_python_c_api_null_handling_memory_pressure(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_memory_pressure") # Create table with various string types - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_memory_pressure ( id INT, varchar_col VARCHAR(1000), nvarchar_col NVARCHAR(1000), varbinary_col VARBINARY(1000) ) - """) + """ + ) db_connection.commit() # Insert test data @@ -188,14 +192,16 @@ def 
test_thousands_of_empty_strings_allocation_stress(cursor, db_connection): try: drop_table_if_exists(cursor, "#pytest_empty_stress") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_empty_stress ( id INT, empty_varchar VARCHAR(100), empty_nvarchar NVARCHAR(100), empty_varbinary VARBINARY(100) ) - """) + """ + ) db_connection.commit() # Insert 10,000 rows with empty strings @@ -271,14 +277,16 @@ def test_large_result_set_100k_rows_no_overflow(cursor, db_connection): try: drop_table_if_exists(cursor, "#pytest_100k_rows") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_100k_rows ( id INT, varchar_col VARCHAR(50), nvarchar_col NVARCHAR(50), int_col INT ) - """) + """ + ) db_connection.commit() # Insert 100,000 rows with sequential IDs and predictable data @@ -367,14 +375,16 @@ def test_very_large_lob_10mb_data_integrity(cursor, db_connection): try: drop_table_if_exists(cursor, "#pytest_10mb_lob") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_10mb_lob ( id INT, varchar_lob VARCHAR(MAX), nvarchar_lob NVARCHAR(MAX), varbinary_lob VARBINARY(MAX) ) - """) + """ + ) db_connection.commit() # Create 10MB+ data @@ -484,13 +494,15 @@ def worker_thread(thread_id: int, conn_str: str, results_list: List, errors_list table_name = f"#pytest_concurrent_t{thread_id}" drop_table_if_exists(cursor, table_name) - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id INT, thread_id INT, data VARCHAR(100) ) - """) + """ + ) conn.commit() # Insert thread-specific data diff --git a/tests/test_013_SqlHandle_free_shutdown.py b/tests/test_013_SqlHandle_free_shutdown.py index 9944d8987..7e426cfbf 100644 --- a/tests/test_013_SqlHandle_free_shutdown.py +++ b/tests/test_013_SqlHandle_free_shutdown.py @@ -52,7 +52,8 @@ def test_aggressive_dbc_segfault_reproduction(self, conn_str): Expected with CURRENT CODE: May segfault (this is the bug we're testing for) Expected with FIXED CODE: No segfault """ - script = 
textwrap.dedent(f""" + script = textwrap.dedent( + f""" import sys import gc from mssql_python import connect @@ -82,7 +83,8 @@ def test_aggressive_dbc_segfault_reproduction(self, conn_str): # Force immediate exit - this triggers finalize_garbage sys.exit(0) - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -115,7 +117,8 @@ def test_dbc_handle_outlives_env_handle(self, conn_str): Expected with CURRENT CODE: Likely segfault Expected with FIXED CODE: No segfault """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import sys import atexit from mssql_python import connect @@ -138,7 +141,8 @@ def on_exit(): print("Python GC will finalize DBC during shutdown") print("If DBC cleanup isn't skipped, SQLFreeHandle will access freed ENV") sys.exit(0) - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -163,7 +167,8 @@ def test_force_gc_finalization_order_issue(self, conn_str): Expected with CURRENT CODE: May segfault Expected with FIXED CODE: No segfault """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import sys import gc import weakref @@ -202,7 +207,8 @@ def test_force_gc_finalization_order_issue(self, conn_str): print("Exiting - finalize_garbage will be called") print("If DBC handles aren't protected, segfault in SQLFreeHandle") sys.exit(0) - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -228,7 +234,8 @@ def test_stmt_handle_cleanup_at_shutdown(self, conn_str): Expected: No segfault, clean exit """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import sys from mssql_python import connect @@ -244,7 +251,8 @@ def test_stmt_handle_cleanup_at_shutdown(self, conn_str): # Type 3 (STMT) handle should be skipped when pythonShuttingDown=true print("STMT handle cleanup test: Exiting without explicit cleanup") 
sys.exit(0) - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -267,7 +275,8 @@ def test_dbc_handle_cleanup_at_shutdown(self, conn_str): Expected: No segfault, clean exit """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import sys from mssql_python import connect @@ -287,7 +296,8 @@ def test_dbc_handle_cleanup_at_shutdown(self, conn_str): # Type 2 (DBC) handles should be skipped when pythonShuttingDown=true print("DBC handle cleanup test: Exiting without explicit connection cleanup") sys.exit(0) - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -316,7 +326,8 @@ def test_env_handle_cleanup_at_shutdown(self, conn_str): Note: ENV handle is static and destructs via normal C++ mechanisms, not during Python GC. This test verifies the overall flow. """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import sys from mssql_python import connect @@ -335,7 +346,8 @@ def test_env_handle_cleanup_at_shutdown(self, conn_str): # It does NOT have pythonShuttingDown protection (Type 1 not in check) print("ENV handle cleanup test: All connections closed properly") sys.exit(0) - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -362,7 +374,8 @@ def test_mixed_handle_cleanup_at_shutdown(self, conn_str): Expected: No segfault, clean exit This tests the real-world scenario where cleanup is partial """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import sys from mssql_python import connect @@ -407,7 +420,8 @@ def test_mixed_handle_cleanup_at_shutdown(self, conn_str): # - Type 1 (ENV) handle: normal C++ static destruction print("Mixed handle cleanup test: Exiting with partial cleanup") sys.exit(0) - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -432,7 +446,8 
@@ def test_rapid_connection_churn_with_shutdown(self, conn_str): Expected: No segfault, proper handle cleanup order """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import sys import gc from mssql_python import connect @@ -460,7 +475,8 @@ def test_rapid_connection_churn_with_shutdown(self, conn_str): # Their DBC and STMT handles will be skipped during shutdown print("Rapid churn test: Exiting with mixed cleanup") sys.exit(0) - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -483,7 +499,8 @@ def test_exception_during_query_with_shutdown(self, conn_str): Expected: No segfault, graceful error handling """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import sys from mssql_python import connect, ProgrammingError @@ -499,7 +516,8 @@ def test_exception_during_query_with_shutdown(self, conn_str): print("Exception test: Exiting after exception without cleanup") sys.exit(0) - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -521,7 +539,8 @@ def test_weakref_cleanup_at_shutdown(self, conn_str): Expected: No segfault, proper weakref finalization """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import sys import weakref from mssql_python import connect @@ -552,7 +571,8 @@ def callback(ref): print("Weakref test: Exiting with weakrefs active") sys.exit(0) - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -573,7 +593,8 @@ def test_gc_during_shutdown_with_circular_refs(self, conn_str): Expected: No segfault, proper cycle breaking """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import sys import gc from mssql_python import connect @@ -610,7 +631,8 @@ def execute_query(self): print("Circular ref test: Exiting after GC with cycles") sys.exit(0) - """) + """ + ) result = subprocess.run( 
[sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -631,7 +653,8 @@ def test_all_handle_types_comprehensive(self, conn_str): Expected: Clean shutdown with no segfaults """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import sys from mssql_python import connect @@ -682,7 +705,8 @@ def test_all_handle_types_comprehensive(self, conn_str): print("- Type 1 (ENV) handle: Normal C++ static destruction") print("=== Exiting ===") sys.exit(0) - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -926,7 +950,8 @@ def test_cleanup_connections_scenarios(self, conn_str, scenario, test_code, expe - empty_list: No errors with empty set - mixed_scenario: Mixed connection states handled correctly """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import mssql_python # Verify cleanup infrastructure exists @@ -937,7 +962,8 @@ def test_cleanup_connections_scenarios(self, conn_str, scenario, test_code, expe {test_code} print("{expected_msg}") - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=3 @@ -957,7 +983,8 @@ def test_active_connections_thread_safety(self, conn_str): - Cleanup can safely iterate while threads are registering - Lock prevents data corruption in WeakSet """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import mssql_python import threading import time @@ -1026,7 +1053,8 @@ def register_connections(thread_id, count): assert conn._closed, f"Connection {{conn.conn_id}} was not closed" print("Thread safety test: PASSED") - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=10 @@ -1048,7 +1076,8 @@ def test_cleanup_connections_list_copy_isolation(self, conn_str): 3. WeakSet can be modified (e.g., connections removed by GC) without breaking iteration 4. 
The copy prevents "Set changed size during iteration" RuntimeError """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import mssql_python import weakref import gc @@ -1123,7 +1152,8 @@ def close(self): print("List copy isolation: PASSED") print("[OK] connections_to_close = list(_active_connections) properly tested") - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=3 @@ -1147,7 +1177,8 @@ def test_cleanup_connections_weakset_modification_during_iteration(self, conn_st 2. With the list copy, iteration is safe even if WeakSet shrinks due to GC 3. The pattern prevents "dictionary changed size during iteration" type errors """ - script = textwrap.dedent(f""" + script = textwrap.dedent( + f""" import mssql_python import weakref import gc @@ -1213,7 +1244,8 @@ def close(self): print("WeakSet modification during iteration: PASSED") print("[OK] list() copy prevents 'set changed size during iteration' errors") - """) + """ + ) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=3 diff --git a/tests/test_013_encoding_decoding.py b/tests/test_013_encoding_decoding.py index 034afae68..6fe07619d 100644 --- a/tests/test_013_encoding_decoding.py +++ b/tests/test_013_encoding_decoding.py @@ -1078,13 +1078,15 @@ def test_setdecoding_with_unicode_data(db_connection): try: # Create test table with NVARCHAR columns for Unicode support - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_decoding_unicode ( id INT IDENTITY(1,1), ascii_col VARCHAR(100), unicode_col NVARCHAR(100) ) - """) + """ + ) # Test ASCII strings in VARCHAR (safe) ascii_strings = [ @@ -1159,7 +1161,8 @@ def test_encoding_decoding_comprehensive_unicode_characters(db_connection): try: # Create test table with different column types - use NVARCHAR for better Unicode support - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_encoding_comprehensive ( id INT PRIMARY KEY, 
varchar_col VARCHAR(1000), @@ -1167,7 +1170,8 @@ def test_encoding_decoding_comprehensive_unicode_characters(db_connection): text_col TEXT, ntext_col NTEXT ) - """) + """ + ) # Test cases with different Unicode character categories test_cases = [ @@ -1329,7 +1333,8 @@ def test_encoding_decoding_edge_case_data_types(db_connection): try: # Create table with various data types - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_encoding_datatypes ( id INT PRIMARY KEY, varchar_small VARCHAR(50), @@ -1341,7 +1346,8 @@ def test_encoding_decoding_edge_case_data_types(db_connection): text_type TEXT, ntext_type NTEXT ) - """) + """ + ) # Test different encoding configurations test_configs = [ @@ -1633,14 +1639,16 @@ def test_encoding_decoding_large_dataset_performance(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_large_encoding ( id INT PRIMARY KEY, ascii_data VARCHAR(1000), unicode_data NVARCHAR(1000), mixed_data NVARCHAR(MAX) ) - """) + """ + ) # Generate test data - ensure it fits in column sizes ascii_text = "This is ASCII text with numbers 12345." 
* 10 # ~400 chars @@ -1809,13 +1817,15 @@ def test_encoding_decoding_metadata_columns(db_connection): try: # Create table with Unicode column names if supported - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_metadata ( [normal_col] NVARCHAR(100), [column_with_unicode_测试] NVARCHAR(100), [special_chars_ñáéíóú] INT ) - """) + """ + ) # Test metadata decoding configuration db_connection.setdecoding(mssql_python.SQL_WMETADATA, encoding="utf-16le", ctype=SQL_WCHAR) @@ -1889,7 +1899,8 @@ def test_encoding_decoding_stress_test_comprehensive(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #stress_test_encoding ( id INT IDENTITY(1,1) PRIMARY KEY, ascii_text VARCHAR(500), @@ -1897,7 +1908,8 @@ def test_encoding_decoding_stress_test_comprehensive(db_connection): binary_data VARBINARY(500), mixed_content NVARCHAR(MAX) ) - """) + """ + ) # Generate diverse test data test_datasets = [] @@ -2018,13 +2030,15 @@ def test_encoding_decoding_sql_char_various_encodings(db_connection): try: # Create test table with VARCHAR columns (SQL_CHAR type) - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_sql_char_encodings ( id INT PRIMARY KEY, data_col VARCHAR(100), description VARCHAR(200) ) - """) + """ + ) # Define various encoding types to test with SQL_CHAR encoding_tests = [ @@ -2301,13 +2315,15 @@ def test_encoding_decoding_sql_char_with_unicode_fallback(db_connection): try: # Create test table with both VARCHAR and NVARCHAR - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_unicode_fallback ( id INT PRIMARY KEY, varchar_data VARCHAR(100), nvarchar_data NVARCHAR(100) ) - """) + """ + ) # Test Unicode data unicode_test_cases = [ @@ -2378,13 +2394,15 @@ def test_encoding_decoding_sql_char_native_character_sets(db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_native_chars ( id INT PRIMARY KEY, data VARCHAR(200), encoding_used 
VARCHAR(50) ) - """) + """ + ) # Test encoding-specific character sets that should work encoding_native_tests = [ @@ -2519,13 +2537,15 @@ def test_encoding_decoding_sql_char_boundary_encoding_cases(db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_encoding_boundaries ( id INT PRIMARY KEY, test_data VARCHAR(500), test_type VARCHAR(100) ) - """) + """ + ) # Test boundary cases for different encodings boundary_tests = [ @@ -2626,14 +2646,16 @@ def test_encoding_decoding_sql_char_unicode_issue_diagnosis(db_connection): try: # Create test table with both VARCHAR and NVARCHAR for comparison - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_unicode_issue ( id INT PRIMARY KEY, varchar_col VARCHAR(100), nvarchar_col NVARCHAR(100), encoding_used VARCHAR(50) ) - """) + """ + ) # Test Unicode strings that commonly cause issues test_strings = [ @@ -2679,9 +2701,11 @@ def test_encoding_decoding_sql_char_unicode_issue_diagnosis(db_connection): ) # Retrieve results - cursor.execute(""" + cursor.execute( + """ SELECT varchar_col, nvarchar_col FROM #test_unicode_issue WHERE id = 1 - """) + """ + ) result = cursor.fetchone() if result: @@ -2736,7 +2760,8 @@ def test_encoding_decoding_sql_char_best_practices_guide(db_connection): try: # Create test table demonstrating different column types - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_best_practices ( id INT PRIMARY KEY, -- ASCII-safe columns (VARCHAR with SQL_CHAR) @@ -2750,7 +2775,8 @@ def test_encoding_decoding_sql_char_best_practices_guide(db_connection): -- Mixed approach column safe_text VARCHAR(200) ) - """) + """ + ) # Configure optimal settings db_connection.setencoding(encoding="utf-8", ctype=SQL_CHAR) # For ASCII data @@ -4966,13 +4992,15 @@ def test_execute_executemany_encoding_consistency(db_connection): try: # Create test table that can handle both VARCHAR and NVARCHAR data - cursor.execute(""" + cursor.execute( + """ CREATE TABLE 
#test_encoding_consistency ( id INT IDENTITY(1,1) PRIMARY KEY, varchar_col VARCHAR(1000) COLLATE SQL_Latin1_General_CP1_CI_AS, nvarchar_col NVARCHAR(1000) ) - """) + """ + ) # Test data with various encoding challenges # Using ASCII-safe characters that work across different encodings @@ -5025,11 +5053,13 @@ def test_execute_executemany_encoding_consistency(db_connection): ) # Retrieve immediately to verify encoding worked - cursor.execute(""" + cursor.execute( + """ SELECT varchar_col, nvarchar_col FROM #test_encoding_consistency WHERE id = (SELECT MAX(id) FROM #test_encoding_consistency) - """) + """ + ) result = cursor.fetchone() execute_results.append((result[0], result[1])) @@ -5054,11 +5084,13 @@ def test_execute_executemany_encoding_consistency(db_connection): ) # Retrieve all results from executemany - cursor.execute(""" + cursor.execute( + """ SELECT varchar_col, nvarchar_col FROM #test_encoding_consistency ORDER BY id - """) + """ + ) executemany_results = cursor.fetchall() # Verify executemany results match execute results @@ -5095,11 +5127,13 @@ def test_execute_executemany_encoding_consistency(db_connection): test_string, ) - cursor.execute(""" + cursor.execute( + """ SELECT nvarchar_col FROM #test_encoding_consistency WHERE id = (SELECT MAX(id) FROM #test_encoding_consistency) - """) + """ + ) result = cursor.fetchone() unicode_execute_results.append(result[0]) @@ -5126,11 +5160,13 @@ def test_execute_executemany_encoding_consistency(db_connection): unicode_params, ) - cursor.execute(""" + cursor.execute( + """ SELECT nvarchar_col FROM #test_encoding_consistency ORDER BY id - """) + """ + ) unicode_executemany_results = cursor.fetchall() # Compare Unicode results diff --git a/tests/test_017_spatial_types.py b/tests/test_017_spatial_types.py index 7fcb09c68..6daa07347 100644 --- a/tests/test_017_spatial_types.py +++ b/tests/test_017_spatial_types.py @@ -201,13 +201,15 @@ def test_geography_large_polygon_fetch(cursor, db_connection): def 
test_geography_mixed_with_other_types(cursor, db_connection): - cursor.execute("""CREATE TABLE #geo_mixed ( + cursor.execute( + """CREATE TABLE #geo_mixed ( id INT PRIMARY KEY IDENTITY(1,1), name NVARCHAR(100), geo_col GEOGRAPHY NULL, created_date DATETIME, score FLOAT - );""") + );""" + ) db_connection.commit() cursor.execute( @@ -330,11 +332,13 @@ def test_geography_description_metadata(cursor, db_connection): def test_geography_stdistance(cursor, db_connection): - cursor.execute("""CREATE TABLE #geo_distance ( + cursor.execute( + """CREATE TABLE #geo_distance ( id INT PRIMARY KEY IDENTITY(1,1), geo1 GEOGRAPHY NULL, geo2 GEOGRAPHY NULL - );""") + );""" + ) db_connection.commit() point2 = "POINT(-73.98500 40.75800)" # New York @@ -346,8 +350,10 @@ def test_geography_stdistance(cursor, db_connection): ) db_connection.commit() - row = cursor.execute("""SELECT geo1.STDistance(geo2) as distance_meters - FROM #geo_distance;""").fetchone() + row = cursor.execute( + """SELECT geo1.STDistance(geo2) as distance_meters + FROM #geo_distance;""" + ).fetchone() # Seattle to New York is approximately 3,870 km assert 3_500_000 < row[0] < 4_500_000 @@ -524,12 +530,14 @@ def test_geometry_description_metadata(cursor, db_connection): def test_geometry_mixed_with_other_types(cursor, db_connection): - cursor.execute("""CREATE TABLE #geom_mixed ( + cursor.execute( + """CREATE TABLE #geom_mixed ( id INT PRIMARY KEY IDENTITY(1,1), name NVARCHAR(100), geom_col GEOMETRY NULL, area FLOAT - );""") + );""" + ) db_connection.commit() cursor.execute( @@ -680,11 +688,13 @@ def test_hierarchyid_description_metadata(cursor, db_connection): def test_hierarchyid_tree_structure(cursor, db_connection): - cursor.execute("""CREATE TABLE #hid_tree ( + cursor.execute( + """CREATE TABLE #hid_tree ( id INT PRIMARY KEY IDENTITY(1,1), name NVARCHAR(100), node HIERARCHYID NULL - );""") + );""" + ) db_connection.commit() org_data = [ @@ -705,20 +715,24 @@ def test_hierarchyid_tree_structure(cursor, 
db_connection): db_connection.commit() # All descendants of VP Engineering (including self) - rows = cursor.execute("""SELECT name, node.ToString() as path + rows = cursor.execute( + """SELECT name, node.ToString() as path FROM #hid_tree WHERE node.IsDescendantOf(hierarchyid::Parse('/1/')) = 1 - ORDER BY node;""").fetchall() + ORDER BY node;""" + ).fetchall() assert len(rows) == 5 names = [r[0] for r in rows] assert names == ["VP Engineering", "Dev Manager", "Senior Dev", "Junior Dev", "QA Manager"] # Direct reports of Dev Manager - rows = cursor.execute("""SELECT name, node.ToString() as path + rows = cursor.execute( + """SELECT name, node.ToString() as path FROM #hid_tree WHERE node.GetAncestor(1) = hierarchyid::Parse('/1/1/') - ORDER BY node;""").fetchall() + ORDER BY node;""" + ).fetchall() assert len(rows) == 2 names = [r[0] for r in rows] @@ -726,12 +740,14 @@ def test_hierarchyid_tree_structure(cursor, db_connection): def test_hierarchyid_mixed_with_other_types(cursor, db_connection): - cursor.execute("""CREATE TABLE #hid_mixed ( + cursor.execute( + """CREATE TABLE #hid_mixed ( id INT PRIMARY KEY IDENTITY(1,1), name NVARCHAR(100), node HIERARCHYID NULL, salary DECIMAL(10,2) - );""") + );""" + ) db_connection.commit() cursor.execute( diff --git a/tests/test_017_varchar_cp1252_boundary.py b/tests/test_017_varchar_cp1252_boundary.py index 4c038ffdc..97b6a3a14 100644 --- a/tests/test_017_varchar_cp1252_boundary.py +++ b/tests/test_017_varchar_cp1252_boundary.py @@ -18,7 +18,8 @@ def test_varchar_cp1252_exact_length_boundary(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_cp1252_boundary ( id INT PRIMARY KEY, varchar_10 VARCHAR(10) COLLATE SQL_Latin1_General_CP1_CI_AS, @@ -26,7 +27,8 @@ def test_varchar_cp1252_exact_length_boundary(db_connection): varchar_50 VARCHAR(50) COLLATE SQL_Latin1_General_CP1_CI_AS, varchar_100 VARCHAR(100) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """) + """ + ) 
db_connection.commit() # Set encoding to CP1252 for VARCHAR columns @@ -137,12 +139,14 @@ def test_varchar_cp1252_length_variations(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_cp1252_variations ( id INT PRIMARY KEY, varchar_col VARCHAR(20) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """) + """ + ) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -198,12 +202,14 @@ def test_varchar_cp1252_mixed_ascii_nonascii(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_cp1252_mixed ( id INT PRIMARY KEY, varchar_col VARCHAR(15) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """) + """ + ) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -246,12 +252,14 @@ def test_varchar_cp1252_empty_and_null(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_cp1252_edge ( id INT PRIMARY KEY, varchar_col VARCHAR(10) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """) + """ + ) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -290,13 +298,15 @@ def test_varchar_cp1252_parameterized_query(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_cp1252_params ( id INT PRIMARY KEY, varchar_10 VARCHAR(10) COLLATE SQL_Latin1_General_CP1_CI_AS, varchar_20 VARCHAR(20) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """) + """ + ) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -338,14 +348,16 @@ def test_varchar_cp1252_fetchall_multi_column_batch(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_cp1252_multi_col ( id INT PRIMARY KEY, col_a VARCHAR(10) COLLATE SQL_Latin1_General_CP1_CI_AS, col_b VARCHAR(15) COLLATE SQL_Latin1_General_CP1_CI_AS, 
col_c VARCHAR(20) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """) + """ + ) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -391,12 +403,14 @@ def test_varchar_cp1252_fetchmany_batch(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_cp1252_fetchmany ( id INT PRIMARY KEY, data VARCHAR(10) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """) + """ + ) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -441,7 +455,8 @@ def test_varchar_cp1252_mixed_types_batch(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_cp1252_mixed_types ( id INT PRIMARY KEY, name VARCHAR(20) COLLATE SQL_Latin1_General_CP1_CI_AS, @@ -449,7 +464,8 @@ def test_varchar_cp1252_mixed_types_batch(db_connection): city VARCHAR(15) COLLATE SQL_Latin1_General_CP1_CI_AS, age SMALLINT ) - """) + """ + ) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -494,12 +510,14 @@ def test_varchar_cp1252_lob_with_collation(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_cp1252_lob ( id INT PRIMARY KEY, data VARCHAR(MAX) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """) + """ + ) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -532,12 +550,14 @@ def test_varchar_cp1252_varying_lengths_per_row(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_cp1252_varying ( id INT PRIMARY KEY, data VARCHAR(50) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """) + """ + ) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -583,12 +603,14 @@ def test_varchar_cp1252_null_interspersed_batch(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE 
#test_cp1252_nulls ( id INT PRIMARY KEY, data VARCHAR(20) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """) + """ + ) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -639,12 +661,14 @@ def test_varchar_cp1252_decode_fallback_returns_bytes(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_decode_fallback_bytes ( id INT PRIMARY KEY, data VARCHAR(20) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """) + """ + ) db_connection.commit() # Insert valid CP1252 data @@ -687,12 +711,14 @@ def test_varchar_cp1252_fetchall_many_rows(db_connection): cursor = db_connection.cursor() try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_cp1252_many ( id INT PRIMARY KEY, data VARCHAR(30) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """) + """ + ) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) diff --git a/tests/test_018_polars_pandas_integration.py b/tests/test_018_polars_pandas_integration.py index 529f6d8e5..9d857afdb 100644 --- a/tests/test_018_polars_pandas_integration.py +++ b/tests/test_018_polars_pandas_integration.py @@ -78,7 +78,8 @@ def test_datetimeoffset_type_code_is_datetime_datetime(self, cursor): def test_all_types_are_isclass(self, cursor): """Every type_code in cursor.description must pass inspect.isclass().""" - cursor.execute(""" + cursor.execute( + """ SELECT CAST(1 AS INT) AS i, CAST(1 AS SMALLINT) AS si, @@ -103,7 +104,8 @@ def test_all_types_are_isclass(self, cursor): CAST(0x01 AS VARBINARY(10)) AS vbin, NEWID() AS guid, CAST('' AS XML) AS x - """) + """ + ) for desc in cursor.description: col_name = desc[0] type_code = desc[1] @@ -172,18 +174,22 @@ def test_polars_date_with_nulls(self, db_connection): cursor = db_connection.cursor() try: cursor.execute("DROP TABLE IF EXISTS #polars_null_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #polars_null_test ( id INT, d DATE ) - """) - cursor.execute(""" + 
""" + ) + cursor.execute( + """ INSERT INTO #polars_null_test VALUES (1, '2024-01-15'), (2, NULL), (3, '2024-03-20') - """) + """ + ) db_connection.commit() df = pl.read_database( diff --git a/tests/test_019_bulkcopy.py b/tests/test_019_bulkcopy.py index 7542139d5..204276ecc 100644 --- a/tests/test_019_bulkcopy.py +++ b/tests/test_019_bulkcopy.py @@ -189,13 +189,15 @@ def test_bulkcopy_with_server_synonyms(conn_str): # Create table cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id INT, name NVARCHAR(50), value FLOAT ) - """) + """ + ) conn.commit() # Test data @@ -236,13 +238,15 @@ def test_bulkcopy_with_server_synonyms(conn_str): # Create table cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id INT, name NVARCHAR(50), value FLOAT ) - """) + """ + ) conn.commit() # Test data diff --git a/tests/test_cache_invalidation.py b/tests/test_cache_invalidation.py index fa1d34e2f..2c4bb48b3 100644 --- a/tests/test_cache_invalidation.py +++ b/tests/test_cache_invalidation.py @@ -22,29 +22,38 @@ def test_cursor_cache_invalidation_different_column_orders(db_connection): try: # Setup test tables with different column orders and types - cursor.execute(""" + cursor.execute( + """ IF OBJECT_ID('tempdb..#test_cache_table1') IS NOT NULL DROP TABLE #test_cache_table1 - """) - cursor.execute(""" + """ + ) + cursor.execute( + """ CREATE TABLE #test_cache_table1 ( id INT, name VARCHAR(50), age INT, salary DECIMAL(10,2) ) - """) - cursor.execute(""" + """ + ) + cursor.execute( + """ INSERT INTO #test_cache_table1 VALUES (1, 'Alice', 30, 50000.00), (2, 'Bob', 25, 45000.00) - """) + """ + ) - cursor.execute(""" + cursor.execute( + """ IF OBJECT_ID('tempdb..#test_cache_table2') IS NOT NULL DROP TABLE #test_cache_table2 - """) - cursor.execute(""" + """ + ) + cursor.execute( + """ CREATE TABLE #test_cache_table2 ( salary 
DECIMAL(10,2), age INT, @@ -52,12 +61,15 @@ def test_cursor_cache_invalidation_different_column_orders(db_connection): name VARCHAR(50), bonus FLOAT ) - """) - cursor.execute(""" + """ + ) + cursor.execute( + """ INSERT INTO #test_cache_table2 VALUES (60000.00, 35, 3, 'Charlie', 5000.5), (55000.00, 28, 4, 'Diana', 3000.75) - """) + """ + ) # Execute first query - columns: id, name, age, salary cursor.execute("SELECT id, name, age, salary FROM #test_cache_table1 ORDER BY id") @@ -128,11 +140,13 @@ def test_cursor_cache_invalidation_stored_procedure_multiple_resultsets(db_conne # the scenario where cached maps need to be invalidated between different queries # First result set: user info (3 columns) - cursor.execute(""" + cursor.execute( + """ SELECT 1 as user_id, 'John' as username, 'john@example.com' as email UNION ALL SELECT 2, 'Jane', 'jane@example.com' - """) + """ + ) # Validate first result set - user info assert len(cursor.description) == 3 @@ -147,11 +161,13 @@ def test_cursor_cache_invalidation_stored_procedure_multiple_resultsets(db_conne assert user_rows[0].email == "john@example.com" # Execute second query with completely different structure - cursor.execute(""" + cursor.execute( + """ SELECT 101 as product_id, 'Widget A' as product_name, 29.99 as price, 100 as stock_qty UNION ALL SELECT 102, 'Widget B', 39.99, 50 - """) + """ + ) # Validate second result set - product info (different structure) assert len(cursor.description) == 4 @@ -195,11 +211,14 @@ def test_cursor_cache_invalidation_metadata_then_select(db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ IF OBJECT_ID('tempdb..#test_metadata_table') IS NOT NULL DROP TABLE #test_metadata_table - """) - cursor.execute(""" + """ + ) + cursor.execute( + """ CREATE TABLE #test_metadata_table ( meta_id INT PRIMARY KEY, meta_name VARCHAR(100), @@ -207,15 +226,19 @@ def test_cursor_cache_invalidation_metadata_then_select(db_connection): meta_date DATETIME, meta_flag BIT ) - 
""") - cursor.execute(""" + """ + ) + cursor.execute( + """ INSERT INTO #test_metadata_table VALUES (1, 'Config1', 123.4567, '2023-01-15 10:30:00', 1), (2, 'Config2', 987.6543, '2023-02-20 14:45:00', 0) - """) + """ + ) # First: Execute a metadata-only query (no actual data rows) - cursor.execute(""" + cursor.execute( + """ SELECT COLUMN_NAME, DATA_TYPE, @@ -225,7 +248,8 @@ def test_cursor_cache_invalidation_metadata_then_select(db_connection): WHERE TABLE_NAME = 'test_metadata_table' AND TABLE_SCHEMA = 'tempdb' ORDER BY ORDINAL_POSITION - """) + """ + ) # Verify metadata result structure meta_description = cursor.description @@ -299,24 +323,30 @@ def test_cursor_cache_invalidation_fetch_methods_consistency(db_connection): try: # Create test data - cursor.execute(""" + cursor.execute( + """ IF OBJECT_ID('tempdb..#test_fetch_cache') IS NOT NULL DROP TABLE #test_fetch_cache - """) - cursor.execute(""" + """ + ) + cursor.execute( + """ CREATE TABLE #test_fetch_cache ( first_col VARCHAR(20), second_col INT, third_col DECIMAL(8,2) ) - """) - cursor.execute(""" + """ + ) + cursor.execute( + """ INSERT INTO #test_fetch_cache VALUES ('Row1', 10, 100.50), ('Row2', 20, 200.75), ('Row3', 30, 300.25), ('Row4', 40, 400.00) - """) + """ + ) # Execute first query with specific column order cursor.execute( @@ -380,9 +410,11 @@ def test_cache_specific_close_cleanup_validation(db_connection): try: # Setup test data - cursor.execute(""" + cursor.execute( + """ SELECT 1 as cache_col1, 'test' as cache_col2, 99.99 as cache_col3 - """) + """ + ) # Verify cache is populated assert cursor.description is not None @@ -527,12 +559,15 @@ def test_real_stored_procedure_cache_validation(db_connection): try: # Create a temporary stored procedure with multiple result sets - cursor.execute(""" + cursor.execute( + """ IF OBJECT_ID('tempdb..#sp_test_cache') IS NOT NULL DROP PROCEDURE #sp_test_cache - """) + """ + ) - cursor.execute(""" + cursor.execute( + """ CREATE PROCEDURE #sp_test_cache AS BEGIN 
@@ -545,7 +580,8 @@ def test_real_stored_procedure_cache_validation(db_connection): -- Third result set: Summary (yet another structure) SELECT GETDATE() as report_date, 'Cache Test' as report_type, 1 as version_num; END - """) + """ + ) # Execute the stored procedure cursor.execute("EXEC #sp_test_cache") From 10acb246a0bc6bd827772ab75b622e7d74d7b990 Mon Sep 17 00:00:00 2001 From: Jahnvi Thakkar Date: Mon, 2 Mar 2026 16:08:09 +0530 Subject: [PATCH 3/6] Formatting tests files using black formatter (after upgrading black in local) --- tests/test_001_globals.py | 12 +- tests/test_003_connection.py | 12 +- tests/test_004_cursor.py | 924 +++++++------------- tests/test_011_performance_stress.py | 36 +- tests/test_013_SqlHandle_free_shutdown.py | 96 +- tests/test_013_encoding_decoding.py | 108 +-- tests/test_017_spatial_types.py | 48 +- tests/test_017_varchar_cp1252_boundary.py | 78 +- tests/test_018_polars_pandas_integration.py | 18 +- tests/test_019_bulkcopy.py | 12 +- tests/test_cache_invalidation.py | 108 +-- 11 files changed, 484 insertions(+), 968 deletions(-) diff --git a/tests/test_001_globals.py b/tests/test_001_globals.py index cb7b6012b..03e600066 100644 --- a/tests/test_001_globals.py +++ b/tests/test_001_globals.py @@ -389,8 +389,7 @@ def test_decimal_separator_with_db_operations(db_connection): try: # Create a test table with decimal values cursor = db_connection.cursor() - cursor.execute( - """ + cursor.execute(""" DROP TABLE IF EXISTS #decimal_separator_test; CREATE TABLE #decimal_separator_test ( id INT, @@ -401,8 +400,7 @@ def test_decimal_separator_with_db_operations(db_connection): (2, 678.90), (3, 0.01), (4, 999.99); - """ - ) + """) cursor.close() # Test 1: Fetch with default separator @@ -470,8 +468,7 @@ def test_decimal_separator_batch_operations(db_connection): try: # Create test data cursor = db_connection.cursor() - cursor.execute( - """ + cursor.execute(""" DROP TABLE IF EXISTS #decimal_batch_test; CREATE TABLE #decimal_batch_test ( id INT, @@ 
-482,8 +479,7 @@ def test_decimal_separator_batch_operations(db_connection): (1, 123.456, 12345.67890), (2, 0.001, 0.00001), (3, 999.999, 9999.99999); - """ - ) + """) cursor.close() # Test 1: Fetch results with default separator diff --git a/tests/test_003_connection.py b/tests/test_003_connection.py index e2f0cce0d..c6141ea77 100644 --- a/tests/test_003_connection.py +++ b/tests/test_003_connection.py @@ -992,16 +992,14 @@ def test_execute_with_large_parameters(db_connection, conn_str): pytest.skip("Skipping for Azure SQL - large parameter tests may cause timeouts") # Test with a temporary table for large data - cursor = db_connection.execute( - """ + cursor = db_connection.execute(""" DROP TABLE IF EXISTS #large_params_test; CREATE TABLE #large_params_test ( id INT, large_text NVARCHAR(MAX), large_binary VARBINARY(MAX) ) - """ - ) + """) cursor.close() try: @@ -2126,12 +2124,10 @@ def test_timeout_long_query(db_connection): while retry_count < max_retries: start_time = time.perf_counter() try: - cursor.execute( - """ + cursor.execute(""" SELECT COUNT(*) FROM sys.objects a, sys.objects b, sys.objects c WHERE a.object_id = b.object_id * c.object_id - """ - ) + """) cursor.fetchall() elapsed_time = time.perf_counter() - start_time break # Success, exit retry loop diff --git a/tests/test_004_cursor.py b/tests/test_004_cursor.py index 6b9365af4..05dc3b59b 100644 --- a/tests/test_004_cursor.py +++ b/tests/test_004_cursor.py @@ -182,15 +182,13 @@ def test_mixed_empty_and_null_values(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_empty_vs_null") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_empty_vs_null ( id INT, text_col NVARCHAR(100), binary_col VARBINARY(100) ) - """ - ) + """) db_connection.commit() # Insert mix of empty and NULL values @@ -888,15 +886,13 @@ def test_rowcount(cursor, db_connection): cursor.execute("INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe3');") assert cursor.rowcount == 1, 
"Rowcount should be 1 after third insert" - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe4'), ('JohnDoe5'), ('JohnDoe6'); - """ - ) + """) assert cursor.rowcount == 3, "Rowcount should be 3 after inserting multiple rows" cursor.execute("SELECT * FROM #pytest_test_rowcount;") @@ -992,14 +988,12 @@ def test_fetchmany_size_zero_lob(cursor, db_connection): """Test fetchmany with size=0 for LOB columns""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_lob") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_fetchmany_lob ( id INT PRIMARY KEY, lob_data NVARCHAR(MAX) ) - """ - ) + """) # Insert test data test_data = [(1, "First LOB data"), (2, "Second LOB data"), (3, "Third LOB data")] @@ -1024,14 +1018,12 @@ def test_fetchmany_more_than_exist_lob(cursor, db_connection): """Test fetchmany requesting more rows than exist with LOB columns""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_lob_more") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_fetchmany_lob_more ( id INT PRIMARY KEY, lob_data NVARCHAR(MAX) ) - """ - ) + """) # Insert only 3 rows test_data = [(1, "First LOB data"), (2, "Second LOB data"), (3, "Third LOB data")] @@ -1065,14 +1057,12 @@ def test_fetchmany_empty_result_lob(cursor, db_connection): """Test fetchmany on empty result set with LOB columns""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_lob_empty") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_fetchmany_lob_empty ( id INT PRIMARY KEY, lob_data NVARCHAR(MAX) ) - """ - ) + """) db_connection.commit() # Query empty table @@ -1095,14 +1085,12 @@ def test_fetchmany_very_large_lob(cursor, db_connection): """Test fetchmany with very large LOB column data""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_large_lob") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_fetchmany_large_lob ( id INT PRIMARY KEY, large_lob NVARCHAR(MAX) ) - """ - ) + 
""") # Create very large data (10000 characters) large_data = "x" * 10000 @@ -1152,14 +1140,12 @@ def test_fetchmany_mixed_lob_sizes(cursor, db_connection): """Test fetchmany with mixed LOB sizes including empty and NULL""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_mixed_lob") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_fetchmany_mixed_lob ( id INT PRIMARY KEY, mixed_lob NVARCHAR(MAX) ) - """ - ) + """) # Mix of sizes: empty, NULL, small, medium, large test_data = [ @@ -1287,14 +1273,12 @@ def test_executemany_empty_strings(cursor, db_connection): """Test executemany with empty strings - regression test for Unix UTF-16 conversion issue""" try: # Create test table for empty string testing - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_empty_batch ( id INT, data NVARCHAR(50) ) - """ - ) + """) # Clear any existing data cursor.execute("DELETE FROM #pytest_empty_batch") @@ -1335,8 +1319,7 @@ def test_executemany_empty_strings_various_types(cursor, db_connection): """Test executemany with empty strings in different column types""" try: # Create test table with different string types - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_string_types ( id INT, varchar_col VARCHAR(50), @@ -1344,8 +1327,7 @@ def test_executemany_empty_strings_various_types(cursor, db_connection): text_col TEXT, ntext_col NTEXT ) - """ - ) + """) # Clear any existing data cursor.execute("DELETE FROM #pytest_string_types") @@ -1386,14 +1368,12 @@ def test_executemany_unicode_and_empty_strings(cursor, db_connection): """Test executemany with mix of Unicode characters and empty strings""" try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_unicode_test ( id INT, data NVARCHAR(100) ) - """ - ) + """) # Clear any existing data cursor.execute("DELETE FROM #pytest_unicode_test") @@ -1438,14 +1418,12 @@ def test_executemany_large_batch_with_empty_strings(cursor, db_connection): """Test 
executemany with large batch containing empty strings""" try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_large_batch ( id INT, data NVARCHAR(50) ) - """ - ) + """) # Clear any existing data cursor.execute("DELETE FROM #pytest_large_batch") @@ -1498,14 +1476,12 @@ def test_executemany_compare_with_execute(cursor, db_connection): """Test that executemany produces same results as individual execute calls""" try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_compare_test ( id INT, data NVARCHAR(50) ) - """ - ) + """) # Test data with empty strings test_data = [ @@ -1558,15 +1534,13 @@ def test_executemany_edge_cases_empty_strings(cursor, db_connection): """Test executemany edge cases with empty strings and special characters""" try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_edge_cases ( id INT, varchar_data VARCHAR(100), nvarchar_data NVARCHAR(100) ) - """ - ) + """) # Clear any existing data cursor.execute("DELETE FROM #pytest_edge_cases") @@ -1620,14 +1594,12 @@ def test_executemany_null_vs_empty_string(cursor, db_connection): """Test that executemany correctly distinguishes between NULL and empty string""" try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_null_vs_empty ( id INT, data NVARCHAR(50) ) - """ - ) + """) # Clear any existing data cursor.execute("DELETE FROM #pytest_null_vs_empty") @@ -1692,14 +1664,12 @@ def test_executemany_binary_data_edge_cases(cursor, db_connection): """Test executemany with binary data and empty byte arrays""" try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_binary_test ( id INT, binary_data VARBINARY(100) ) - """ - ) + """) # Clear any existing data cursor.execute("DELETE FROM #pytest_binary_test") @@ -1861,8 +1831,7 @@ def test_executemany_mixed_null_and_typed_values(cursor, db_connection): """Test executemany with randomly 
mixed NULL and non-NULL values across multiple columns and rows (50 rows, 10 columns).""" try: # Create table with 10 columns of various types - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_empty_params ( col1 INT, col2 VARCHAR(50), @@ -1875,8 +1844,7 @@ def test_executemany_mixed_null_and_typed_values(cursor, db_connection): col9 DATE, col10 REAL ) - """ - ) + """) # Generate 50 rows with randomly mixed NULL and non-NULL values across 10 columns data = [] @@ -1940,8 +1908,7 @@ def test_executemany_multi_column_null_arrays(cursor, db_connection): """Test executemany with multi-column NULL arrays (50 records, 8 columns).""" try: # Create table with 8 columns of various types - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_null_arrays ( col1 INT, col2 VARCHAR(100), @@ -1952,8 +1919,7 @@ def test_executemany_multi_column_null_arrays(cursor, db_connection): col7 BIGINT, col8 DATE ) - """ - ) + """) # Generate 50 rows with all NULL values across 8 columns data = [(None, None, None, None, None, None, None, None) for _ in range(50)] @@ -1973,14 +1939,12 @@ def test_executemany_multi_column_null_arrays(cursor, db_connection): assert null_count == 50, f"Expected 50 NULLs in col{col_num}, got {null_count}" # Verify no non-NULL values exist - cursor.execute( - """ + cursor.execute(""" SELECT COUNT(*) FROM #pytest_null_arrays WHERE col1 IS NOT NULL OR col2 IS NOT NULL OR col3 IS NOT NULL OR col4 IS NOT NULL OR col5 IS NOT NULL OR col6 IS NOT NULL OR col7 IS NOT NULL OR col8 IS NOT NULL - """ - ) + """) non_null_count = cursor.fetchone()[0] assert non_null_count == 0, f"Expected 0 non-NULL values, got {non_null_count}" @@ -2019,8 +1983,7 @@ def test_executemany_concurrent_null_parameters(db_connection): # Create table with db_connection.cursor() as cursor: - cursor.execute( - f""" + cursor.execute(f""" IF OBJECT_ID('{table_name}', 'U') IS NOT NULL DROP TABLE {table_name} @@ -2032,8 +1995,7 @@ def 
test_executemany_concurrent_null_parameters(db_connection): col3 FLOAT, col4 DATETIME ) - """ - ) + """) db_connection.commit() # Execute multiple sequential insert operations @@ -2288,14 +2250,12 @@ def test_insert_data_for_join(cursor, db_connection): def test_join_operations(cursor): """Test join operations""" try: - cursor.execute( - """ + cursor.execute(""" SELECT e.name, d.department_name, p.project_name FROM #pytest_employees e JOIN #pytest_departments d ON e.department_id = d.department_id JOIN #pytest_projects p ON e.employee_id = p.employee_id - """ - ) + """) rows = cursor.fetchall() assert len(rows) == 3, "Join operation returned incorrect number of rows" assert rows[0] == [ @@ -2385,12 +2345,10 @@ def test_execute_stored_procedure_with_parameters(cursor): def test_execute_stored_procedure_without_parameters(cursor): """Test executing stored procedure without parameters""" try: - cursor.execute( - """ + cursor.execute(""" DECLARE @EmployeeID INT = 2 EXEC dbo.GetEmployeeProjects @EmployeeID - """ - ) + """) rows = cursor.fetchall() assert ( len(rows) == 1 @@ -2610,25 +2568,21 @@ def test_row_attribute_access(cursor, db_connection): """Test accessing row values by column name as attributes""" try: # Create test table with multiple columns - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_row_attr_test ( id INT PRIMARY KEY, name VARCHAR(50), email VARCHAR(100), age INT ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_row_attr_test (id, name, email, age) VALUES (1, 'John Doe', 'john@example.com', 30) - """ - ) + """) db_connection.commit() # Test attribute access @@ -2724,15 +2678,13 @@ def test_row_comparison_with_list(cursor, db_connection): def test_row_string_representation(cursor, db_connection): """Test Row string and repr representations""" try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_row_test ( id INT PRIMARY KEY, text_col 
NVARCHAR(50), null_col INT ) - """ - ) + """) db_connection.commit() cursor.execute( @@ -2765,15 +2717,13 @@ def test_row_string_representation(cursor, db_connection): def test_row_column_mapping(cursor, db_connection): """Test Row column name mapping""" try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_row_test ( FirstColumn INT PRIMARY KEY, Second_Column NVARCHAR(50), [Complex Name!] INT ) - """ - ) + """) db_connection.commit() cursor.execute( @@ -3256,12 +3206,10 @@ def test_execute_rowcount_chaining(cursor, db_connection): assert count == 1, "INSERT should affect 1 row" # Test multiple INSERT rowcount chaining - count = cursor.execute( - """ + count = cursor.execute(""" INSERT INTO #test_chaining (id, value) VALUES (2, 'test2'), (3, 'test3'), (4, 'test4') - """ - ).rowcount + """).rowcount assert count == 3, "Multiple INSERT should affect 3 rows" # Test UPDATE rowcount chaining @@ -3496,8 +3444,7 @@ def test_cursor_next_with_different_data_types(cursor, db_connection): """Test next() functionality with various data types""" try: # Create test table with various data types - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_next_types ( id INT, name NVARCHAR(50), @@ -3506,8 +3453,7 @@ def test_cursor_next_with_different_data_types(cursor, db_connection): created_date DATE, created_time DATETIME ) - """ - ) + """) db_connection.commit() # Insert test data with different types @@ -3699,16 +3645,14 @@ def test_execute_chaining_compatibility_examples(cursor, db_connection): """Test real-world chaining examples""" try: # Create users table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #users ( user_id INT IDENTITY(1,1) PRIMARY KEY, user_name NVARCHAR(50), last_logon DATETIME, status NVARCHAR(20) ) - """ - ) + """) db_connection.commit() # Insert test users @@ -4407,8 +4351,7 @@ def test_fetchval_different_data_types(cursor, db_connection): try: # Create test table with different data types drop_table_if_exists(cursor, 
"#pytest_fetchval_types") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_fetchval_types ( int_col INTEGER, float_col FLOAT, @@ -4420,17 +4363,14 @@ def test_fetchval_different_data_types(cursor, db_connection): date_col DATE, time_col TIME ) - """ - ) + """) # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_fetchval_types VALUES (123, 45.67, 89.12, 'ASCII text', N'Unicode text', 1, '2024-05-20 12:34:56', '2024-05-20', '12:34:56') - """ - ) + """) db_connection.commit() # Test different data types @@ -5728,25 +5668,21 @@ def test_cursor_rollback_data_consistency(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_rollback_orders") drop_table_if_exists(cursor, "#pytest_rollback_customers") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_rollback_customers ( id INTEGER PRIMARY KEY, name VARCHAR(50) ) - """ - ) + """) - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_rollback_orders ( id INTEGER PRIMARY KEY, customer_id INTEGER, amount DECIMAL(10,2), FOREIGN KEY (customer_id) REFERENCES #pytest_rollback_customers(id) ) - """ - ) + """) cursor.commit() # Insert initial data @@ -6228,32 +6164,26 @@ def test_tables_setup(cursor, db_connection): cursor.execute("DROP VIEW IF EXISTS pytest_tables_schema.test_view") # Create regular table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_tables_schema.regular_table ( id INT PRIMARY KEY, name VARCHAR(100) ) - """ - ) + """) # Create another table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_tables_schema.another_table ( id INT PRIMARY KEY, description VARCHAR(200) ) - """ - ) + """) # Create a view - cursor.execute( - """ + cursor.execute(""" CREATE VIEW pytest_tables_schema.test_view AS SELECT id, name FROM pytest_tables_schema.regular_table - """ - ) + """) db_connection.commit() except Exception as e: @@ -6605,14 +6535,12 @@ def test_emoji_round_trip(cursor, db_connection): "1🚀' OR '1'='1", ] - 
cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_emoji_test ( id INT IDENTITY PRIMARY KEY, content NVARCHAR(MAX) ); - """ - ) + """) db_connection.commit() for text in test_inputs: @@ -6764,16 +6692,14 @@ def test_empty_values_fetchmany(cursor, db_connection): try: # Create comprehensive test table drop_table_if_exists(cursor, "#pytest_fetchmany_empty") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_fetchmany_empty ( id INT, varchar_col VARCHAR(50), nvarchar_col NVARCHAR(50), binary_col VARBINARY(50) ) - """ - ) + """) db_connection.commit() # Insert multiple rows with empty values @@ -6898,8 +6824,7 @@ def test_batch_fetch_empty_values_no_assertion_failure(cursor, db_connection): try: # Create comprehensive test table drop_table_if_exists(cursor, "#pytest_batch_empty_assertions") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_batch_empty_assertions ( id INT, empty_varchar VARCHAR(100), @@ -6909,29 +6834,24 @@ def test_batch_fetch_empty_values_no_assertion_failure(cursor, db_connection): null_nvarchar NVARCHAR(100), null_binary VARBINARY(100) ) - """ - ) + """) db_connection.commit() # Insert rows with mix of empty and NULL values - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_batch_empty_assertions VALUES (1, '', '', 0x, NULL, NULL, NULL), (2, '', '', 0x, NULL, NULL, NULL), (3, '', '', 0x, NULL, NULL, NULL) - """ - ) + """) db_connection.commit() # Test fetchall - should not trigger any assertions about dataLen - cursor.execute( - """ + cursor.execute(""" SELECT empty_varchar, empty_nvarchar, empty_binary, null_varchar, null_nvarchar, null_binary FROM #pytest_batch_empty_assertions ORDER BY id - """ - ) + """) rows = cursor.fetchall() assert len(rows) == 3, "Should return 3 rows" @@ -6948,12 +6868,10 @@ def test_batch_fetch_empty_values_no_assertion_failure(cursor, db_connection): assert row[5] is None, f"Row {i+1} null_binary should be None" # Test fetchmany - should also not trigger 
assertions - cursor.execute( - """ + cursor.execute(""" SELECT empty_nvarchar, empty_binary FROM #pytest_batch_empty_assertions ORDER BY id - """ - ) + """) # Fetch in batches first_batch = cursor.fetchmany(2) @@ -6993,15 +6911,13 @@ def test_executemany_utf16_length_validation(cursor, db_connection): try: # Create test table with small column size to trigger validation drop_table_if_exists(cursor, "#pytest_utf16_validation") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_utf16_validation ( id INT, short_text NVARCHAR(5), -- Small column to test length validation medium_text NVARCHAR(10) -- Medium column for edge cases ) - """ - ) + """) db_connection.commit() # Test 1: Valid strings that should work on all platforms @@ -7147,14 +7063,12 @@ def test_binary_data_over_8000_bytes(cursor, db_connection): try: # Create test table with VARBINARY(MAX) to handle large data drop_table_if_exists(cursor, "#pytest_small_binary") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_small_binary ( id INT, large_binary VARBINARY(MAX) ) - """ - ) + """) # Test data that fits within both parameter and fetch limits (< 4096 bytes) medium_data = b"B" * 3000 # 3,000 bytes - under both limits @@ -7188,14 +7102,12 @@ def test_varbinarymax_insert_fetch(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_varbinarymax") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_varbinarymax ( id INT, binary_data VARBINARY(MAX) ) - """ - ) + """) # Prepare test data - use moderate sizes to guarantee LOB fetch path (line 867-868) efficiently test_data = [ @@ -7262,14 +7174,12 @@ def test_all_empty_binaries(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_all_empty_binary") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_all_empty_binary ( id INT, empty_binary VARBINARY(100) ) - """ - ) + """) # Insert multiple rows with only empty binary data test_data = [ @@ 
-7308,14 +7218,12 @@ def test_mixed_bytes_and_bytearray_types(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_mixed_binary_types") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_mixed_binary_types ( id INT, binary_data VARBINARY(100) ) - """ - ) + """) # Test data mixing bytes and bytearray for the same column test_data = [ @@ -7370,14 +7278,12 @@ def test_binary_mostly_small_one_large(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_mixed_size_binary") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_mixed_size_binary ( id INT, binary_data VARBINARY(MAX) ) - """ - ) + """) # Create large binary value within both parameter and fetch limits (< 4096 bytes) large_binary = b"X" * 3500 # 3,500 bytes - under both limits @@ -7437,14 +7343,12 @@ def test_varbinarymax_insert_fetch_null(cursor, db_connection): """Test insertion and retrieval of NULL value in VARBINARY(MAX) column.""" try: drop_table_if_exists(cursor, "#pytest_varbinarymax_null") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_varbinarymax_null ( id INT, binary_data VARBINARY(MAX) ) - """ - ) + """) # Insert a row with NULL for binary_data cursor.execute( @@ -7474,15 +7378,13 @@ def test_sql_double_type(cursor, db_connection): """Test SQL_DOUBLE type (FLOAT(53)) to cover line 3213 in dispatcher.""" try: drop_table_if_exists(cursor, "#pytest_double_type") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_double_type ( id INT PRIMARY KEY, double_col FLOAT(53), float_col FLOAT ) - """ - ) + """) # Insert test data with various double precision values test_data = [ @@ -7532,15 +7434,13 @@ def test_null_guid_type(cursor, db_connection): try: mssql_python.native_uuid = True drop_table_if_exists(cursor, "#pytest_null_guid") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_null_guid ( id INT PRIMARY KEY, guid_col UNIQUEIDENTIFIER, guid_nullable 
UNIQUEIDENTIFIER NULL ) - """ - ) + """) # Insert test data with NULL and non-NULL GUIDs test_guid = uuid.uuid4() @@ -7593,14 +7493,12 @@ def test_only_null_and_empty_binary(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_null_empty_binary") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_null_empty_binary ( id INT, binary_data VARBINARY(100) ) - """ - ) + """) # Test data with only NULL and empty values test_data = [ @@ -7923,8 +7821,7 @@ def test_money_smallmoney_insert_fetch(cursor, db_connection): """Test inserting and retrieving valid MONEY and SMALLMONEY values including boundaries and typical data""" try: drop_table_if_exists(cursor, "#pytest_money_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, @@ -7932,8 +7829,7 @@ def test_money_smallmoney_insert_fetch(cursor, db_connection): d DECIMAL(19,4), n NUMERIC(10,4) ) - """ - ) + """) db_connection.commit() # Max values @@ -8023,15 +7919,13 @@ def test_money_smallmoney_insert_fetch(cursor, db_connection): def test_money_smallmoney_null_handling(cursor, db_connection): """Test that NULL values for MONEY and SMALLMONEY are stored and retrieved correctly""" try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """ - ) + """) db_connection.commit() # Row with both NULLs @@ -8081,15 +7975,13 @@ def test_money_smallmoney_null_handling(cursor, db_connection): def test_money_smallmoney_roundtrip(cursor, db_connection): """Test inserting and retrieving MONEY and SMALLMONEY using decimal.Decimal roundtrip""" try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """ - ) + """) db_connection.commit() values = (decimal.Decimal("12345.6789"), decimal.Decimal("987.6543")) @@ -8113,15 +8005,13 @@ def 
test_money_smallmoney_boundaries(cursor, db_connection): """Test boundary values for MONEY and SMALLMONEY types are handled correctly""" try: drop_table_if_exists(cursor, "#pytest_money_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """ - ) + """) db_connection.commit() # Insert max boundary @@ -8161,15 +8051,13 @@ def test_money_smallmoney_boundaries(cursor, db_connection): def test_money_smallmoney_invalid_values(cursor, db_connection): """Test that invalid or out-of-range MONEY and SMALLMONEY values raise errors""" try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """ - ) + """) db_connection.commit() # Out of range MONEY @@ -8200,15 +8088,13 @@ def test_money_smallmoney_invalid_values(cursor, db_connection): def test_money_smallmoney_roundtrip_executemany(cursor, db_connection): """Test inserting and retrieving MONEY and SMALLMONEY using executemany with decimal.Decimal""" try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """ - ) + """) db_connection.commit() test_data = [ @@ -8242,15 +8128,13 @@ def test_money_smallmoney_roundtrip_executemany(cursor, db_connection): def test_money_smallmoney_executemany_null_handling(cursor, db_connection): """Test inserting NULLs into MONEY and SMALLMONEY using executemany""" try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """ - ) + """) db_connection.commit() rows = [ @@ -8308,14 +8192,12 @@ def test_uuid_insert_and_select_none(cursor, db_connection): table_name = "#pytest_uuid_nullable" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, name NVARCHAR(50) ) - """ 
- ) + """) db_connection.commit() # Insert a row with None for the UUID @@ -8341,14 +8223,12 @@ def test_insert_multiple_uuids(cursor, db_connection): try: mssql_python.native_uuid = True cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER PRIMARY KEY, description NVARCHAR(50) ) - """ - ) + """) db_connection.commit() # Prepare test data @@ -8387,14 +8267,12 @@ def test_fetchmany_uuids(cursor, db_connection): try: mssql_python.native_uuid = True cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER PRIMARY KEY, description NVARCHAR(50) ) - """ - ) + """) db_connection.commit() uuids_to_insert = {f"Item {i}": uuid.uuid4() for i in range(10)} @@ -8431,14 +8309,12 @@ def test_uuid_insert_with_none(cursor, db_connection): table_name = "#pytest_uuid_none" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, name NVARCHAR(50) ) - """ - ) + """) db_connection.commit() cursor.execute(f"INSERT INTO {table_name} (id, name) VALUES (?, ?)", [None, "Alice"]) @@ -8537,14 +8413,12 @@ def test_executemany_uuid_insert_and_select(cursor, db_connection): try: # Drop and create a temporary table for the test cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER PRIMARY KEY, description NVARCHAR(50) ) - """ - ) + """) db_connection.commit() # Generate data for insertion @@ -8594,14 +8468,12 @@ def test_executemany_uuid_roundtrip_fixed_value(cursor, db_connection): table_name = "#pytest_uuid_fixed" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, description NVARCHAR(50) ) - """ - ) + """) 
db_connection.commit() fixed_uuid = uuid.UUID("12345678-1234-5678-1234-567812345678") @@ -8642,8 +8514,7 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -8651,16 +8522,13 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """ - ) + """) db_connection.commit() # Test with default separator first @@ -8697,23 +8565,19 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """ - ) + """) db_connection.commit() # Test with default separator @@ -8752,14 +8616,12 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test values with default separator (.) 
@@ -8844,25 +8706,21 @@ def test_lowercase_attribute(cursor, db_connection): try: # Create a test table with mixed-case column names - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lowercase_test ( ID INT PRIMARY KEY, UserName VARCHAR(50), EMAIL_ADDRESS VARCHAR(100), PhoneNumber VARCHAR(20) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_lowercase_test (ID, UserName, EMAIL_ADDRESS, PhoneNumber) VALUES (1, 'JohnDoe', 'john@example.com', '555-1234') - """ - ) + """) db_connection.commit() # First test with lowercase=False (default) @@ -8917,14 +8775,12 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test values with default separator (.) @@ -9006,8 +8862,7 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -9015,16 +8870,13 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """ - ) + """) db_connection.commit() # Test with default separator first @@ -9061,23 +8913,19 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + 
cursor.execute(""" INSERT INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """ - ) + """) db_connection.commit() # Test with default separator @@ -9590,25 +9438,21 @@ def test_lowercase_attribute(cursor, db_connection): try: # Create a test table with mixed-case column names - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lowercase_test ( ID INT PRIMARY KEY, UserName VARCHAR(50), EMAIL_ADDRESS VARCHAR(100), PhoneNumber VARCHAR(20) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_lowercase_test (ID, UserName, EMAIL_ADDRESS, PhoneNumber) VALUES (1, 'JohnDoe', 'john@example.com', '555-1234') - """ - ) + """) db_connection.commit() # First test with lowercase=False (default) @@ -9663,14 +9507,12 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test values with default separator (.) 
@@ -9752,8 +9594,7 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -9761,16 +9602,13 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """ - ) + """) db_connection.commit() # Test with default separator first @@ -9807,23 +9645,19 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """ - ) + """) db_connection.commit() # Test with default separator @@ -9862,14 +9696,12 @@ def test_cursor_setinputsizes_basic(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_inputsizes ( string_col NVARCHAR(100), int_col INT ) - """ - ) + """) # Set input sizes for parameters cursor.setinputsizes([(mssql_python.SQL_WVARCHAR, 100, 0), (mssql_python.SQL_INTEGER, 0, 0)]) @@ -9895,15 +9727,13 @@ def test_cursor_setinputsizes_with_executemany_float(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_float") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_inputsizes_float ( id INT, name NVARCHAR(50), price REAL /* Use REAL instead of DECIMAL */ ) - """ - ) + """) # Prepare data with float values data = [(1, "Item 1", 10.99), 
(2, "Item 2", 20.50), (3, "Item 3", 30.75)] @@ -9940,14 +9770,12 @@ def test_cursor_setinputsizes_reset(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_reset") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_inputsizes_reset ( col1 NVARCHAR(100), col2 INT ) - """ - ) + """) # Set input sizes for parameters cursor.setinputsizes([(mssql_python.SQL_WVARCHAR, 100, 0), (mssql_python.SQL_INTEGER, 0, 0)]) @@ -9982,14 +9810,12 @@ def test_cursor_setinputsizes_override_inference(db_connection): # Create a test table with specific types cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_override") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_inputsizes_override ( small_int SMALLINT, big_text NVARCHAR(MAX) ) - """ - ) + """) # Set input sizes that override the default inference # For SMALLINT, use a valid precision value (5 is typical for SMALLINT) @@ -10045,15 +9871,13 @@ def test_setinputsizes_parameter_count_mismatch_fewer(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_mismatch") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_inputsizes_mismatch ( col1 INT, col2 NVARCHAR(100), col3 FLOAT ) - """ - ) + """) # Set fewer input sizes than parameters cursor.setinputsizes( @@ -10096,14 +9920,12 @@ def test_setinputsizes_parameter_count_mismatch_more(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_mismatch") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_inputsizes_mismatch ( col1 INT, col2 NVARCHAR(100) ) - """ - ) + """) # Set more input sizes than parameters cursor.setinputsizes( @@ -10138,8 +9960,7 @@ def test_setinputsizes_with_null_values(db_connection): # Create a test table with multiple data types cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_null") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_inputsizes_null ( int_col INT, string_col 
NVARCHAR(100), @@ -10147,8 +9968,7 @@ def test_setinputsizes_with_null_values(db_connection): date_col DATE, binary_col VARBINARY(100) ) - """ - ) + """) # Set input sizes for all columns cursor.setinputsizes( @@ -10451,18 +10271,15 @@ def test_procedures_setup(cursor, db_connection): ) # Create test stored procedures - cursor.execute( - """ + cursor.execute(""" CREATE OR ALTER PROCEDURE pytest_proc_schema.test_proc1 AS BEGIN SELECT 1 AS result END - """ - ) + """) - cursor.execute( - """ + cursor.execute(""" CREATE OR ALTER PROCEDURE pytest_proc_schema.test_proc2 @param1 INT, @param2 VARCHAR(50) OUTPUT @@ -10471,8 +10288,7 @@ def test_procedures_setup(cursor, db_connection): SELECT @param2 = 'Output ' + CAST(@param1 AS VARCHAR(10)) RETURN @param1 END - """ - ) + """) db_connection.commit() except Exception as e: @@ -10590,8 +10406,7 @@ def test_procedures_with_parameters(cursor, db_connection): """Test that procedures() correctly reports parameter information""" try: # Create a simpler procedure with basic parameters - cursor.execute( - """ + cursor.execute(""" CREATE OR ALTER PROCEDURE pytest_proc_schema.test_params_proc @in1 INT, @in2 VARCHAR(50) @@ -10599,8 +10414,7 @@ def test_procedures_with_parameters(cursor, db_connection): BEGIN SELECT @in1 AS value1, @in2 AS value2 END - """ - ) + """) db_connection.commit() # Get procedure info @@ -10634,28 +10448,23 @@ def test_procedures_result_set_info(cursor, db_connection): """Test that procedures() reports information about result sets""" try: # Create procedures with different result set patterns - cursor.execute( - """ + cursor.execute(""" CREATE OR ALTER PROCEDURE pytest_proc_schema.test_no_results AS BEGIN DECLARE @x INT = 1 END - """ - ) + """) - cursor.execute( - """ + cursor.execute(""" CREATE OR ALTER PROCEDURE pytest_proc_schema.test_one_result AS BEGIN SELECT 1 AS col1, 'test' AS col2 END - """ - ) + """) - cursor.execute( - """ + cursor.execute(""" CREATE OR ALTER PROCEDURE 
pytest_proc_schema.test_multiple_results AS BEGIN @@ -10663,8 +10472,7 @@ def test_procedures_result_set_info(cursor, db_connection): SELECT 'test' AS result2 SELECT GETDATE() AS result3 END - """ - ) + """) db_connection.commit() # Get procedure info for all test procedures @@ -10746,18 +10554,15 @@ def test_foreignkeys_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_fk_schema.customers") # Create parent table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_fk_schema.customers ( customer_id INT PRIMARY KEY, customer_name VARCHAR(100) NOT NULL ) - """ - ) + """) # Create child table with foreign key - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_fk_schema.orders ( order_id INT PRIMARY KEY, order_date DATETIME NOT NULL, @@ -10766,23 +10571,18 @@ def test_foreignkeys_setup(cursor, db_connection): CONSTRAINT FK_Orders_Customers FOREIGN KEY (customer_id) REFERENCES pytest_fk_schema.customers (customer_id) ) - """ - ) + """) # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO pytest_fk_schema.customers (customer_id, customer_name) VALUES (1, 'Test Customer 1'), (2, 'Test Customer 2') - """ - ) + """) - cursor.execute( - """ + cursor.execute(""" INSERT INTO pytest_fk_schema.orders (order_id, order_date, customer_id, total_amount) VALUES (101, GETDATE(), 1, 150.00), (102, GETDATE(), 2, 250.50) - """ - ) + """) db_connection.commit() except Exception as e: @@ -11010,20 +10810,17 @@ def test_foreignkeys_multiple_column_fk(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_fk_schema.product_variants") # Create parent table with composite primary key - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_fk_schema.product_variants ( product_id INT NOT NULL, variant_id INT NOT NULL, variant_name VARCHAR(100) NOT NULL, PRIMARY KEY (product_id, variant_id) ) - """ - ) + """) # Create child table with composite foreign key - cursor.execute( - """ + cursor.execute(""" 
CREATE TABLE pytest_fk_schema.order_details ( order_id INT NOT NULL, product_id INT NOT NULL, @@ -11033,8 +10830,7 @@ def test_foreignkeys_multiple_column_fk(cursor, db_connection): CONSTRAINT FK_OrderDetails_ProductVariants FOREIGN KEY (product_id, variant_id) REFERENCES pytest_fk_schema.product_variants (product_id, variant_id) ) - """ - ) + """) db_connection.commit() @@ -11099,27 +10895,23 @@ def test_primarykeys_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_pk_schema.composite_pk_test") # Create table with simple primary key - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_pk_schema.single_pk_test ( id INT PRIMARY KEY, name VARCHAR(100) NOT NULL, description VARCHAR(200) NULL ) - """ - ) + """) # Create table with composite primary key - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_pk_schema.composite_pk_test ( dept_id INT NOT NULL, emp_id INT NOT NULL, hire_date DATE NOT NULL, CONSTRAINT PK_composite_test PRIMARY KEY (dept_id, emp_id) ) - """ - ) + """) db_connection.commit() except Exception as e: @@ -11430,15 +11222,13 @@ def test_rowcount(cursor, db_connection): cursor.execute("INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe3');") assert cursor.rowcount == 1, "Rowcount should be 1 after third insert" - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe4'), ('JohnDoe5'), ('JohnDoe6'); - """ - ) + """) assert cursor.rowcount == 3, "Rowcount should be 3 after inserting multiple rows" cursor.execute("SELECT * FROM #pytest_test_rowcount;") @@ -11473,31 +11263,26 @@ def test_specialcolumns_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_special_schema.identity_test") # Create table with primary key (for rowIdColumns) - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_special_schema.rowid_test ( id INT PRIMARY KEY, name NVARCHAR(100) NOT NULL, unique_col NVARCHAR(100) UNIQUE, non_unique_col NVARCHAR(100) 
) - """ - ) + """) # Create table with rowversion column (for rowVerColumns) - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_special_schema.timestamp_test ( id INT PRIMARY KEY, name NVARCHAR(100) NOT NULL, last_updated ROWVERSION ) - """ - ) + """) # Create table with multiple unique identifiers - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_special_schema.multiple_unique_test ( id INT NOT NULL, code VARCHAR(10) NOT NULL, @@ -11505,19 +11290,16 @@ def test_specialcolumns_setup(cursor, db_connection): order_number VARCHAR(20) UNIQUE, CONSTRAINT PK_multiple_unique_test PRIMARY KEY (id, code) ) - """ - ) + """) # Create table with identity column - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_special_schema.identity_test ( id INT IDENTITY(1,1) PRIMARY KEY, name NVARCHAR(100) NOT NULL, last_modified DATETIME DEFAULT GETDATE() ) - """ - ) + """) db_connection.commit() except Exception as e: @@ -11636,14 +11418,12 @@ def test_rowid_columns_nullable(cursor, db_connection): """Test rowIdColumns with nullable parameter""" try: # First create a table with nullable unique column and non-nullable PK - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_special_schema.nullable_test ( id INT PRIMARY KEY, -- PK can't be nullable in SQL Server data NVARCHAR(100) NULL ) - """ - ) + """) db_connection.commit() # Test with nullable=True (default) @@ -11736,14 +11516,12 @@ def test_rowver_columns_nullable(cursor, db_connection): """Test rowVerColumns with nullable parameter (not expected to have effect)""" try: # First create a table with rowversion column - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_special_schema.nullable_rowver_test ( id INT PRIMARY KEY, ts ROWVERSION ) - """ - ) + """) db_connection.commit() # Test with nullable=True (default) @@ -11852,8 +11630,7 @@ def test_statistics_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_stats_schema.empty_stats_test") # 
Create test table with various indexes - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_stats_schema.stats_test ( id INT PRIMARY KEY, name VARCHAR(100) NOT NULL, @@ -11862,32 +11639,25 @@ def test_statistics_setup(cursor, db_connection): salary DECIMAL(10, 2) NULL, hire_date DATE NOT NULL ) - """ - ) + """) # Create a non-unique index - cursor.execute( - """ + cursor.execute(""" CREATE INDEX IX_stats_test_dept_date ON pytest_stats_schema.stats_test (department, hire_date) - """ - ) + """) # Create a unique index on multiple columns - cursor.execute( - """ + cursor.execute(""" CREATE UNIQUE INDEX UX_stats_test_name_dept ON pytest_stats_schema.stats_test (name, department) - """ - ) + """) # Create an empty table for testing - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_stats_schema.empty_stats_test ( id INT PRIMARY KEY, data VARCHAR(100) NULL ) - """ - ) + """) db_connection.commit() except Exception as e: @@ -12152,8 +11922,7 @@ def test_columns_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_cols_schema.columns_special_test") # Create test table with various column types - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_cols_schema.columns_test ( id INT PRIMARY KEY, name NVARCHAR(100) NOT NULL, @@ -12165,12 +11934,10 @@ def test_columns_setup(cursor, db_connection): notes TEXT NULL, [computed_col] AS (name + ' - ' + CAST(id AS VARCHAR(10))) ) - """ - ) + """) # Create table with special column names and edge cases - fix the problematic column name - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_cols_schema.columns_special_test ( [ID] INT PRIMARY KEY, [User Name] NVARCHAR(100) NULL, @@ -12182,8 +11949,7 @@ def test_columns_setup(cursor, db_connection): [Column/With/Slashes] VARCHAR(20) NULL, [Column_With_Underscores] VARCHAR(20) NULL -- Changed from problematic nested brackets ) - """ - ) + """) db_connection.commit() except Exception as e: @@ -12647,25 +12413,21 @@ def 
test_lowercase_attribute(cursor, db_connection): try: # Create a test table with mixed-case column names - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lowercase_test ( ID INT PRIMARY KEY, UserName VARCHAR(50), EMAIL_ADDRESS VARCHAR(100), PhoneNumber VARCHAR(20) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_lowercase_test (ID, UserName, EMAIL_ADDRESS, PhoneNumber) VALUES (1, 'JohnDoe', 'john@example.com', '555-1234') - """ - ) + """) db_connection.commit() # First test with lowercase=False (default) @@ -12720,14 +12482,12 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test values with default separator (.) @@ -12809,8 +12569,7 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -12818,16 +12577,13 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """ - ) + """) db_connection.commit() # Test with default separator first @@ -12864,23 +12620,19 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT 
INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """ - ) + """) db_connection.commit() # Test with default separator @@ -12917,14 +12669,12 @@ def test_executemany_with_uuids(cursor, db_connection): table_name = "#pytest_uuid_batch" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, description NVARCHAR(50) ) - """ - ) + """) db_connection.commit() # Prepare test data: mix of UUIDs and None @@ -13072,13 +12822,11 @@ def test_date_string_parameter_binding(cursor, db_connection): table_name = "#pytest_date_string" try: drop_table_if_exists(cursor, table_name) - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( a_column VARCHAR(20) ) - """ - ) + """) cursor.execute(f"INSERT INTO {table_name} (a_column) VALUES ('string1'), ('string2')") db_connection.commit() @@ -13105,13 +12853,11 @@ def test_time_string_parameter_binding(cursor, db_connection): table_name = "#pytest_time_string" try: drop_table_if_exists(cursor, table_name) - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( time_col VARCHAR(22) ) - """ - ) + """) cursor.execute(f"INSERT INTO {table_name} (time_col) VALUES ('prefix_14:30:45_suffix')") db_connection.commit() @@ -13136,13 +12882,11 @@ def test_datetime_string_parameter_binding(cursor, db_connection): table_name = "#pytest_datetime_string" try: drop_table_if_exists(cursor, table_name) - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( datetime_col VARCHAR(33) ) - """ - ) + """) cursor.execute( f"INSERT INTO {table_name} (datetime_col) VALUES ('prefix_2025-08-12T14:30:45_suffix')" ) @@ -14006,14 +13750,12 @@ def test_column_metadata_error_handling(cursor): """Test column metadata retrieval error handling (Lines 1156-1167).""" # Execute a complex query that might stress metadata retrieval - cursor.execute( - """ + cursor.execute(""" SELECT CAST(1 as INT) as int_col, CAST('test' 
as NVARCHAR(100)) as nvarchar_col, CAST(NEWID() as UNIQUEIDENTIFIER) as guid_col - """ - ) + """) # This should exercise the metadata retrieval code paths # If there are any errors, they should be logged but not crash @@ -14129,14 +13871,12 @@ def test_row_uuid_processing_with_braces(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_uuid_braces") # Create table with UNIQUEIDENTIFIER column - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_uuid_braces ( id INT IDENTITY(1,1), guid_col UNIQUEIDENTIFIER ) - """ - ) + """) # Insert a GUID with braces (this is how SQL Server often returns them) test_guid = "12345678-1234-5678-9ABC-123456789ABC" @@ -14180,14 +13920,12 @@ def test_row_uuid_processing_sql_guid_type(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_sql_guid_type") # Create table with UNIQUEIDENTIFIER column - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_sql_guid_type ( id INT, guid_col UNIQUEIDENTIFIER ) - """ - ) + """) # Insert test data test_guid = "ABCDEF12-3456-7890-ABCD-1234567890AB" @@ -14233,14 +13971,12 @@ def test_row_output_converter_overflow_error(cursor, db_connection): try: # Create a table with integer column drop_table_if_exists(cursor, "#pytest_overflow_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_overflow_test ( id INT, small_int TINYINT -- TINYINT can only hold 0-255 ) - """ - ) + """) # Insert a valid value first cursor.execute("INSERT INTO #pytest_overflow_test (id, small_int) VALUES (?, ?)", [1, 100]) @@ -14290,14 +14026,12 @@ def test_row_output_converter_general_exception(cursor, db_connection): try: # Create a table with string column drop_table_if_exists(cursor, "#pytest_exception_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_exception_test ( id INT, text_col VARCHAR(50) ) - """ - ) + """) # Insert test data cursor.execute( @@ -14348,14 +14082,12 @@ def test_row_cursor_log_method_availability(cursor, db_connection): try: 
# Create test data drop_table_if_exists(cursor, "#pytest_log_check") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_log_check ( id INT, value_col INT ) - """ - ) + """) cursor.execute("INSERT INTO #pytest_log_check (id, value_col) VALUES (?, ?)", [1, 42]) db_connection.commit() @@ -14383,8 +14115,7 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): """Test NULL handling for all numeric types to ensure processor functions handle NULLs correctly""" try: drop_table_if_exists(cursor, "#pytest_all_numeric_nulls") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_all_numeric_nulls ( int_col INT, bigint_col BIGINT, @@ -14394,8 +14125,7 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): real_col REAL, float_col FLOAT ) - """ - ) + """) db_connection.commit() # Insert row with all NULLs @@ -14437,16 +14167,14 @@ def test_lob_data_types(cursor, db_connection): """Test LOB (Large Object) data types to ensure LOB fallback paths are exercised""" try: drop_table_if_exists(cursor, "#pytest_lob_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lob_test ( id INT, text_lob VARCHAR(MAX), ntext_lob NVARCHAR(MAX), binary_lob VARBINARY(MAX) ) - """ - ) + """) db_connection.commit() # Create large data that will trigger LOB handling @@ -14479,14 +14207,12 @@ def test_lob_char_column_types(cursor, db_connection): """Test LOB fetching specifically for CHAR/VARCHAR columns (covers lines 3313-3314)""" try: drop_table_if_exists(cursor, "#pytest_lob_char") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lob_char ( id INT, char_lob VARCHAR(MAX) ) - """ - ) + """) db_connection.commit() # Create data large enough to trigger LOB path (>8000 bytes) @@ -14513,14 +14239,12 @@ def test_lob_wchar_column_types(cursor, db_connection): """Test LOB fetching specifically for WCHAR/NVARCHAR columns (covers lines 3358-3359)""" try: drop_table_if_exists(cursor, "#pytest_lob_wchar") - cursor.execute( - """ + 
cursor.execute(""" CREATE TABLE #pytest_lob_wchar ( id INT, wchar_lob NVARCHAR(MAX) ) - """ - ) + """) db_connection.commit() # Create unicode data large enough to trigger LOB path (>4000 characters for NVARCHAR) @@ -14547,14 +14271,12 @@ def test_lob_binary_column_types(cursor, db_connection): """Test LOB fetching specifically for BINARY/VARBINARY columns (covers lines 3384-3385)""" try: drop_table_if_exists(cursor, "#pytest_lob_binary") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lob_binary ( id INT, binary_lob VARBINARY(MAX) ) - """ - ) + """) db_connection.commit() # Create binary data large enough to trigger LOB path (>8000 bytes) @@ -14581,16 +14303,14 @@ def test_zero_length_complex_types(cursor, db_connection): """Test zero-length data for complex types (covers lines 3531-3533)""" try: drop_table_if_exists(cursor, "#pytest_zero_length") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_zero_length ( id INT, empty_varchar VARCHAR(100), empty_nvarchar NVARCHAR(100), empty_binary VARBINARY(100) ) - """ - ) + """) db_connection.commit() # Insert empty (non-NULL) values @@ -14618,14 +14338,12 @@ def test_guid_with_nulls(cursor, db_connection): """Test GUID type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_guid_nulls") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_guid_nulls ( id INT, guid_col UNIQUEIDENTIFIER ) - """ - ) + """) db_connection.commit() # Insert NULL GUID @@ -14652,14 +14370,12 @@ def test_datetimeoffset_with_nulls(cursor, db_connection): """Test DATETIMEOFFSET type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_dto_nulls") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_dto_nulls ( id INT, dto_col DATETIMEOFFSET ) - """ - ) + """) db_connection.commit() # Insert NULL DATETIMEOFFSET @@ -14686,14 +14402,12 @@ def test_decimal_conversion_edge_cases(cursor, db_connection): """Test DECIMAL/NUMERIC type conversion including edge cases""" try: 
drop_table_if_exists(cursor, "#pytest_decimal_edge") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_edge ( id INT, dec_col DECIMAL(18, 4) ) - """ - ) + """) db_connection.commit() # Insert various decimal values including edge cases @@ -14814,8 +14528,7 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): """Test NULL handling for all numeric types to ensure processor functions handle NULLs correctly""" try: drop_table_if_exists(cursor, "#pytest_all_numeric_nulls") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_all_numeric_nulls ( int_col INT, bigint_col BIGINT, @@ -14825,8 +14538,7 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): real_col REAL, float_col FLOAT ) - """ - ) + """) db_connection.commit() # Insert row with all NULLs @@ -14868,16 +14580,14 @@ def test_lob_data_types(cursor, db_connection): """Test LOB (Large Object) data types to ensure LOB fallback paths are exercised""" try: drop_table_if_exists(cursor, "#pytest_lob_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lob_test ( id INT, text_lob VARCHAR(MAX), ntext_lob NVARCHAR(MAX), binary_lob VARBINARY(MAX) ) - """ - ) + """) db_connection.commit() # Create large data that will trigger LOB handling @@ -14910,14 +14620,12 @@ def test_lob_char_column_types(cursor, db_connection): """Test LOB fetching specifically for CHAR/VARCHAR columns (covers lines 3313-3314)""" try: drop_table_if_exists(cursor, "#pytest_lob_char") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lob_char ( id INT, char_lob VARCHAR(MAX) ) - """ - ) + """) db_connection.commit() # Create data large enough to trigger LOB path (>8000 bytes) @@ -14944,14 +14652,12 @@ def test_lob_wchar_column_types(cursor, db_connection): """Test LOB fetching specifically for WCHAR/NVARCHAR columns (covers lines 3358-3359)""" try: drop_table_if_exists(cursor, "#pytest_lob_wchar") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE 
#pytest_lob_wchar ( id INT, wchar_lob NVARCHAR(MAX) ) - """ - ) + """) db_connection.commit() # Create unicode data large enough to trigger LOB path (>4000 characters for NVARCHAR) @@ -14978,14 +14684,12 @@ def test_lob_binary_column_types(cursor, db_connection): """Test LOB fetching specifically for BINARY/VARBINARY columns (covers lines 3384-3385)""" try: drop_table_if_exists(cursor, "#pytest_lob_binary") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lob_binary ( id INT, binary_lob VARBINARY(MAX) ) - """ - ) + """) db_connection.commit() # Create binary data large enough to trigger LOB path (>8000 bytes) @@ -15012,16 +14716,14 @@ def test_zero_length_complex_types(cursor, db_connection): """Test zero-length data for complex types (covers lines 3531-3533)""" try: drop_table_if_exists(cursor, "#pytest_zero_length") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_zero_length ( id INT, empty_varchar VARCHAR(100), empty_nvarchar NVARCHAR(100), empty_binary VARBINARY(100) ) - """ - ) + """) db_connection.commit() # Insert empty (non-NULL) values @@ -15049,14 +14751,12 @@ def test_guid_with_nulls(cursor, db_connection): """Test GUID type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_guid_nulls") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_guid_nulls ( id INT, guid_col UNIQUEIDENTIFIER ) - """ - ) + """) db_connection.commit() # Insert NULL GUID @@ -15083,14 +14783,12 @@ def test_datetimeoffset_with_nulls(cursor, db_connection): """Test DATETIMEOFFSET type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_dto_nulls") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_dto_nulls ( id INT, dto_col DATETIMEOFFSET ) - """ - ) + """) db_connection.commit() # Insert NULL DATETIMEOFFSET @@ -15117,14 +14815,12 @@ def test_decimal_conversion_edge_cases(cursor, db_connection): """Test DECIMAL/NUMERIC type conversion including edge cases""" try: drop_table_if_exists(cursor, 
"#pytest_decimal_edge") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_edge ( id INT, dec_col DECIMAL(18, 4) ) - """ - ) + """) db_connection.commit() # Insert various decimal values including edge cases @@ -15245,16 +14941,14 @@ def test_fetchall_with_integrity_constraint(cursor, db_connection): try: # Setup table with unique constraint cursor.execute("DROP TABLE IF EXISTS #uniq_cons_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #uniq_cons_test ( id INTEGER NOT NULL IDENTITY, data VARCHAR(50) NULL, PRIMARY KEY (id), UNIQUE (data) ) - """ - ) + """) # Insert initial row - should work cursor.execute( @@ -15549,8 +15243,7 @@ def test_native_uuid_non_uuid_columns_unaffected(db_connection): mssql_python.native_uuid = False drop_table_if_exists(cursor, "#test_uuid_other_cols") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_uuid_other_cols ( id UNIQUEIDENTIFIER, int_col INT, @@ -15558,8 +15251,7 @@ def test_native_uuid_non_uuid_columns_unaffected(db_connection): float_col FLOAT, bit_col BIT ) - """ - ) + """) test_uuid = uuid.uuid4() cursor.execute( "INSERT INTO #test_uuid_other_cols VALUES (?, ?, ?, ?, ?)", diff --git a/tests/test_011_performance_stress.py b/tests/test_011_performance_stress.py index 9f9636321..7750fee52 100644 --- a/tests/test_011_performance_stress.py +++ b/tests/test_011_performance_stress.py @@ -53,15 +53,13 @@ def test_exception_mid_batch_no_corrupt_data(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_mid_batch_exception") # Create simple table to test batch processing integrity - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_mid_batch_exception ( id INT, value NVARCHAR(50), amount FLOAT ) - """ - ) + """) db_connection.commit() # Insert 1000 rows using individual inserts to avoid executemany complications @@ -121,16 +119,14 @@ def test_python_c_api_null_handling_memory_pressure(cursor, db_connection): drop_table_if_exists(cursor, 
"#pytest_memory_pressure") # Create table with various string types - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_memory_pressure ( id INT, varchar_col VARCHAR(1000), nvarchar_col NVARCHAR(1000), varbinary_col VARBINARY(1000) ) - """ - ) + """) db_connection.commit() # Insert test data @@ -192,16 +188,14 @@ def test_thousands_of_empty_strings_allocation_stress(cursor, db_connection): try: drop_table_if_exists(cursor, "#pytest_empty_stress") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_empty_stress ( id INT, empty_varchar VARCHAR(100), empty_nvarchar NVARCHAR(100), empty_varbinary VARBINARY(100) ) - """ - ) + """) db_connection.commit() # Insert 10,000 rows with empty strings @@ -277,16 +271,14 @@ def test_large_result_set_100k_rows_no_overflow(cursor, db_connection): try: drop_table_if_exists(cursor, "#pytest_100k_rows") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_100k_rows ( id INT, varchar_col VARCHAR(50), nvarchar_col NVARCHAR(50), int_col INT ) - """ - ) + """) db_connection.commit() # Insert 100,000 rows with sequential IDs and predictable data @@ -375,16 +367,14 @@ def test_very_large_lob_10mb_data_integrity(cursor, db_connection): try: drop_table_if_exists(cursor, "#pytest_10mb_lob") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_10mb_lob ( id INT, varchar_lob VARCHAR(MAX), nvarchar_lob NVARCHAR(MAX), varbinary_lob VARBINARY(MAX) ) - """ - ) + """) db_connection.commit() # Create 10MB+ data @@ -494,15 +484,13 @@ def worker_thread(thread_id: int, conn_str: str, results_list: List, errors_list table_name = f"#pytest_concurrent_t{thread_id}" drop_table_if_exists(cursor, table_name) - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id INT, thread_id INT, data VARCHAR(100) ) - """ - ) + """) conn.commit() # Insert thread-specific data diff --git a/tests/test_013_SqlHandle_free_shutdown.py b/tests/test_013_SqlHandle_free_shutdown.py index 7e426cfbf..9944d8987 
100644 --- a/tests/test_013_SqlHandle_free_shutdown.py +++ b/tests/test_013_SqlHandle_free_shutdown.py @@ -52,8 +52,7 @@ def test_aggressive_dbc_segfault_reproduction(self, conn_str): Expected with CURRENT CODE: May segfault (this is the bug we're testing for) Expected with FIXED CODE: No segfault """ - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import sys import gc from mssql_python import connect @@ -83,8 +82,7 @@ def test_aggressive_dbc_segfault_reproduction(self, conn_str): # Force immediate exit - this triggers finalize_garbage sys.exit(0) - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -117,8 +115,7 @@ def test_dbc_handle_outlives_env_handle(self, conn_str): Expected with CURRENT CODE: Likely segfault Expected with FIXED CODE: No segfault """ - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import sys import atexit from mssql_python import connect @@ -141,8 +138,7 @@ def on_exit(): print("Python GC will finalize DBC during shutdown") print("If DBC cleanup isn't skipped, SQLFreeHandle will access freed ENV") sys.exit(0) - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -167,8 +163,7 @@ def test_force_gc_finalization_order_issue(self, conn_str): Expected with CURRENT CODE: May segfault Expected with FIXED CODE: No segfault """ - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import sys import gc import weakref @@ -207,8 +202,7 @@ def test_force_gc_finalization_order_issue(self, conn_str): print("Exiting - finalize_garbage will be called") print("If DBC handles aren't protected, segfault in SQLFreeHandle") sys.exit(0) - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -234,8 +228,7 @@ def test_stmt_handle_cleanup_at_shutdown(self, conn_str): Expected: No segfault, clean exit """ - script = 
textwrap.dedent( - f""" + script = textwrap.dedent(f""" import sys from mssql_python import connect @@ -251,8 +244,7 @@ def test_stmt_handle_cleanup_at_shutdown(self, conn_str): # Type 3 (STMT) handle should be skipped when pythonShuttingDown=true print("STMT handle cleanup test: Exiting without explicit cleanup") sys.exit(0) - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -275,8 +267,7 @@ def test_dbc_handle_cleanup_at_shutdown(self, conn_str): Expected: No segfault, clean exit """ - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import sys from mssql_python import connect @@ -296,8 +287,7 @@ def test_dbc_handle_cleanup_at_shutdown(self, conn_str): # Type 2 (DBC) handles should be skipped when pythonShuttingDown=true print("DBC handle cleanup test: Exiting without explicit connection cleanup") sys.exit(0) - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -326,8 +316,7 @@ def test_env_handle_cleanup_at_shutdown(self, conn_str): Note: ENV handle is static and destructs via normal C++ mechanisms, not during Python GC. This test verifies the overall flow. 
""" - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import sys from mssql_python import connect @@ -346,8 +335,7 @@ def test_env_handle_cleanup_at_shutdown(self, conn_str): # It does NOT have pythonShuttingDown protection (Type 1 not in check) print("ENV handle cleanup test: All connections closed properly") sys.exit(0) - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -374,8 +362,7 @@ def test_mixed_handle_cleanup_at_shutdown(self, conn_str): Expected: No segfault, clean exit This tests the real-world scenario where cleanup is partial """ - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import sys from mssql_python import connect @@ -420,8 +407,7 @@ def test_mixed_handle_cleanup_at_shutdown(self, conn_str): # - Type 1 (ENV) handle: normal C++ static destruction print("Mixed handle cleanup test: Exiting with partial cleanup") sys.exit(0) - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -446,8 +432,7 @@ def test_rapid_connection_churn_with_shutdown(self, conn_str): Expected: No segfault, proper handle cleanup order """ - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import sys import gc from mssql_python import connect @@ -475,8 +460,7 @@ def test_rapid_connection_churn_with_shutdown(self, conn_str): # Their DBC and STMT handles will be skipped during shutdown print("Rapid churn test: Exiting with mixed cleanup") sys.exit(0) - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -499,8 +483,7 @@ def test_exception_during_query_with_shutdown(self, conn_str): Expected: No segfault, graceful error handling """ - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import sys from mssql_python import connect, ProgrammingError @@ -516,8 +499,7 @@ def test_exception_during_query_with_shutdown(self, 
conn_str): print("Exception test: Exiting after exception without cleanup") sys.exit(0) - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -539,8 +521,7 @@ def test_weakref_cleanup_at_shutdown(self, conn_str): Expected: No segfault, proper weakref finalization """ - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import sys import weakref from mssql_python import connect @@ -571,8 +552,7 @@ def callback(ref): print("Weakref test: Exiting with weakrefs active") sys.exit(0) - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -593,8 +573,7 @@ def test_gc_during_shutdown_with_circular_refs(self, conn_str): Expected: No segfault, proper cycle breaking """ - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import sys import gc from mssql_python import connect @@ -631,8 +610,7 @@ def execute_query(self): print("Circular ref test: Exiting after GC with cycles") sys.exit(0) - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -653,8 +631,7 @@ def test_all_handle_types_comprehensive(self, conn_str): Expected: Clean shutdown with no segfaults """ - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import sys from mssql_python import connect @@ -705,8 +682,7 @@ def test_all_handle_types_comprehensive(self, conn_str): print("- Type 1 (ENV) handle: Normal C++ static destruction") print("=== Exiting ===") sys.exit(0) - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=5 @@ -950,8 +926,7 @@ def test_cleanup_connections_scenarios(self, conn_str, scenario, test_code, expe - empty_list: No errors with empty set - mixed_scenario: Mixed connection states handled correctly """ - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import mssql_python # Verify cleanup 
infrastructure exists @@ -962,8 +937,7 @@ def test_cleanup_connections_scenarios(self, conn_str, scenario, test_code, expe {test_code} print("{expected_msg}") - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=3 @@ -983,8 +957,7 @@ def test_active_connections_thread_safety(self, conn_str): - Cleanup can safely iterate while threads are registering - Lock prevents data corruption in WeakSet """ - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import mssql_python import threading import time @@ -1053,8 +1026,7 @@ def register_connections(thread_id, count): assert conn._closed, f"Connection {{conn.conn_id}} was not closed" print("Thread safety test: PASSED") - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=10 @@ -1076,8 +1048,7 @@ def test_cleanup_connections_list_copy_isolation(self, conn_str): 3. WeakSet can be modified (e.g., connections removed by GC) without breaking iteration 4. The copy prevents "Set changed size during iteration" RuntimeError """ - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import mssql_python import weakref import gc @@ -1152,8 +1123,7 @@ def close(self): print("List copy isolation: PASSED") print("[OK] connections_to_close = list(_active_connections) properly tested") - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=3 @@ -1177,8 +1147,7 @@ def test_cleanup_connections_weakset_modification_during_iteration(self, conn_st 2. With the list copy, iteration is safe even if WeakSet shrinks due to GC 3. 
The pattern prevents "dictionary changed size during iteration" type errors """ - script = textwrap.dedent( - f""" + script = textwrap.dedent(f""" import mssql_python import weakref import gc @@ -1244,8 +1213,7 @@ def close(self): print("WeakSet modification during iteration: PASSED") print("[OK] list() copy prevents 'set changed size during iteration' errors") - """ - ) + """) result = subprocess.run( [sys.executable, "-c", script], capture_output=True, text=True, timeout=3 diff --git a/tests/test_013_encoding_decoding.py b/tests/test_013_encoding_decoding.py index 6fe07619d..034afae68 100644 --- a/tests/test_013_encoding_decoding.py +++ b/tests/test_013_encoding_decoding.py @@ -1078,15 +1078,13 @@ def test_setdecoding_with_unicode_data(db_connection): try: # Create test table with NVARCHAR columns for Unicode support - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_decoding_unicode ( id INT IDENTITY(1,1), ascii_col VARCHAR(100), unicode_col NVARCHAR(100) ) - """ - ) + """) # Test ASCII strings in VARCHAR (safe) ascii_strings = [ @@ -1161,8 +1159,7 @@ def test_encoding_decoding_comprehensive_unicode_characters(db_connection): try: # Create test table with different column types - use NVARCHAR for better Unicode support - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_encoding_comprehensive ( id INT PRIMARY KEY, varchar_col VARCHAR(1000), @@ -1170,8 +1167,7 @@ def test_encoding_decoding_comprehensive_unicode_characters(db_connection): text_col TEXT, ntext_col NTEXT ) - """ - ) + """) # Test cases with different Unicode character categories test_cases = [ @@ -1333,8 +1329,7 @@ def test_encoding_decoding_edge_case_data_types(db_connection): try: # Create table with various data types - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_encoding_datatypes ( id INT PRIMARY KEY, varchar_small VARCHAR(50), @@ -1346,8 +1341,7 @@ def test_encoding_decoding_edge_case_data_types(db_connection): text_type TEXT, ntext_type NTEXT ) - """ 
- ) + """) # Test different encoding configurations test_configs = [ @@ -1639,16 +1633,14 @@ def test_encoding_decoding_large_dataset_performance(db_connection): cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_large_encoding ( id INT PRIMARY KEY, ascii_data VARCHAR(1000), unicode_data NVARCHAR(1000), mixed_data NVARCHAR(MAX) ) - """ - ) + """) # Generate test data - ensure it fits in column sizes ascii_text = "This is ASCII text with numbers 12345." * 10 # ~400 chars @@ -1817,15 +1809,13 @@ def test_encoding_decoding_metadata_columns(db_connection): try: # Create table with Unicode column names if supported - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_metadata ( [normal_col] NVARCHAR(100), [column_with_unicode_测试] NVARCHAR(100), [special_chars_ñáéíóú] INT ) - """ - ) + """) # Test metadata decoding configuration db_connection.setdecoding(mssql_python.SQL_WMETADATA, encoding="utf-16le", ctype=SQL_WCHAR) @@ -1899,8 +1889,7 @@ def test_encoding_decoding_stress_test_comprehensive(db_connection): cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #stress_test_encoding ( id INT IDENTITY(1,1) PRIMARY KEY, ascii_text VARCHAR(500), @@ -1908,8 +1897,7 @@ def test_encoding_decoding_stress_test_comprehensive(db_connection): binary_data VARBINARY(500), mixed_content NVARCHAR(MAX) ) - """ - ) + """) # Generate diverse test data test_datasets = [] @@ -2030,15 +2018,13 @@ def test_encoding_decoding_sql_char_various_encodings(db_connection): try: # Create test table with VARCHAR columns (SQL_CHAR type) - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_sql_char_encodings ( id INT PRIMARY KEY, data_col VARCHAR(100), description VARCHAR(200) ) - """ - ) + """) # Define various encoding types to test with SQL_CHAR encoding_tests = [ @@ -2315,15 +2301,13 @@ def test_encoding_decoding_sql_char_with_unicode_fallback(db_connection): try: # Create test table with 
both VARCHAR and NVARCHAR - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_unicode_fallback ( id INT PRIMARY KEY, varchar_data VARCHAR(100), nvarchar_data NVARCHAR(100) ) - """ - ) + """) # Test Unicode data unicode_test_cases = [ @@ -2394,15 +2378,13 @@ def test_encoding_decoding_sql_char_native_character_sets(db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_native_chars ( id INT PRIMARY KEY, data VARCHAR(200), encoding_used VARCHAR(50) ) - """ - ) + """) # Test encoding-specific character sets that should work encoding_native_tests = [ @@ -2537,15 +2519,13 @@ def test_encoding_decoding_sql_char_boundary_encoding_cases(db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_encoding_boundaries ( id INT PRIMARY KEY, test_data VARCHAR(500), test_type VARCHAR(100) ) - """ - ) + """) # Test boundary cases for different encodings boundary_tests = [ @@ -2646,16 +2626,14 @@ def test_encoding_decoding_sql_char_unicode_issue_diagnosis(db_connection): try: # Create test table with both VARCHAR and NVARCHAR for comparison - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_unicode_issue ( id INT PRIMARY KEY, varchar_col VARCHAR(100), nvarchar_col NVARCHAR(100), encoding_used VARCHAR(50) ) - """ - ) + """) # Test Unicode strings that commonly cause issues test_strings = [ @@ -2701,11 +2679,9 @@ def test_encoding_decoding_sql_char_unicode_issue_diagnosis(db_connection): ) # Retrieve results - cursor.execute( - """ + cursor.execute(""" SELECT varchar_col, nvarchar_col FROM #test_unicode_issue WHERE id = 1 - """ - ) + """) result = cursor.fetchone() if result: @@ -2760,8 +2736,7 @@ def test_encoding_decoding_sql_char_best_practices_guide(db_connection): try: # Create test table demonstrating different column types - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_best_practices ( id INT PRIMARY KEY, -- ASCII-safe columns (VARCHAR with 
SQL_CHAR) @@ -2775,8 +2750,7 @@ def test_encoding_decoding_sql_char_best_practices_guide(db_connection): -- Mixed approach column safe_text VARCHAR(200) ) - """ - ) + """) # Configure optimal settings db_connection.setencoding(encoding="utf-8", ctype=SQL_CHAR) # For ASCII data @@ -4992,15 +4966,13 @@ def test_execute_executemany_encoding_consistency(db_connection): try: # Create test table that can handle both VARCHAR and NVARCHAR data - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_encoding_consistency ( id INT IDENTITY(1,1) PRIMARY KEY, varchar_col VARCHAR(1000) COLLATE SQL_Latin1_General_CP1_CI_AS, nvarchar_col NVARCHAR(1000) ) - """ - ) + """) # Test data with various encoding challenges # Using ASCII-safe characters that work across different encodings @@ -5053,13 +5025,11 @@ def test_execute_executemany_encoding_consistency(db_connection): ) # Retrieve immediately to verify encoding worked - cursor.execute( - """ + cursor.execute(""" SELECT varchar_col, nvarchar_col FROM #test_encoding_consistency WHERE id = (SELECT MAX(id) FROM #test_encoding_consistency) - """ - ) + """) result = cursor.fetchone() execute_results.append((result[0], result[1])) @@ -5084,13 +5054,11 @@ def test_execute_executemany_encoding_consistency(db_connection): ) # Retrieve all results from executemany - cursor.execute( - """ + cursor.execute(""" SELECT varchar_col, nvarchar_col FROM #test_encoding_consistency ORDER BY id - """ - ) + """) executemany_results = cursor.fetchall() # Verify executemany results match execute results @@ -5127,13 +5095,11 @@ def test_execute_executemany_encoding_consistency(db_connection): test_string, ) - cursor.execute( - """ + cursor.execute(""" SELECT nvarchar_col FROM #test_encoding_consistency WHERE id = (SELECT MAX(id) FROM #test_encoding_consistency) - """ - ) + """) result = cursor.fetchone() unicode_execute_results.append(result[0]) @@ -5160,13 +5126,11 @@ def test_execute_executemany_encoding_consistency(db_connection): 
unicode_params, ) - cursor.execute( - """ + cursor.execute(""" SELECT nvarchar_col FROM #test_encoding_consistency ORDER BY id - """ - ) + """) unicode_executemany_results = cursor.fetchall() # Compare Unicode results diff --git a/tests/test_017_spatial_types.py b/tests/test_017_spatial_types.py index 6daa07347..7fcb09c68 100644 --- a/tests/test_017_spatial_types.py +++ b/tests/test_017_spatial_types.py @@ -201,15 +201,13 @@ def test_geography_large_polygon_fetch(cursor, db_connection): def test_geography_mixed_with_other_types(cursor, db_connection): - cursor.execute( - """CREATE TABLE #geo_mixed ( + cursor.execute("""CREATE TABLE #geo_mixed ( id INT PRIMARY KEY IDENTITY(1,1), name NVARCHAR(100), geo_col GEOGRAPHY NULL, created_date DATETIME, score FLOAT - );""" - ) + );""") db_connection.commit() cursor.execute( @@ -332,13 +330,11 @@ def test_geography_description_metadata(cursor, db_connection): def test_geography_stdistance(cursor, db_connection): - cursor.execute( - """CREATE TABLE #geo_distance ( + cursor.execute("""CREATE TABLE #geo_distance ( id INT PRIMARY KEY IDENTITY(1,1), geo1 GEOGRAPHY NULL, geo2 GEOGRAPHY NULL - );""" - ) + );""") db_connection.commit() point2 = "POINT(-73.98500 40.75800)" # New York @@ -350,10 +346,8 @@ def test_geography_stdistance(cursor, db_connection): ) db_connection.commit() - row = cursor.execute( - """SELECT geo1.STDistance(geo2) as distance_meters - FROM #geo_distance;""" - ).fetchone() + row = cursor.execute("""SELECT geo1.STDistance(geo2) as distance_meters + FROM #geo_distance;""").fetchone() # Seattle to New York is approximately 3,870 km assert 3_500_000 < row[0] < 4_500_000 @@ -530,14 +524,12 @@ def test_geometry_description_metadata(cursor, db_connection): def test_geometry_mixed_with_other_types(cursor, db_connection): - cursor.execute( - """CREATE TABLE #geom_mixed ( + cursor.execute("""CREATE TABLE #geom_mixed ( id INT PRIMARY KEY IDENTITY(1,1), name NVARCHAR(100), geom_col GEOMETRY NULL, area FLOAT - );""" - ) + 
);""") db_connection.commit() cursor.execute( @@ -688,13 +680,11 @@ def test_hierarchyid_description_metadata(cursor, db_connection): def test_hierarchyid_tree_structure(cursor, db_connection): - cursor.execute( - """CREATE TABLE #hid_tree ( + cursor.execute("""CREATE TABLE #hid_tree ( id INT PRIMARY KEY IDENTITY(1,1), name NVARCHAR(100), node HIERARCHYID NULL - );""" - ) + );""") db_connection.commit() org_data = [ @@ -715,24 +705,20 @@ def test_hierarchyid_tree_structure(cursor, db_connection): db_connection.commit() # All descendants of VP Engineering (including self) - rows = cursor.execute( - """SELECT name, node.ToString() as path + rows = cursor.execute("""SELECT name, node.ToString() as path FROM #hid_tree WHERE node.IsDescendantOf(hierarchyid::Parse('/1/')) = 1 - ORDER BY node;""" - ).fetchall() + ORDER BY node;""").fetchall() assert len(rows) == 5 names = [r[0] for r in rows] assert names == ["VP Engineering", "Dev Manager", "Senior Dev", "Junior Dev", "QA Manager"] # Direct reports of Dev Manager - rows = cursor.execute( - """SELECT name, node.ToString() as path + rows = cursor.execute("""SELECT name, node.ToString() as path FROM #hid_tree WHERE node.GetAncestor(1) = hierarchyid::Parse('/1/1/') - ORDER BY node;""" - ).fetchall() + ORDER BY node;""").fetchall() assert len(rows) == 2 names = [r[0] for r in rows] @@ -740,14 +726,12 @@ def test_hierarchyid_tree_structure(cursor, db_connection): def test_hierarchyid_mixed_with_other_types(cursor, db_connection): - cursor.execute( - """CREATE TABLE #hid_mixed ( + cursor.execute("""CREATE TABLE #hid_mixed ( id INT PRIMARY KEY IDENTITY(1,1), name NVARCHAR(100), node HIERARCHYID NULL, salary DECIMAL(10,2) - );""" - ) + );""") db_connection.commit() cursor.execute( diff --git a/tests/test_017_varchar_cp1252_boundary.py b/tests/test_017_varchar_cp1252_boundary.py index 97b6a3a14..4c038ffdc 100644 --- a/tests/test_017_varchar_cp1252_boundary.py +++ b/tests/test_017_varchar_cp1252_boundary.py @@ -18,8 +18,7 @@ def 
test_varchar_cp1252_exact_length_boundary(db_connection): cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_cp1252_boundary ( id INT PRIMARY KEY, varchar_10 VARCHAR(10) COLLATE SQL_Latin1_General_CP1_CI_AS, @@ -27,8 +26,7 @@ def test_varchar_cp1252_exact_length_boundary(db_connection): varchar_50 VARCHAR(50) COLLATE SQL_Latin1_General_CP1_CI_AS, varchar_100 VARCHAR(100) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """ - ) + """) db_connection.commit() # Set encoding to CP1252 for VARCHAR columns @@ -139,14 +137,12 @@ def test_varchar_cp1252_length_variations(db_connection): cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_cp1252_variations ( id INT PRIMARY KEY, varchar_col VARCHAR(20) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """ - ) + """) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -202,14 +198,12 @@ def test_varchar_cp1252_mixed_ascii_nonascii(db_connection): cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_cp1252_mixed ( id INT PRIMARY KEY, varchar_col VARCHAR(15) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """ - ) + """) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -252,14 +246,12 @@ def test_varchar_cp1252_empty_and_null(db_connection): cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_cp1252_edge ( id INT PRIMARY KEY, varchar_col VARCHAR(10) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """ - ) + """) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -298,15 +290,13 @@ def test_varchar_cp1252_parameterized_query(db_connection): cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_cp1252_params ( id INT PRIMARY KEY, varchar_10 VARCHAR(10) COLLATE SQL_Latin1_General_CP1_CI_AS, varchar_20 VARCHAR(20) 
COLLATE SQL_Latin1_General_CP1_CI_AS ) - """ - ) + """) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -348,16 +338,14 @@ def test_varchar_cp1252_fetchall_multi_column_batch(db_connection): cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_cp1252_multi_col ( id INT PRIMARY KEY, col_a VARCHAR(10) COLLATE SQL_Latin1_General_CP1_CI_AS, col_b VARCHAR(15) COLLATE SQL_Latin1_General_CP1_CI_AS, col_c VARCHAR(20) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """ - ) + """) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -403,14 +391,12 @@ def test_varchar_cp1252_fetchmany_batch(db_connection): cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_cp1252_fetchmany ( id INT PRIMARY KEY, data VARCHAR(10) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """ - ) + """) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -455,8 +441,7 @@ def test_varchar_cp1252_mixed_types_batch(db_connection): cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_cp1252_mixed_types ( id INT PRIMARY KEY, name VARCHAR(20) COLLATE SQL_Latin1_General_CP1_CI_AS, @@ -464,8 +449,7 @@ def test_varchar_cp1252_mixed_types_batch(db_connection): city VARCHAR(15) COLLATE SQL_Latin1_General_CP1_CI_AS, age SMALLINT ) - """ - ) + """) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -510,14 +494,12 @@ def test_varchar_cp1252_lob_with_collation(db_connection): cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_cp1252_lob ( id INT PRIMARY KEY, data VARCHAR(MAX) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """ - ) + """) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -550,14 +532,12 @@ def test_varchar_cp1252_varying_lengths_per_row(db_connection): 
cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_cp1252_varying ( id INT PRIMARY KEY, data VARCHAR(50) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """ - ) + """) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -603,14 +583,12 @@ def test_varchar_cp1252_null_interspersed_batch(db_connection): cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_cp1252_nulls ( id INT PRIMARY KEY, data VARCHAR(20) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """ - ) + """) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) @@ -661,14 +639,12 @@ def test_varchar_cp1252_decode_fallback_returns_bytes(db_connection): cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_decode_fallback_bytes ( id INT PRIMARY KEY, data VARCHAR(20) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """ - ) + """) db_connection.commit() # Insert valid CP1252 data @@ -711,14 +687,12 @@ def test_varchar_cp1252_fetchall_many_rows(db_connection): cursor = db_connection.cursor() try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_cp1252_many ( id INT PRIMARY KEY, data VARCHAR(30) COLLATE SQL_Latin1_General_CP1_CI_AS ) - """ - ) + """) db_connection.commit() db_connection.setencoding(encoding="cp1252", ctype=SQL_CHAR) diff --git a/tests/test_018_polars_pandas_integration.py b/tests/test_018_polars_pandas_integration.py index 9d857afdb..529f6d8e5 100644 --- a/tests/test_018_polars_pandas_integration.py +++ b/tests/test_018_polars_pandas_integration.py @@ -78,8 +78,7 @@ def test_datetimeoffset_type_code_is_datetime_datetime(self, cursor): def test_all_types_are_isclass(self, cursor): """Every type_code in cursor.description must pass inspect.isclass().""" - cursor.execute( - """ + cursor.execute(""" SELECT CAST(1 AS INT) AS i, CAST(1 AS SMALLINT) AS si, @@ -104,8 +103,7 @@ def 
test_all_types_are_isclass(self, cursor): CAST(0x01 AS VARBINARY(10)) AS vbin, NEWID() AS guid, CAST('' AS XML) AS x - """ - ) + """) for desc in cursor.description: col_name = desc[0] type_code = desc[1] @@ -174,22 +172,18 @@ def test_polars_date_with_nulls(self, db_connection): cursor = db_connection.cursor() try: cursor.execute("DROP TABLE IF EXISTS #polars_null_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #polars_null_test ( id INT, d DATE ) - """ - ) - cursor.execute( - """ + """) + cursor.execute(""" INSERT INTO #polars_null_test VALUES (1, '2024-01-15'), (2, NULL), (3, '2024-03-20') - """ - ) + """) db_connection.commit() df = pl.read_database( diff --git a/tests/test_019_bulkcopy.py b/tests/test_019_bulkcopy.py index 204276ecc..7542139d5 100644 --- a/tests/test_019_bulkcopy.py +++ b/tests/test_019_bulkcopy.py @@ -189,15 +189,13 @@ def test_bulkcopy_with_server_synonyms(conn_str): # Create table cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id INT, name NVARCHAR(50), value FLOAT ) - """ - ) + """) conn.commit() # Test data @@ -238,15 +236,13 @@ def test_bulkcopy_with_server_synonyms(conn_str): # Create table cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id INT, name NVARCHAR(50), value FLOAT ) - """ - ) + """) conn.commit() # Test data diff --git a/tests/test_cache_invalidation.py b/tests/test_cache_invalidation.py index 2c4bb48b3..fa1d34e2f 100644 --- a/tests/test_cache_invalidation.py +++ b/tests/test_cache_invalidation.py @@ -22,38 +22,29 @@ def test_cursor_cache_invalidation_different_column_orders(db_connection): try: # Setup test tables with different column orders and types - cursor.execute( - """ + cursor.execute(""" IF OBJECT_ID('tempdb..#test_cache_table1') IS NOT NULL DROP TABLE #test_cache_table1 - """ - ) - cursor.execute( - """ + """) + cursor.execute(""" CREATE 
TABLE #test_cache_table1 ( id INT, name VARCHAR(50), age INT, salary DECIMAL(10,2) ) - """ - ) - cursor.execute( - """ + """) + cursor.execute(""" INSERT INTO #test_cache_table1 VALUES (1, 'Alice', 30, 50000.00), (2, 'Bob', 25, 45000.00) - """ - ) + """) - cursor.execute( - """ + cursor.execute(""" IF OBJECT_ID('tempdb..#test_cache_table2') IS NOT NULL DROP TABLE #test_cache_table2 - """ - ) - cursor.execute( - """ + """) + cursor.execute(""" CREATE TABLE #test_cache_table2 ( salary DECIMAL(10,2), age INT, @@ -61,15 +52,12 @@ def test_cursor_cache_invalidation_different_column_orders(db_connection): name VARCHAR(50), bonus FLOAT ) - """ - ) - cursor.execute( - """ + """) + cursor.execute(""" INSERT INTO #test_cache_table2 VALUES (60000.00, 35, 3, 'Charlie', 5000.5), (55000.00, 28, 4, 'Diana', 3000.75) - """ - ) + """) # Execute first query - columns: id, name, age, salary cursor.execute("SELECT id, name, age, salary FROM #test_cache_table1 ORDER BY id") @@ -140,13 +128,11 @@ def test_cursor_cache_invalidation_stored_procedure_multiple_resultsets(db_conne # the scenario where cached maps need to be invalidated between different queries # First result set: user info (3 columns) - cursor.execute( - """ + cursor.execute(""" SELECT 1 as user_id, 'John' as username, 'john@example.com' as email UNION ALL SELECT 2, 'Jane', 'jane@example.com' - """ - ) + """) # Validate first result set - user info assert len(cursor.description) == 3 @@ -161,13 +147,11 @@ def test_cursor_cache_invalidation_stored_procedure_multiple_resultsets(db_conne assert user_rows[0].email == "john@example.com" # Execute second query with completely different structure - cursor.execute( - """ + cursor.execute(""" SELECT 101 as product_id, 'Widget A' as product_name, 29.99 as price, 100 as stock_qty UNION ALL SELECT 102, 'Widget B', 39.99, 50 - """ - ) + """) # Validate second result set - product info (different structure) assert len(cursor.description) == 4 @@ -211,14 +195,11 @@ def 
test_cursor_cache_invalidation_metadata_then_select(db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" IF OBJECT_ID('tempdb..#test_metadata_table') IS NOT NULL DROP TABLE #test_metadata_table - """ - ) - cursor.execute( - """ + """) + cursor.execute(""" CREATE TABLE #test_metadata_table ( meta_id INT PRIMARY KEY, meta_name VARCHAR(100), @@ -226,19 +207,15 @@ def test_cursor_cache_invalidation_metadata_then_select(db_connection): meta_date DATETIME, meta_flag BIT ) - """ - ) - cursor.execute( - """ + """) + cursor.execute(""" INSERT INTO #test_metadata_table VALUES (1, 'Config1', 123.4567, '2023-01-15 10:30:00', 1), (2, 'Config2', 987.6543, '2023-02-20 14:45:00', 0) - """ - ) + """) # First: Execute a metadata-only query (no actual data rows) - cursor.execute( - """ + cursor.execute(""" SELECT COLUMN_NAME, DATA_TYPE, @@ -248,8 +225,7 @@ def test_cursor_cache_invalidation_metadata_then_select(db_connection): WHERE TABLE_NAME = 'test_metadata_table' AND TABLE_SCHEMA = 'tempdb' ORDER BY ORDINAL_POSITION - """ - ) + """) # Verify metadata result structure meta_description = cursor.description @@ -323,30 +299,24 @@ def test_cursor_cache_invalidation_fetch_methods_consistency(db_connection): try: # Create test data - cursor.execute( - """ + cursor.execute(""" IF OBJECT_ID('tempdb..#test_fetch_cache') IS NOT NULL DROP TABLE #test_fetch_cache - """ - ) - cursor.execute( - """ + """) + cursor.execute(""" CREATE TABLE #test_fetch_cache ( first_col VARCHAR(20), second_col INT, third_col DECIMAL(8,2) ) - """ - ) - cursor.execute( - """ + """) + cursor.execute(""" INSERT INTO #test_fetch_cache VALUES ('Row1', 10, 100.50), ('Row2', 20, 200.75), ('Row3', 30, 300.25), ('Row4', 40, 400.00) - """ - ) + """) # Execute first query with specific column order cursor.execute( @@ -410,11 +380,9 @@ def test_cache_specific_close_cleanup_validation(db_connection): try: # Setup test data - cursor.execute( - """ + cursor.execute(""" SELECT 1 as cache_col1, 
'test' as cache_col2, 99.99 as cache_col3 - """ - ) + """) # Verify cache is populated assert cursor.description is not None @@ -559,15 +527,12 @@ def test_real_stored_procedure_cache_validation(db_connection): try: # Create a temporary stored procedure with multiple result sets - cursor.execute( - """ + cursor.execute(""" IF OBJECT_ID('tempdb..#sp_test_cache') IS NOT NULL DROP PROCEDURE #sp_test_cache - """ - ) + """) - cursor.execute( - """ + cursor.execute(""" CREATE PROCEDURE #sp_test_cache AS BEGIN @@ -580,8 +545,7 @@ def test_real_stored_procedure_cache_validation(db_connection): -- Third result set: Summary (yet another structure) SELECT GETDATE() as report_date, 'Cache Test' as report_type, 1 as version_num; END - """ - ) + """) # Execute the stored procedure cursor.execute("EXEC #sp_test_cache") From cf559989c69874554ad564c48e33b7c9e9cb2c26 Mon Sep 17 00:00:00 2001 From: Jahnvi Thakkar Date: Mon, 2 Mar 2026 16:18:53 +0530 Subject: [PATCH 4/6] Adding tests for uncovered lines --- mssql_python/cursor.py | 15 +- tests/test_001_globals.py | 46 ++ tests/test_004_cursor.py | 976 ++++++++++++++++++++++++++------------ 3 files changed, 727 insertions(+), 310 deletions(-) diff --git a/mssql_python/cursor.py b/mssql_python/cursor.py index df6779a41..52fcbfd20 100644 --- a/mssql_python/cursor.py +++ b/mssql_python/cursor.py @@ -2296,15 +2296,28 @@ def executemany( # pylint: disable=too-many-locals,too-many-branches,too-many-s check_error(ddbc_sql_const.SQL_HANDLE_STMT.value, self.hstmt, ret) self.rowcount = ddbc_bindings.DDBCSQLRowCount(self.hstmt) self.last_executed_stmt = operation - self._initialize_description() + + # Fetch column metadata (e.g. 
for INSERT … OUTPUT) + column_metadata = [] + try: + ddbc_bindings.DDBCSQLDescribeCol(self.hstmt, column_metadata) + self._initialize_description(column_metadata) + except Exception: # pylint: disable=broad-exception-caught + self.description = None if self.description: self.rowcount = -1 self._reset_rownumber() + self._cached_column_map = { + col_desc[0]: i for i, col_desc in enumerate(self.description) + } + self._cached_converter_map = self._build_converter_map() self._uuid_str_indices = self._compute_uuid_str_indices() else: self.rowcount = ddbc_bindings.DDBCSQLRowCount(self.hstmt) self._clear_rownumber() + self._cached_column_map = None + self._cached_converter_map = None self._uuid_str_indices = None finally: # Reset input sizes after execution diff --git a/tests/test_001_globals.py b/tests/test_001_globals.py index 03e600066..6b60c7d36 100644 --- a/tests/test_001_globals.py +++ b/tests/test_001_globals.py @@ -869,3 +869,49 @@ def test_connection_native_uuid_attribute(): params = sig.parameters assert "native_uuid" in params, "Connection.__init__ should accept native_uuid parameter" assert params["native_uuid"].default is None + + +def test_compute_uuid_str_indices_no_description(db_connection): + """Test _compute_uuid_str_indices returns None when cursor has no description.""" + cursor = db_connection.cursor() + try: + # Execute a statement that produces no result set + cursor.execute( + "CREATE TABLE #no_desc_uuid_test (id INT); " "INSERT INTO #no_desc_uuid_test VALUES (1)" + ) + # description should be None after a non-SELECT statement + assert cursor.description is None + + # Directly call the helper — should return None via the early guard + result = cursor._compute_uuid_str_indices() + assert ( + result is None + ), "_compute_uuid_str_indices should return None when description is None" + finally: + cursor.execute("DROP TABLE IF EXISTS #no_desc_uuid_test") + cursor.close() + + +def test_stringify_uuids_with_tuple_values(): + """Test Row._stringify_uuids 
converts tuple values to list for in-place mutation.""" + import uuid as _uuid + from mssql_python.row import Row + + test_uuid = _uuid.UUID("12345678-1234-5678-1234-567812345678") + + # Pass values as a tuple (not a list) to trigger the isinstance guard + row = Row( + (42, test_uuid, "hello"), + {"id": 0, "guid": 1, "name": 2}, + cursor=None, + converter_map=None, + uuid_str_indices=(1,), + ) + + # The UUID should have been stringified to uppercase + assert row[1] == "12345678-1234-5678-1234-567812345678".upper() + # Other values should be unaffected + assert row[0] == 42 + assert row[2] == "hello" + # Internal storage should now be a list (converted from tuple) + assert isinstance(row._values, list) diff --git a/tests/test_004_cursor.py b/tests/test_004_cursor.py index 05dc3b59b..91c0cbc9d 100644 --- a/tests/test_004_cursor.py +++ b/tests/test_004_cursor.py @@ -182,13 +182,15 @@ def test_mixed_empty_and_null_values(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_empty_vs_null") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_empty_vs_null ( id INT, text_col NVARCHAR(100), binary_col VARBINARY(100) ) - """) + """ + ) db_connection.commit() # Insert mix of empty and NULL values @@ -886,13 +888,15 @@ def test_rowcount(cursor, db_connection): cursor.execute("INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe3');") assert cursor.rowcount == 1, "Rowcount should be 1 after third insert" - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe4'), ('JohnDoe5'), ('JohnDoe6'); - """) + """ + ) assert cursor.rowcount == 3, "Rowcount should be 3 after inserting multiple rows" cursor.execute("SELECT * FROM #pytest_test_rowcount;") @@ -988,12 +992,14 @@ def test_fetchmany_size_zero_lob(cursor, db_connection): """Test fetchmany with size=0 for LOB columns""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_lob") - cursor.execute(""" + cursor.execute( + """ 
CREATE TABLE #test_fetchmany_lob ( id INT PRIMARY KEY, lob_data NVARCHAR(MAX) ) - """) + """ + ) # Insert test data test_data = [(1, "First LOB data"), (2, "Second LOB data"), (3, "Third LOB data")] @@ -1018,12 +1024,14 @@ def test_fetchmany_more_than_exist_lob(cursor, db_connection): """Test fetchmany requesting more rows than exist with LOB columns""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_lob_more") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_fetchmany_lob_more ( id INT PRIMARY KEY, lob_data NVARCHAR(MAX) ) - """) + """ + ) # Insert only 3 rows test_data = [(1, "First LOB data"), (2, "Second LOB data"), (3, "Third LOB data")] @@ -1057,12 +1065,14 @@ def test_fetchmany_empty_result_lob(cursor, db_connection): """Test fetchmany on empty result set with LOB columns""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_lob_empty") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_fetchmany_lob_empty ( id INT PRIMARY KEY, lob_data NVARCHAR(MAX) ) - """) + """ + ) db_connection.commit() # Query empty table @@ -1085,12 +1095,14 @@ def test_fetchmany_very_large_lob(cursor, db_connection): """Test fetchmany with very large LOB column data""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_large_lob") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_fetchmany_large_lob ( id INT PRIMARY KEY, large_lob NVARCHAR(MAX) ) - """) + """ + ) # Create very large data (10000 characters) large_data = "x" * 10000 @@ -1140,12 +1152,14 @@ def test_fetchmany_mixed_lob_sizes(cursor, db_connection): """Test fetchmany with mixed LOB sizes including empty and NULL""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_mixed_lob") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_fetchmany_mixed_lob ( id INT PRIMARY KEY, mixed_lob NVARCHAR(MAX) ) - """) + """ + ) # Mix of sizes: empty, NULL, small, medium, large test_data = [ @@ -1273,12 +1287,14 @@ def 
test_executemany_empty_strings(cursor, db_connection): """Test executemany with empty strings - regression test for Unix UTF-16 conversion issue""" try: # Create test table for empty string testing - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_empty_batch ( id INT, data NVARCHAR(50) ) - """) + """ + ) # Clear any existing data cursor.execute("DELETE FROM #pytest_empty_batch") @@ -1319,7 +1335,8 @@ def test_executemany_empty_strings_various_types(cursor, db_connection): """Test executemany with empty strings in different column types""" try: # Create test table with different string types - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_string_types ( id INT, varchar_col VARCHAR(50), @@ -1327,7 +1344,8 @@ def test_executemany_empty_strings_various_types(cursor, db_connection): text_col TEXT, ntext_col NTEXT ) - """) + """ + ) # Clear any existing data cursor.execute("DELETE FROM #pytest_string_types") @@ -1368,12 +1386,14 @@ def test_executemany_unicode_and_empty_strings(cursor, db_connection): """Test executemany with mix of Unicode characters and empty strings""" try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_unicode_test ( id INT, data NVARCHAR(100) ) - """) + """ + ) # Clear any existing data cursor.execute("DELETE FROM #pytest_unicode_test") @@ -1418,12 +1438,14 @@ def test_executemany_large_batch_with_empty_strings(cursor, db_connection): """Test executemany with large batch containing empty strings""" try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_large_batch ( id INT, data NVARCHAR(50) ) - """) + """ + ) # Clear any existing data cursor.execute("DELETE FROM #pytest_large_batch") @@ -1476,12 +1498,14 @@ def test_executemany_compare_with_execute(cursor, db_connection): """Test that executemany produces same results as individual execute calls""" try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE 
#pytest_compare_test ( id INT, data NVARCHAR(50) ) - """) + """ + ) # Test data with empty strings test_data = [ @@ -1534,13 +1558,15 @@ def test_executemany_edge_cases_empty_strings(cursor, db_connection): """Test executemany edge cases with empty strings and special characters""" try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_edge_cases ( id INT, varchar_data VARCHAR(100), nvarchar_data NVARCHAR(100) ) - """) + """ + ) # Clear any existing data cursor.execute("DELETE FROM #pytest_edge_cases") @@ -1594,12 +1620,14 @@ def test_executemany_null_vs_empty_string(cursor, db_connection): """Test that executemany correctly distinguishes between NULL and empty string""" try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_null_vs_empty ( id INT, data NVARCHAR(50) ) - """) + """ + ) # Clear any existing data cursor.execute("DELETE FROM #pytest_null_vs_empty") @@ -1664,12 +1692,14 @@ def test_executemany_binary_data_edge_cases(cursor, db_connection): """Test executemany with binary data and empty byte arrays""" try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_binary_test ( id INT, binary_data VARBINARY(100) ) - """) + """ + ) # Clear any existing data cursor.execute("DELETE FROM #pytest_binary_test") @@ -1831,7 +1861,8 @@ def test_executemany_mixed_null_and_typed_values(cursor, db_connection): """Test executemany with randomly mixed NULL and non-NULL values across multiple columns and rows (50 rows, 10 columns).""" try: # Create table with 10 columns of various types - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_empty_params ( col1 INT, col2 VARCHAR(50), @@ -1844,7 +1875,8 @@ def test_executemany_mixed_null_and_typed_values(cursor, db_connection): col9 DATE, col10 REAL ) - """) + """ + ) # Generate 50 rows with randomly mixed NULL and non-NULL values across 10 columns data = [] @@ -1908,7 +1940,8 @@ def 
test_executemany_multi_column_null_arrays(cursor, db_connection): """Test executemany with multi-column NULL arrays (50 records, 8 columns).""" try: # Create table with 8 columns of various types - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_null_arrays ( col1 INT, col2 VARCHAR(100), @@ -1919,7 +1952,8 @@ def test_executemany_multi_column_null_arrays(cursor, db_connection): col7 BIGINT, col8 DATE ) - """) + """ + ) # Generate 50 rows with all NULL values across 8 columns data = [(None, None, None, None, None, None, None, None) for _ in range(50)] @@ -1939,12 +1973,14 @@ def test_executemany_multi_column_null_arrays(cursor, db_connection): assert null_count == 50, f"Expected 50 NULLs in col{col_num}, got {null_count}" # Verify no non-NULL values exist - cursor.execute(""" + cursor.execute( + """ SELECT COUNT(*) FROM #pytest_null_arrays WHERE col1 IS NOT NULL OR col2 IS NOT NULL OR col3 IS NOT NULL OR col4 IS NOT NULL OR col5 IS NOT NULL OR col6 IS NOT NULL OR col7 IS NOT NULL OR col8 IS NOT NULL - """) + """ + ) non_null_count = cursor.fetchone()[0] assert non_null_count == 0, f"Expected 0 non-NULL values, got {non_null_count}" @@ -1983,7 +2019,8 @@ def test_executemany_concurrent_null_parameters(db_connection): # Create table with db_connection.cursor() as cursor: - cursor.execute(f""" + cursor.execute( + f""" IF OBJECT_ID('{table_name}', 'U') IS NOT NULL DROP TABLE {table_name} @@ -1995,7 +2032,8 @@ def test_executemany_concurrent_null_parameters(db_connection): col3 FLOAT, col4 DATETIME ) - """) + """ + ) db_connection.commit() # Execute multiple sequential insert operations @@ -2250,12 +2288,14 @@ def test_insert_data_for_join(cursor, db_connection): def test_join_operations(cursor): """Test join operations""" try: - cursor.execute(""" + cursor.execute( + """ SELECT e.name, d.department_name, p.project_name FROM #pytest_employees e JOIN #pytest_departments d ON e.department_id = d.department_id JOIN #pytest_projects p ON e.employee_id = 
p.employee_id - """) + """ + ) rows = cursor.fetchall() assert len(rows) == 3, "Join operation returned incorrect number of rows" assert rows[0] == [ @@ -2345,10 +2385,12 @@ def test_execute_stored_procedure_with_parameters(cursor): def test_execute_stored_procedure_without_parameters(cursor): """Test executing stored procedure without parameters""" try: - cursor.execute(""" + cursor.execute( + """ DECLARE @EmployeeID INT = 2 EXEC dbo.GetEmployeeProjects @EmployeeID - """) + """ + ) rows = cursor.fetchall() assert ( len(rows) == 1 @@ -2568,21 +2610,25 @@ def test_row_attribute_access(cursor, db_connection): """Test accessing row values by column name as attributes""" try: # Create test table with multiple columns - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_row_attr_test ( id INT PRIMARY KEY, name VARCHAR(50), email VARCHAR(100), age INT ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_row_attr_test (id, name, email, age) VALUES (1, 'John Doe', 'john@example.com', 30) - """) + """ + ) db_connection.commit() # Test attribute access @@ -2678,13 +2724,15 @@ def test_row_comparison_with_list(cursor, db_connection): def test_row_string_representation(cursor, db_connection): """Test Row string and repr representations""" try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_row_test ( id INT PRIMARY KEY, text_col NVARCHAR(50), null_col INT ) - """) + """ + ) db_connection.commit() cursor.execute( @@ -2717,13 +2765,15 @@ def test_row_string_representation(cursor, db_connection): def test_row_column_mapping(cursor, db_connection): """Test Row column name mapping""" try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_row_test ( FirstColumn INT PRIMARY KEY, Second_Column NVARCHAR(50), [Complex Name!] 
INT ) - """) + """ + ) db_connection.commit() cursor.execute( @@ -3206,10 +3256,12 @@ def test_execute_rowcount_chaining(cursor, db_connection): assert count == 1, "INSERT should affect 1 row" # Test multiple INSERT rowcount chaining - count = cursor.execute(""" + count = cursor.execute( + """ INSERT INTO #test_chaining (id, value) VALUES (2, 'test2'), (3, 'test3'), (4, 'test4') - """).rowcount + """ + ).rowcount assert count == 3, "Multiple INSERT should affect 3 rows" # Test UPDATE rowcount chaining @@ -3444,7 +3496,8 @@ def test_cursor_next_with_different_data_types(cursor, db_connection): """Test next() functionality with various data types""" try: # Create test table with various data types - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_next_types ( id INT, name NVARCHAR(50), @@ -3453,7 +3506,8 @@ def test_cursor_next_with_different_data_types(cursor, db_connection): created_date DATE, created_time DATETIME ) - """) + """ + ) db_connection.commit() # Insert test data with different types @@ -3645,14 +3699,16 @@ def test_execute_chaining_compatibility_examples(cursor, db_connection): """Test real-world chaining examples""" try: # Create users table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #users ( user_id INT IDENTITY(1,1) PRIMARY KEY, user_name NVARCHAR(50), last_logon DATETIME, status NVARCHAR(20) ) - """) + """ + ) db_connection.commit() # Insert test users @@ -4351,7 +4407,8 @@ def test_fetchval_different_data_types(cursor, db_connection): try: # Create test table with different data types drop_table_if_exists(cursor, "#pytest_fetchval_types") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_fetchval_types ( int_col INTEGER, float_col FLOAT, @@ -4363,14 +4420,17 @@ def test_fetchval_different_data_types(cursor, db_connection): date_col DATE, time_col TIME ) - """) + """ + ) # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_fetchval_types VALUES (123, 45.67, 89.12, 'ASCII 
text', N'Unicode text', 1, '2024-05-20 12:34:56', '2024-05-20', '12:34:56') - """) + """ + ) db_connection.commit() # Test different data types @@ -5668,21 +5728,25 @@ def test_cursor_rollback_data_consistency(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_rollback_orders") drop_table_if_exists(cursor, "#pytest_rollback_customers") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_rollback_customers ( id INTEGER PRIMARY KEY, name VARCHAR(50) ) - """) + """ + ) - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_rollback_orders ( id INTEGER PRIMARY KEY, customer_id INTEGER, amount DECIMAL(10,2), FOREIGN KEY (customer_id) REFERENCES #pytest_rollback_customers(id) ) - """) + """ + ) cursor.commit() # Insert initial data @@ -6164,26 +6228,32 @@ def test_tables_setup(cursor, db_connection): cursor.execute("DROP VIEW IF EXISTS pytest_tables_schema.test_view") # Create regular table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_tables_schema.regular_table ( id INT PRIMARY KEY, name VARCHAR(100) ) - """) + """ + ) # Create another table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_tables_schema.another_table ( id INT PRIMARY KEY, description VARCHAR(200) ) - """) + """ + ) # Create a view - cursor.execute(""" + cursor.execute( + """ CREATE VIEW pytest_tables_schema.test_view AS SELECT id, name FROM pytest_tables_schema.regular_table - """) + """ + ) db_connection.commit() except Exception as e: @@ -6535,12 +6605,14 @@ def test_emoji_round_trip(cursor, db_connection): "1🚀' OR '1'='1", ] - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_emoji_test ( id INT IDENTITY PRIMARY KEY, content NVARCHAR(MAX) ); - """) + """ + ) db_connection.commit() for text in test_inputs: @@ -6692,14 +6764,16 @@ def test_empty_values_fetchmany(cursor, db_connection): try: # Create comprehensive test table drop_table_if_exists(cursor, "#pytest_fetchmany_empty") - cursor.execute(""" + cursor.execute( 
+ """ CREATE TABLE #pytest_fetchmany_empty ( id INT, varchar_col VARCHAR(50), nvarchar_col NVARCHAR(50), binary_col VARBINARY(50) ) - """) + """ + ) db_connection.commit() # Insert multiple rows with empty values @@ -6824,7 +6898,8 @@ def test_batch_fetch_empty_values_no_assertion_failure(cursor, db_connection): try: # Create comprehensive test table drop_table_if_exists(cursor, "#pytest_batch_empty_assertions") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_batch_empty_assertions ( id INT, empty_varchar VARCHAR(100), @@ -6834,24 +6909,29 @@ def test_batch_fetch_empty_values_no_assertion_failure(cursor, db_connection): null_nvarchar NVARCHAR(100), null_binary VARBINARY(100) ) - """) + """ + ) db_connection.commit() # Insert rows with mix of empty and NULL values - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_batch_empty_assertions VALUES (1, '', '', 0x, NULL, NULL, NULL), (2, '', '', 0x, NULL, NULL, NULL), (3, '', '', 0x, NULL, NULL, NULL) - """) + """ + ) db_connection.commit() # Test fetchall - should not trigger any assertions about dataLen - cursor.execute(""" + cursor.execute( + """ SELECT empty_varchar, empty_nvarchar, empty_binary, null_varchar, null_nvarchar, null_binary FROM #pytest_batch_empty_assertions ORDER BY id - """) + """ + ) rows = cursor.fetchall() assert len(rows) == 3, "Should return 3 rows" @@ -6868,10 +6948,12 @@ def test_batch_fetch_empty_values_no_assertion_failure(cursor, db_connection): assert row[5] is None, f"Row {i+1} null_binary should be None" # Test fetchmany - should also not trigger assertions - cursor.execute(""" + cursor.execute( + """ SELECT empty_nvarchar, empty_binary FROM #pytest_batch_empty_assertions ORDER BY id - """) + """ + ) # Fetch in batches first_batch = cursor.fetchmany(2) @@ -6911,13 +6993,15 @@ def test_executemany_utf16_length_validation(cursor, db_connection): try: # Create test table with small column size to trigger validation drop_table_if_exists(cursor, 
"#pytest_utf16_validation") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_utf16_validation ( id INT, short_text NVARCHAR(5), -- Small column to test length validation medium_text NVARCHAR(10) -- Medium column for edge cases ) - """) + """ + ) db_connection.commit() # Test 1: Valid strings that should work on all platforms @@ -7063,12 +7147,14 @@ def test_binary_data_over_8000_bytes(cursor, db_connection): try: # Create test table with VARBINARY(MAX) to handle large data drop_table_if_exists(cursor, "#pytest_small_binary") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_small_binary ( id INT, large_binary VARBINARY(MAX) ) - """) + """ + ) # Test data that fits within both parameter and fetch limits (< 4096 bytes) medium_data = b"B" * 3000 # 3,000 bytes - under both limits @@ -7102,12 +7188,14 @@ def test_varbinarymax_insert_fetch(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_varbinarymax") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_varbinarymax ( id INT, binary_data VARBINARY(MAX) ) - """) + """ + ) # Prepare test data - use moderate sizes to guarantee LOB fetch path (line 867-868) efficiently test_data = [ @@ -7174,12 +7262,14 @@ def test_all_empty_binaries(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_all_empty_binary") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_all_empty_binary ( id INT, empty_binary VARBINARY(100) ) - """) + """ + ) # Insert multiple rows with only empty binary data test_data = [ @@ -7218,12 +7308,14 @@ def test_mixed_bytes_and_bytearray_types(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_mixed_binary_types") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_mixed_binary_types ( id INT, binary_data VARBINARY(100) ) - """) + """ + ) # Test data mixing bytes and bytearray for the same column test_data = [ @@ -7278,12 +7370,14 @@ 
def test_binary_mostly_small_one_large(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_mixed_size_binary") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_mixed_size_binary ( id INT, binary_data VARBINARY(MAX) ) - """) + """ + ) # Create large binary value within both parameter and fetch limits (< 4096 bytes) large_binary = b"X" * 3500 # 3,500 bytes - under both limits @@ -7343,12 +7437,14 @@ def test_varbinarymax_insert_fetch_null(cursor, db_connection): """Test insertion and retrieval of NULL value in VARBINARY(MAX) column.""" try: drop_table_if_exists(cursor, "#pytest_varbinarymax_null") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_varbinarymax_null ( id INT, binary_data VARBINARY(MAX) ) - """) + """ + ) # Insert a row with NULL for binary_data cursor.execute( @@ -7378,13 +7474,15 @@ def test_sql_double_type(cursor, db_connection): """Test SQL_DOUBLE type (FLOAT(53)) to cover line 3213 in dispatcher.""" try: drop_table_if_exists(cursor, "#pytest_double_type") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_double_type ( id INT PRIMARY KEY, double_col FLOAT(53), float_col FLOAT ) - """) + """ + ) # Insert test data with various double precision values test_data = [ @@ -7434,13 +7532,15 @@ def test_null_guid_type(cursor, db_connection): try: mssql_python.native_uuid = True drop_table_if_exists(cursor, "#pytest_null_guid") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_null_guid ( id INT PRIMARY KEY, guid_col UNIQUEIDENTIFIER, guid_nullable UNIQUEIDENTIFIER NULL ) - """) + """ + ) # Insert test data with NULL and non-NULL GUIDs test_guid = uuid.uuid4() @@ -7493,12 +7593,14 @@ def test_only_null_and_empty_binary(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_null_empty_binary") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_null_empty_binary ( id INT, binary_data VARBINARY(100) ) - """) + """ + 
) # Test data with only NULL and empty values test_data = [ @@ -7821,7 +7923,8 @@ def test_money_smallmoney_insert_fetch(cursor, db_connection): """Test inserting and retrieving valid MONEY and SMALLMONEY values including boundaries and typical data""" try: drop_table_if_exists(cursor, "#pytest_money_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, @@ -7829,7 +7932,8 @@ def test_money_smallmoney_insert_fetch(cursor, db_connection): d DECIMAL(19,4), n NUMERIC(10,4) ) - """) + """ + ) db_connection.commit() # Max values @@ -7919,13 +8023,15 @@ def test_money_smallmoney_insert_fetch(cursor, db_connection): def test_money_smallmoney_null_handling(cursor, db_connection): """Test that NULL values for MONEY and SMALLMONEY are stored and retrieved correctly""" try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """) + """ + ) db_connection.commit() # Row with both NULLs @@ -7975,13 +8081,15 @@ def test_money_smallmoney_null_handling(cursor, db_connection): def test_money_smallmoney_roundtrip(cursor, db_connection): """Test inserting and retrieving MONEY and SMALLMONEY using decimal.Decimal roundtrip""" try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """) + """ + ) db_connection.commit() values = (decimal.Decimal("12345.6789"), decimal.Decimal("987.6543")) @@ -8005,13 +8113,15 @@ def test_money_smallmoney_boundaries(cursor, db_connection): """Test boundary values for MONEY and SMALLMONEY types are handled correctly""" try: drop_table_if_exists(cursor, "#pytest_money_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """) + """ + ) db_connection.commit() # Insert max boundary @@ -8051,13 +8161,15 @@ def 
test_money_smallmoney_boundaries(cursor, db_connection): def test_money_smallmoney_invalid_values(cursor, db_connection): """Test that invalid or out-of-range MONEY and SMALLMONEY values raise errors""" try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """) + """ + ) db_connection.commit() # Out of range MONEY @@ -8088,13 +8200,15 @@ def test_money_smallmoney_invalid_values(cursor, db_connection): def test_money_smallmoney_roundtrip_executemany(cursor, db_connection): """Test inserting and retrieving MONEY and SMALLMONEY using executemany with decimal.Decimal""" try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """) + """ + ) db_connection.commit() test_data = [ @@ -8128,13 +8242,15 @@ def test_money_smallmoney_roundtrip_executemany(cursor, db_connection): def test_money_smallmoney_executemany_null_handling(cursor, db_connection): """Test inserting NULLs into MONEY and SMALLMONEY using executemany""" try: - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """) + """ + ) db_connection.commit() rows = [ @@ -8192,12 +8308,14 @@ def test_uuid_insert_and_select_none(cursor, db_connection): table_name = "#pytest_uuid_nullable" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, name NVARCHAR(50) ) - """) + """ + ) db_connection.commit() # Insert a row with None for the UUID @@ -8223,12 +8341,14 @@ def test_insert_multiple_uuids(cursor, db_connection): try: mssql_python.native_uuid = True cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER PRIMARY KEY, description NVARCHAR(50) ) - """) + """ + ) db_connection.commit() 
# Prepare test data @@ -8267,12 +8387,14 @@ def test_fetchmany_uuids(cursor, db_connection): try: mssql_python.native_uuid = True cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER PRIMARY KEY, description NVARCHAR(50) ) - """) + """ + ) db_connection.commit() uuids_to_insert = {f"Item {i}": uuid.uuid4() for i in range(10)} @@ -8309,12 +8431,14 @@ def test_uuid_insert_with_none(cursor, db_connection): table_name = "#pytest_uuid_none" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, name NVARCHAR(50) ) - """) + """ + ) db_connection.commit() cursor.execute(f"INSERT INTO {table_name} (id, name) VALUES (?, ?)", [None, "Alice"]) @@ -8413,12 +8537,14 @@ def test_executemany_uuid_insert_and_select(cursor, db_connection): try: # Drop and create a temporary table for the test cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER PRIMARY KEY, description NVARCHAR(50) ) - """) + """ + ) db_connection.commit() # Generate data for insertion @@ -8468,12 +8594,14 @@ def test_executemany_uuid_roundtrip_fixed_value(cursor, db_connection): table_name = "#pytest_uuid_fixed" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, description NVARCHAR(50) ) - """) + """ + ) db_connection.commit() fixed_uuid = uuid.UUID("12345678-1234-5678-1234-567812345678") @@ -8514,7 +8642,8 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -8522,14 +8651,17 @@ def test_decimal_separator_with_multiple_values(cursor, 
db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """) - db_connection.commit() + """ + ) + db_connection.commit() # Test with default separator first cursor.execute("SELECT * FROM #pytest_decimal_multi_test") @@ -8565,19 +8697,23 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """) + """ + ) db_connection.commit() # Test with default separator @@ -8616,12 +8752,14 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test values with default separator (.) 
@@ -8706,21 +8844,25 @@ def test_lowercase_attribute(cursor, db_connection): try: # Create a test table with mixed-case column names - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lowercase_test ( ID INT PRIMARY KEY, UserName VARCHAR(50), EMAIL_ADDRESS VARCHAR(100), PhoneNumber VARCHAR(20) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_lowercase_test (ID, UserName, EMAIL_ADDRESS, PhoneNumber) VALUES (1, 'JohnDoe', 'john@example.com', '555-1234') - """) + """ + ) db_connection.commit() # First test with lowercase=False (default) @@ -8775,12 +8917,14 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test values with default separator (.) @@ -8862,7 +9006,8 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -8870,13 +9015,16 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """) + """ + ) db_connection.commit() # Test with default separator first @@ -8913,19 +9061,23 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + 
cursor.execute( + """ INSERT INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """) + """ + ) db_connection.commit() # Test with default separator @@ -9438,21 +9590,25 @@ def test_lowercase_attribute(cursor, db_connection): try: # Create a test table with mixed-case column names - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lowercase_test ( ID INT PRIMARY KEY, UserName VARCHAR(50), EMAIL_ADDRESS VARCHAR(100), PhoneNumber VARCHAR(20) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_lowercase_test (ID, UserName, EMAIL_ADDRESS, PhoneNumber) VALUES (1, 'JohnDoe', 'john@example.com', '555-1234') - """) + """ + ) db_connection.commit() # First test with lowercase=False (default) @@ -9507,12 +9663,14 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test values with default separator (.) 
@@ -9594,7 +9752,8 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -9602,13 +9761,16 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """) + """ + ) db_connection.commit() # Test with default separator first @@ -9645,19 +9807,23 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """) + """ + ) db_connection.commit() # Test with default separator @@ -9696,12 +9862,14 @@ def test_cursor_setinputsizes_basic(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_inputsizes ( string_col NVARCHAR(100), int_col INT ) - """) + """ + ) # Set input sizes for parameters cursor.setinputsizes([(mssql_python.SQL_WVARCHAR, 100, 0), (mssql_python.SQL_INTEGER, 0, 0)]) @@ -9727,13 +9895,15 @@ def test_cursor_setinputsizes_with_executemany_float(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_float") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_inputsizes_float ( id INT, name NVARCHAR(50), price REAL /* Use REAL instead of DECIMAL */ ) - """) + """ + ) # Prepare data with float values data = [(1, "Item 1", 10.99), 
(2, "Item 2", 20.50), (3, "Item 3", 30.75)] @@ -9770,12 +9940,14 @@ def test_cursor_setinputsizes_reset(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_reset") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_inputsizes_reset ( col1 NVARCHAR(100), col2 INT ) - """) + """ + ) # Set input sizes for parameters cursor.setinputsizes([(mssql_python.SQL_WVARCHAR, 100, 0), (mssql_python.SQL_INTEGER, 0, 0)]) @@ -9810,12 +9982,14 @@ def test_cursor_setinputsizes_override_inference(db_connection): # Create a test table with specific types cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_override") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_inputsizes_override ( small_int SMALLINT, big_text NVARCHAR(MAX) ) - """) + """ + ) # Set input sizes that override the default inference # For SMALLINT, use a valid precision value (5 is typical for SMALLINT) @@ -9871,13 +10045,15 @@ def test_setinputsizes_parameter_count_mismatch_fewer(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_mismatch") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_inputsizes_mismatch ( col1 INT, col2 NVARCHAR(100), col3 FLOAT ) - """) + """ + ) # Set fewer input sizes than parameters cursor.setinputsizes( @@ -9920,12 +10096,14 @@ def test_setinputsizes_parameter_count_mismatch_more(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_mismatch") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_inputsizes_mismatch ( col1 INT, col2 NVARCHAR(100) ) - """) + """ + ) # Set more input sizes than parameters cursor.setinputsizes( @@ -9960,7 +10138,8 @@ def test_setinputsizes_with_null_values(db_connection): # Create a test table with multiple data types cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_null") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_inputsizes_null ( int_col INT, string_col 
NVARCHAR(100), @@ -9968,7 +10147,8 @@ def test_setinputsizes_with_null_values(db_connection): date_col DATE, binary_col VARBINARY(100) ) - """) + """ + ) # Set input sizes for all columns cursor.setinputsizes( @@ -10271,15 +10451,18 @@ def test_procedures_setup(cursor, db_connection): ) # Create test stored procedures - cursor.execute(""" + cursor.execute( + """ CREATE OR ALTER PROCEDURE pytest_proc_schema.test_proc1 AS BEGIN SELECT 1 AS result END - """) + """ + ) - cursor.execute(""" + cursor.execute( + """ CREATE OR ALTER PROCEDURE pytest_proc_schema.test_proc2 @param1 INT, @param2 VARCHAR(50) OUTPUT @@ -10288,7 +10471,8 @@ def test_procedures_setup(cursor, db_connection): SELECT @param2 = 'Output ' + CAST(@param1 AS VARCHAR(10)) RETURN @param1 END - """) + """ + ) db_connection.commit() except Exception as e: @@ -10406,7 +10590,8 @@ def test_procedures_with_parameters(cursor, db_connection): """Test that procedures() correctly reports parameter information""" try: # Create a simpler procedure with basic parameters - cursor.execute(""" + cursor.execute( + """ CREATE OR ALTER PROCEDURE pytest_proc_schema.test_params_proc @in1 INT, @in2 VARCHAR(50) @@ -10414,7 +10599,8 @@ def test_procedures_with_parameters(cursor, db_connection): BEGIN SELECT @in1 AS value1, @in2 AS value2 END - """) + """ + ) db_connection.commit() # Get procedure info @@ -10448,23 +10634,28 @@ def test_procedures_result_set_info(cursor, db_connection): """Test that procedures() reports information about result sets""" try: # Create procedures with different result set patterns - cursor.execute(""" + cursor.execute( + """ CREATE OR ALTER PROCEDURE pytest_proc_schema.test_no_results AS BEGIN DECLARE @x INT = 1 END - """) + """ + ) - cursor.execute(""" + cursor.execute( + """ CREATE OR ALTER PROCEDURE pytest_proc_schema.test_one_result AS BEGIN SELECT 1 AS col1, 'test' AS col2 END - """) + """ + ) - cursor.execute(""" + cursor.execute( + """ CREATE OR ALTER PROCEDURE 
pytest_proc_schema.test_multiple_results AS BEGIN @@ -10472,7 +10663,8 @@ def test_procedures_result_set_info(cursor, db_connection): SELECT 'test' AS result2 SELECT GETDATE() AS result3 END - """) + """ + ) db_connection.commit() # Get procedure info for all test procedures @@ -10554,15 +10746,18 @@ def test_foreignkeys_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_fk_schema.customers") # Create parent table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_fk_schema.customers ( customer_id INT PRIMARY KEY, customer_name VARCHAR(100) NOT NULL ) - """) + """ + ) # Create child table with foreign key - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_fk_schema.orders ( order_id INT PRIMARY KEY, order_date DATETIME NOT NULL, @@ -10571,18 +10766,23 @@ def test_foreignkeys_setup(cursor, db_connection): CONSTRAINT FK_Orders_Customers FOREIGN KEY (customer_id) REFERENCES pytest_fk_schema.customers (customer_id) ) - """) + """ + ) # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO pytest_fk_schema.customers (customer_id, customer_name) VALUES (1, 'Test Customer 1'), (2, 'Test Customer 2') - """) + """ + ) - cursor.execute(""" + cursor.execute( + """ INSERT INTO pytest_fk_schema.orders (order_id, order_date, customer_id, total_amount) VALUES (101, GETDATE(), 1, 150.00), (102, GETDATE(), 2, 250.50) - """) + """ + ) db_connection.commit() except Exception as e: @@ -10810,17 +11010,20 @@ def test_foreignkeys_multiple_column_fk(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_fk_schema.product_variants") # Create parent table with composite primary key - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_fk_schema.product_variants ( product_id INT NOT NULL, variant_id INT NOT NULL, variant_name VARCHAR(100) NOT NULL, PRIMARY KEY (product_id, variant_id) ) - """) + """ + ) # Create child table with composite foreign key - cursor.execute(""" + cursor.execute( + """ 
CREATE TABLE pytest_fk_schema.order_details ( order_id INT NOT NULL, product_id INT NOT NULL, @@ -10830,7 +11033,8 @@ def test_foreignkeys_multiple_column_fk(cursor, db_connection): CONSTRAINT FK_OrderDetails_ProductVariants FOREIGN KEY (product_id, variant_id) REFERENCES pytest_fk_schema.product_variants (product_id, variant_id) ) - """) + """ + ) db_connection.commit() @@ -10895,23 +11099,27 @@ def test_primarykeys_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_pk_schema.composite_pk_test") # Create table with simple primary key - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_pk_schema.single_pk_test ( id INT PRIMARY KEY, name VARCHAR(100) NOT NULL, description VARCHAR(200) NULL ) - """) + """ + ) # Create table with composite primary key - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_pk_schema.composite_pk_test ( dept_id INT NOT NULL, emp_id INT NOT NULL, hire_date DATE NOT NULL, CONSTRAINT PK_composite_test PRIMARY KEY (dept_id, emp_id) ) - """) + """ + ) db_connection.commit() except Exception as e: @@ -11222,13 +11430,15 @@ def test_rowcount(cursor, db_connection): cursor.execute("INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe3');") assert cursor.rowcount == 1, "Rowcount should be 1 after third insert" - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe4'), ('JohnDoe5'), ('JohnDoe6'); - """) + """ + ) assert cursor.rowcount == 3, "Rowcount should be 3 after inserting multiple rows" cursor.execute("SELECT * FROM #pytest_test_rowcount;") @@ -11263,26 +11473,31 @@ def test_specialcolumns_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_special_schema.identity_test") # Create table with primary key (for rowIdColumns) - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_special_schema.rowid_test ( id INT PRIMARY KEY, name NVARCHAR(100) NOT NULL, unique_col NVARCHAR(100) UNIQUE, non_unique_col NVARCHAR(100) 
) - """) + """ + ) # Create table with rowversion column (for rowVerColumns) - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_special_schema.timestamp_test ( id INT PRIMARY KEY, name NVARCHAR(100) NOT NULL, last_updated ROWVERSION ) - """) + """ + ) # Create table with multiple unique identifiers - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_special_schema.multiple_unique_test ( id INT NOT NULL, code VARCHAR(10) NOT NULL, @@ -11290,16 +11505,19 @@ def test_specialcolumns_setup(cursor, db_connection): order_number VARCHAR(20) UNIQUE, CONSTRAINT PK_multiple_unique_test PRIMARY KEY (id, code) ) - """) + """ + ) # Create table with identity column - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_special_schema.identity_test ( id INT IDENTITY(1,1) PRIMARY KEY, name NVARCHAR(100) NOT NULL, last_modified DATETIME DEFAULT GETDATE() ) - """) + """ + ) db_connection.commit() except Exception as e: @@ -11418,12 +11636,14 @@ def test_rowid_columns_nullable(cursor, db_connection): """Test rowIdColumns with nullable parameter""" try: # First create a table with nullable unique column and non-nullable PK - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_special_schema.nullable_test ( id INT PRIMARY KEY, -- PK can't be nullable in SQL Server data NVARCHAR(100) NULL ) - """) + """ + ) db_connection.commit() # Test with nullable=True (default) @@ -11516,12 +11736,14 @@ def test_rowver_columns_nullable(cursor, db_connection): """Test rowVerColumns with nullable parameter (not expected to have effect)""" try: # First create a table with rowversion column - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_special_schema.nullable_rowver_test ( id INT PRIMARY KEY, ts ROWVERSION ) - """) + """ + ) db_connection.commit() # Test with nullable=True (default) @@ -11630,7 +11852,8 @@ def test_statistics_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_stats_schema.empty_stats_test") # 
Create test table with various indexes - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_stats_schema.stats_test ( id INT PRIMARY KEY, name VARCHAR(100) NOT NULL, @@ -11639,25 +11862,32 @@ def test_statistics_setup(cursor, db_connection): salary DECIMAL(10, 2) NULL, hire_date DATE NOT NULL ) - """) + """ + ) # Create a non-unique index - cursor.execute(""" + cursor.execute( + """ CREATE INDEX IX_stats_test_dept_date ON pytest_stats_schema.stats_test (department, hire_date) - """) + """ + ) # Create a unique index on multiple columns - cursor.execute(""" + cursor.execute( + """ CREATE UNIQUE INDEX UX_stats_test_name_dept ON pytest_stats_schema.stats_test (name, department) - """) + """ + ) # Create an empty table for testing - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_stats_schema.empty_stats_test ( id INT PRIMARY KEY, data VARCHAR(100) NULL ) - """) + """ + ) db_connection.commit() except Exception as e: @@ -11922,7 +12152,8 @@ def test_columns_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_cols_schema.columns_special_test") # Create test table with various column types - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_cols_schema.columns_test ( id INT PRIMARY KEY, name NVARCHAR(100) NOT NULL, @@ -11934,10 +12165,12 @@ def test_columns_setup(cursor, db_connection): notes TEXT NULL, [computed_col] AS (name + ' - ' + CAST(id AS VARCHAR(10))) ) - """) + """ + ) # Create table with special column names and edge cases - fix the problematic column name - cursor.execute(""" + cursor.execute( + """ CREATE TABLE pytest_cols_schema.columns_special_test ( [ID] INT PRIMARY KEY, [User Name] NVARCHAR(100) NULL, @@ -11949,7 +12182,8 @@ def test_columns_setup(cursor, db_connection): [Column/With/Slashes] VARCHAR(20) NULL, [Column_With_Underscores] VARCHAR(20) NULL -- Changed from problematic nested brackets ) - """) + """ + ) db_connection.commit() except Exception as e: @@ -12413,21 +12647,25 @@ def 
test_lowercase_attribute(cursor, db_connection): try: # Create a test table with mixed-case column names - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lowercase_test ( ID INT PRIMARY KEY, UserName VARCHAR(50), EMAIL_ADDRESS VARCHAR(100), PhoneNumber VARCHAR(20) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_lowercase_test (ID, UserName, EMAIL_ADDRESS, PhoneNumber) VALUES (1, 'JohnDoe', 'john@example.com', '555-1234') - """) + """ + ) db_connection.commit() # First test with lowercase=False (default) @@ -12482,12 +12720,14 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test values with default separator (.) @@ -12569,7 +12809,8 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -12577,13 +12818,16 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """) + """ + ) db_connection.commit() # Test with default separator first @@ -12620,19 +12864,23 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """) + """ + ) db_connection.commit() # Insert test data - cursor.execute(""" + cursor.execute( + """ INSERT 
INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """) + """ + ) db_connection.commit() # Test with default separator @@ -12669,12 +12917,14 @@ def test_executemany_with_uuids(cursor, db_connection): table_name = "#pytest_uuid_batch" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, description NVARCHAR(50) ) - """) + """ + ) db_connection.commit() # Prepare test data: mix of UUIDs and None @@ -12822,11 +13072,13 @@ def test_date_string_parameter_binding(cursor, db_connection): table_name = "#pytest_date_string" try: drop_table_if_exists(cursor, table_name) - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( a_column VARCHAR(20) ) - """) + """ + ) cursor.execute(f"INSERT INTO {table_name} (a_column) VALUES ('string1'), ('string2')") db_connection.commit() @@ -12853,11 +13105,13 @@ def test_time_string_parameter_binding(cursor, db_connection): table_name = "#pytest_time_string" try: drop_table_if_exists(cursor, table_name) - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( time_col VARCHAR(22) ) - """) + """ + ) cursor.execute(f"INSERT INTO {table_name} (time_col) VALUES ('prefix_14:30:45_suffix')") db_connection.commit() @@ -12882,11 +13136,13 @@ def test_datetime_string_parameter_binding(cursor, db_connection): table_name = "#pytest_datetime_string" try: drop_table_if_exists(cursor, table_name) - cursor.execute(f""" + cursor.execute( + f""" CREATE TABLE {table_name} ( datetime_col VARCHAR(33) ) - """) + """ + ) cursor.execute( f"INSERT INTO {table_name} (datetime_col) VALUES ('prefix_2025-08-12T14:30:45_suffix')" ) @@ -13750,12 +14006,14 @@ def test_column_metadata_error_handling(cursor): """Test column metadata retrieval error handling (Lines 1156-1167).""" # Execute a complex query that might stress metadata retrieval - cursor.execute(""" + cursor.execute( + """ SELECT CAST(1 as INT) as int_col, CAST('test' 
as NVARCHAR(100)) as nvarchar_col, CAST(NEWID() as UNIQUEIDENTIFIER) as guid_col - """) + """ + ) # This should exercise the metadata retrieval code paths # If there are any errors, they should be logged but not crash @@ -13871,12 +14129,14 @@ def test_row_uuid_processing_with_braces(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_uuid_braces") # Create table with UNIQUEIDENTIFIER column - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_uuid_braces ( id INT IDENTITY(1,1), guid_col UNIQUEIDENTIFIER ) - """) + """ + ) # Insert a GUID with braces (this is how SQL Server often returns them) test_guid = "12345678-1234-5678-9ABC-123456789ABC" @@ -13920,12 +14180,14 @@ def test_row_uuid_processing_sql_guid_type(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_sql_guid_type") # Create table with UNIQUEIDENTIFIER column - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_sql_guid_type ( id INT, guid_col UNIQUEIDENTIFIER ) - """) + """ + ) # Insert test data test_guid = "ABCDEF12-3456-7890-ABCD-1234567890AB" @@ -13971,12 +14233,14 @@ def test_row_output_converter_overflow_error(cursor, db_connection): try: # Create a table with integer column drop_table_if_exists(cursor, "#pytest_overflow_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_overflow_test ( id INT, small_int TINYINT -- TINYINT can only hold 0-255 ) - """) + """ + ) # Insert a valid value first cursor.execute("INSERT INTO #pytest_overflow_test (id, small_int) VALUES (?, ?)", [1, 100]) @@ -14026,12 +14290,14 @@ def test_row_output_converter_general_exception(cursor, db_connection): try: # Create a table with string column drop_table_if_exists(cursor, "#pytest_exception_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_exception_test ( id INT, text_col VARCHAR(50) ) - """) + """ + ) # Insert test data cursor.execute( @@ -14082,12 +14348,14 @@ def test_row_cursor_log_method_availability(cursor, db_connection): try: 
# Create test data drop_table_if_exists(cursor, "#pytest_log_check") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_log_check ( id INT, value_col INT ) - """) + """ + ) cursor.execute("INSERT INTO #pytest_log_check (id, value_col) VALUES (?, ?)", [1, 42]) db_connection.commit() @@ -14115,7 +14383,8 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): """Test NULL handling for all numeric types to ensure processor functions handle NULLs correctly""" try: drop_table_if_exists(cursor, "#pytest_all_numeric_nulls") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_all_numeric_nulls ( int_col INT, bigint_col BIGINT, @@ -14125,7 +14394,8 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): real_col REAL, float_col FLOAT ) - """) + """ + ) db_connection.commit() # Insert row with all NULLs @@ -14167,14 +14437,16 @@ def test_lob_data_types(cursor, db_connection): """Test LOB (Large Object) data types to ensure LOB fallback paths are exercised""" try: drop_table_if_exists(cursor, "#pytest_lob_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lob_test ( id INT, text_lob VARCHAR(MAX), ntext_lob NVARCHAR(MAX), binary_lob VARBINARY(MAX) ) - """) + """ + ) db_connection.commit() # Create large data that will trigger LOB handling @@ -14207,12 +14479,14 @@ def test_lob_char_column_types(cursor, db_connection): """Test LOB fetching specifically for CHAR/VARCHAR columns (covers lines 3313-3314)""" try: drop_table_if_exists(cursor, "#pytest_lob_char") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lob_char ( id INT, char_lob VARCHAR(MAX) ) - """) + """ + ) db_connection.commit() # Create data large enough to trigger LOB path (>8000 bytes) @@ -14239,12 +14513,14 @@ def test_lob_wchar_column_types(cursor, db_connection): """Test LOB fetching specifically for WCHAR/NVARCHAR columns (covers lines 3358-3359)""" try: drop_table_if_exists(cursor, "#pytest_lob_wchar") - cursor.execute(""" + 
cursor.execute( + """ CREATE TABLE #pytest_lob_wchar ( id INT, wchar_lob NVARCHAR(MAX) ) - """) + """ + ) db_connection.commit() # Create unicode data large enough to trigger LOB path (>4000 characters for NVARCHAR) @@ -14271,12 +14547,14 @@ def test_lob_binary_column_types(cursor, db_connection): """Test LOB fetching specifically for BINARY/VARBINARY columns (covers lines 3384-3385)""" try: drop_table_if_exists(cursor, "#pytest_lob_binary") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lob_binary ( id INT, binary_lob VARBINARY(MAX) ) - """) + """ + ) db_connection.commit() # Create binary data large enough to trigger LOB path (>8000 bytes) @@ -14303,14 +14581,16 @@ def test_zero_length_complex_types(cursor, db_connection): """Test zero-length data for complex types (covers lines 3531-3533)""" try: drop_table_if_exists(cursor, "#pytest_zero_length") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_zero_length ( id INT, empty_varchar VARCHAR(100), empty_nvarchar NVARCHAR(100), empty_binary VARBINARY(100) ) - """) + """ + ) db_connection.commit() # Insert empty (non-NULL) values @@ -14338,12 +14618,14 @@ def test_guid_with_nulls(cursor, db_connection): """Test GUID type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_guid_nulls") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_guid_nulls ( id INT, guid_col UNIQUEIDENTIFIER ) - """) + """ + ) db_connection.commit() # Insert NULL GUID @@ -14370,12 +14652,14 @@ def test_datetimeoffset_with_nulls(cursor, db_connection): """Test DATETIMEOFFSET type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_dto_nulls") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_dto_nulls ( id INT, dto_col DATETIMEOFFSET ) - """) + """ + ) db_connection.commit() # Insert NULL DATETIMEOFFSET @@ -14402,12 +14686,14 @@ def test_decimal_conversion_edge_cases(cursor, db_connection): """Test DECIMAL/NUMERIC type conversion including edge cases""" try: 
drop_table_if_exists(cursor, "#pytest_decimal_edge") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_edge ( id INT, dec_col DECIMAL(18, 4) ) - """) + """ + ) db_connection.commit() # Insert various decimal values including edge cases @@ -14528,7 +14814,8 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): """Test NULL handling for all numeric types to ensure processor functions handle NULLs correctly""" try: drop_table_if_exists(cursor, "#pytest_all_numeric_nulls") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_all_numeric_nulls ( int_col INT, bigint_col BIGINT, @@ -14538,7 +14825,8 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): real_col REAL, float_col FLOAT ) - """) + """ + ) db_connection.commit() # Insert row with all NULLs @@ -14580,14 +14868,16 @@ def test_lob_data_types(cursor, db_connection): """Test LOB (Large Object) data types to ensure LOB fallback paths are exercised""" try: drop_table_if_exists(cursor, "#pytest_lob_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lob_test ( id INT, text_lob VARCHAR(MAX), ntext_lob NVARCHAR(MAX), binary_lob VARBINARY(MAX) ) - """) + """ + ) db_connection.commit() # Create large data that will trigger LOB handling @@ -14620,12 +14910,14 @@ def test_lob_char_column_types(cursor, db_connection): """Test LOB fetching specifically for CHAR/VARCHAR columns (covers lines 3313-3314)""" try: drop_table_if_exists(cursor, "#pytest_lob_char") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lob_char ( id INT, char_lob VARCHAR(MAX) ) - """) + """ + ) db_connection.commit() # Create data large enough to trigger LOB path (>8000 bytes) @@ -14652,12 +14944,14 @@ def test_lob_wchar_column_types(cursor, db_connection): """Test LOB fetching specifically for WCHAR/NVARCHAR columns (covers lines 3358-3359)""" try: drop_table_if_exists(cursor, "#pytest_lob_wchar") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE 
#pytest_lob_wchar ( id INT, wchar_lob NVARCHAR(MAX) ) - """) + """ + ) db_connection.commit() # Create unicode data large enough to trigger LOB path (>4000 characters for NVARCHAR) @@ -14684,12 +14978,14 @@ def test_lob_binary_column_types(cursor, db_connection): """Test LOB fetching specifically for BINARY/VARBINARY columns (covers lines 3384-3385)""" try: drop_table_if_exists(cursor, "#pytest_lob_binary") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_lob_binary ( id INT, binary_lob VARBINARY(MAX) ) - """) + """ + ) db_connection.commit() # Create binary data large enough to trigger LOB path (>8000 bytes) @@ -14716,14 +15012,16 @@ def test_zero_length_complex_types(cursor, db_connection): """Test zero-length data for complex types (covers lines 3531-3533)""" try: drop_table_if_exists(cursor, "#pytest_zero_length") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_zero_length ( id INT, empty_varchar VARCHAR(100), empty_nvarchar NVARCHAR(100), empty_binary VARBINARY(100) ) - """) + """ + ) db_connection.commit() # Insert empty (non-NULL) values @@ -14751,12 +15049,14 @@ def test_guid_with_nulls(cursor, db_connection): """Test GUID type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_guid_nulls") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_guid_nulls ( id INT, guid_col UNIQUEIDENTIFIER ) - """) + """ + ) db_connection.commit() # Insert NULL GUID @@ -14783,12 +15083,14 @@ def test_datetimeoffset_with_nulls(cursor, db_connection): """Test DATETIMEOFFSET type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_dto_nulls") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_dto_nulls ( id INT, dto_col DATETIMEOFFSET ) - """) + """ + ) db_connection.commit() # Insert NULL DATETIMEOFFSET @@ -14815,12 +15117,14 @@ def test_decimal_conversion_edge_cases(cursor, db_connection): """Test DECIMAL/NUMERIC type conversion including edge cases""" try: drop_table_if_exists(cursor, 
"#pytest_decimal_edge") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #pytest_decimal_edge ( id INT, dec_col DECIMAL(18, 4) ) - """) + """ + ) db_connection.commit() # Insert various decimal values including edge cases @@ -14941,14 +15245,16 @@ def test_fetchall_with_integrity_constraint(cursor, db_connection): try: # Setup table with unique constraint cursor.execute("DROP TABLE IF EXISTS #uniq_cons_test") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #uniq_cons_test ( id INTEGER NOT NULL IDENTITY, data VARCHAR(50) NULL, PRIMARY KEY (id), UNIQUE (data) ) - """) + """ + ) # Insert initial row - should work cursor.execute( @@ -15243,7 +15549,8 @@ def test_native_uuid_non_uuid_columns_unaffected(db_connection): mssql_python.native_uuid = False drop_table_if_exists(cursor, "#test_uuid_other_cols") - cursor.execute(""" + cursor.execute( + """ CREATE TABLE #test_uuid_other_cols ( id UNIQUEIDENTIFIER, int_col INT, @@ -15251,7 +15558,8 @@ def test_native_uuid_non_uuid_columns_unaffected(db_connection): float_col FLOAT, bit_col BIT ) - """) + """ + ) test_uuid = uuid.uuid4() cursor.execute( "INSERT INTO #test_uuid_other_cols VALUES (?, ?, ?, ?, ?)", @@ -15560,3 +15868,53 @@ def test_per_connection_native_uuid_invalid_type(conn_str): with pytest.raises(ValueError, match="native_uuid must be a boolean"): mssql_python.connect(conn_str, native_uuid=1) + + +def test_executemany_uuid_output_sets_uuid_str_indices(conn_str): + """Test that executemany with OUTPUT clause computes _uuid_str_indices.""" + import uuid as _uuid + + original = mssql_python.native_uuid + try: + mssql_python.native_uuid = False + conn = mssql_python.connect(conn_str) + cursor = conn.cursor() + + cursor.execute( + """ + CREATE TABLE #executemany_uuid_output ( + id INT IDENTITY(1,1), + guid UNIQUEIDENTIFIER DEFAULT NEWID() + ) + """ + ) + + # executemany with OUTPUT — produces a result set with a UUID column + cursor.executemany( + "INSERT INTO #executemany_uuid_output (guid) OUTPUT 
INSERTED.guid VALUES (?)", + [ + (_uuid.UUID("11111111-1111-1111-1111-111111111111"),), + (_uuid.UUID("22222222-2222-2222-2222-222222222222"),), + ], + ) + + # After executemany, description should exist (OUTPUT returns rows) + assert cursor.description is not None, "OUTPUT clause should produce a description" + + # _uuid_str_indices should have been computed (native_uuid=False + UUID column) + assert ( + cursor._uuid_str_indices is not None + ), "_uuid_str_indices should be set after executemany with OUTPUT" + + # Fetch the returned rows — should be uppercase strings, not uuid.UUID + rows = cursor.fetchall() + assert len(rows) >= 1, "OUTPUT clause should return at least one row" + for row in rows: + assert isinstance(row[0], str), f"Expected str, got {type(row[0])}" + assert row[0] == row[0].upper(), "UUID string should be uppercase" + + cursor.execute("DROP TABLE IF EXISTS #executemany_uuid_output") + cursor.close() + conn.close() + finally: + mssql_python.native_uuid = original From 81f585efc3c7abdfdb696cfcfddcb31df42107cd Mon Sep 17 00:00:00 2001 From: Jahnvi Thakkar Date: Mon, 2 Mar 2026 16:21:07 +0530 Subject: [PATCH 5/6] Applying black formatter --- tests/test_004_cursor.py | 930 +++++++++++++-------------------------- 1 file changed, 310 insertions(+), 620 deletions(-) diff --git a/tests/test_004_cursor.py b/tests/test_004_cursor.py index 91c0cbc9d..90e30159a 100644 --- a/tests/test_004_cursor.py +++ b/tests/test_004_cursor.py @@ -182,15 +182,13 @@ def test_mixed_empty_and_null_values(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_empty_vs_null") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_empty_vs_null ( id INT, text_col NVARCHAR(100), binary_col VARBINARY(100) ) - """ - ) + """) db_connection.commit() # Insert mix of empty and NULL values @@ -888,15 +886,13 @@ def test_rowcount(cursor, db_connection): cursor.execute("INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe3');") assert 
cursor.rowcount == 1, "Rowcount should be 1 after third insert" - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe4'), ('JohnDoe5'), ('JohnDoe6'); - """ - ) + """) assert cursor.rowcount == 3, "Rowcount should be 3 after inserting multiple rows" cursor.execute("SELECT * FROM #pytest_test_rowcount;") @@ -992,14 +988,12 @@ def test_fetchmany_size_zero_lob(cursor, db_connection): """Test fetchmany with size=0 for LOB columns""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_lob") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_fetchmany_lob ( id INT PRIMARY KEY, lob_data NVARCHAR(MAX) ) - """ - ) + """) # Insert test data test_data = [(1, "First LOB data"), (2, "Second LOB data"), (3, "Third LOB data")] @@ -1024,14 +1018,12 @@ def test_fetchmany_more_than_exist_lob(cursor, db_connection): """Test fetchmany requesting more rows than exist with LOB columns""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_lob_more") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_fetchmany_lob_more ( id INT PRIMARY KEY, lob_data NVARCHAR(MAX) ) - """ - ) + """) # Insert only 3 rows test_data = [(1, "First LOB data"), (2, "Second LOB data"), (3, "Third LOB data")] @@ -1065,14 +1057,12 @@ def test_fetchmany_empty_result_lob(cursor, db_connection): """Test fetchmany on empty result set with LOB columns""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_lob_empty") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_fetchmany_lob_empty ( id INT PRIMARY KEY, lob_data NVARCHAR(MAX) ) - """ - ) + """) db_connection.commit() # Query empty table @@ -1095,14 +1085,12 @@ def test_fetchmany_very_large_lob(cursor, db_connection): """Test fetchmany with very large LOB column data""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_large_lob") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_fetchmany_large_lob ( id INT PRIMARY KEY, large_lob 
NVARCHAR(MAX) ) - """ - ) + """) # Create very large data (10000 characters) large_data = "x" * 10000 @@ -1152,14 +1140,12 @@ def test_fetchmany_mixed_lob_sizes(cursor, db_connection): """Test fetchmany with mixed LOB sizes including empty and NULL""" try: cursor.execute("DROP TABLE IF EXISTS #test_fetchmany_mixed_lob") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_fetchmany_mixed_lob ( id INT PRIMARY KEY, mixed_lob NVARCHAR(MAX) ) - """ - ) + """) # Mix of sizes: empty, NULL, small, medium, large test_data = [ @@ -1287,14 +1273,12 @@ def test_executemany_empty_strings(cursor, db_connection): """Test executemany with empty strings - regression test for Unix UTF-16 conversion issue""" try: # Create test table for empty string testing - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_empty_batch ( id INT, data NVARCHAR(50) ) - """ - ) + """) # Clear any existing data cursor.execute("DELETE FROM #pytest_empty_batch") @@ -1335,8 +1319,7 @@ def test_executemany_empty_strings_various_types(cursor, db_connection): """Test executemany with empty strings in different column types""" try: # Create test table with different string types - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_string_types ( id INT, varchar_col VARCHAR(50), @@ -1344,8 +1327,7 @@ def test_executemany_empty_strings_various_types(cursor, db_connection): text_col TEXT, ntext_col NTEXT ) - """ - ) + """) # Clear any existing data cursor.execute("DELETE FROM #pytest_string_types") @@ -1386,14 +1368,12 @@ def test_executemany_unicode_and_empty_strings(cursor, db_connection): """Test executemany with mix of Unicode characters and empty strings""" try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_unicode_test ( id INT, data NVARCHAR(100) ) - """ - ) + """) # Clear any existing data cursor.execute("DELETE FROM #pytest_unicode_test") @@ -1438,14 +1418,12 @@ def test_executemany_large_batch_with_empty_strings(cursor, 
db_connection): """Test executemany with large batch containing empty strings""" try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_large_batch ( id INT, data NVARCHAR(50) ) - """ - ) + """) # Clear any existing data cursor.execute("DELETE FROM #pytest_large_batch") @@ -1498,14 +1476,12 @@ def test_executemany_compare_with_execute(cursor, db_connection): """Test that executemany produces same results as individual execute calls""" try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_compare_test ( id INT, data NVARCHAR(50) ) - """ - ) + """) # Test data with empty strings test_data = [ @@ -1558,15 +1534,13 @@ def test_executemany_edge_cases_empty_strings(cursor, db_connection): """Test executemany edge cases with empty strings and special characters""" try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_edge_cases ( id INT, varchar_data VARCHAR(100), nvarchar_data NVARCHAR(100) ) - """ - ) + """) # Clear any existing data cursor.execute("DELETE FROM #pytest_edge_cases") @@ -1620,14 +1594,12 @@ def test_executemany_null_vs_empty_string(cursor, db_connection): """Test that executemany correctly distinguishes between NULL and empty string""" try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_null_vs_empty ( id INT, data NVARCHAR(50) ) - """ - ) + """) # Clear any existing data cursor.execute("DELETE FROM #pytest_null_vs_empty") @@ -1692,14 +1664,12 @@ def test_executemany_binary_data_edge_cases(cursor, db_connection): """Test executemany with binary data and empty byte arrays""" try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_binary_test ( id INT, binary_data VARBINARY(100) ) - """ - ) + """) # Clear any existing data cursor.execute("DELETE FROM #pytest_binary_test") @@ -1861,8 +1831,7 @@ def test_executemany_mixed_null_and_typed_values(cursor, db_connection): """Test 
executemany with randomly mixed NULL and non-NULL values across multiple columns and rows (50 rows, 10 columns).""" try: # Create table with 10 columns of various types - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_empty_params ( col1 INT, col2 VARCHAR(50), @@ -1875,8 +1844,7 @@ def test_executemany_mixed_null_and_typed_values(cursor, db_connection): col9 DATE, col10 REAL ) - """ - ) + """) # Generate 50 rows with randomly mixed NULL and non-NULL values across 10 columns data = [] @@ -1940,8 +1908,7 @@ def test_executemany_multi_column_null_arrays(cursor, db_connection): """Test executemany with multi-column NULL arrays (50 records, 8 columns).""" try: # Create table with 8 columns of various types - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_null_arrays ( col1 INT, col2 VARCHAR(100), @@ -1952,8 +1919,7 @@ def test_executemany_multi_column_null_arrays(cursor, db_connection): col7 BIGINT, col8 DATE ) - """ - ) + """) # Generate 50 rows with all NULL values across 8 columns data = [(None, None, None, None, None, None, None, None) for _ in range(50)] @@ -1973,14 +1939,12 @@ def test_executemany_multi_column_null_arrays(cursor, db_connection): assert null_count == 50, f"Expected 50 NULLs in col{col_num}, got {null_count}" # Verify no non-NULL values exist - cursor.execute( - """ + cursor.execute(""" SELECT COUNT(*) FROM #pytest_null_arrays WHERE col1 IS NOT NULL OR col2 IS NOT NULL OR col3 IS NOT NULL OR col4 IS NOT NULL OR col5 IS NOT NULL OR col6 IS NOT NULL OR col7 IS NOT NULL OR col8 IS NOT NULL - """ - ) + """) non_null_count = cursor.fetchone()[0] assert non_null_count == 0, f"Expected 0 non-NULL values, got {non_null_count}" @@ -2019,8 +1983,7 @@ def test_executemany_concurrent_null_parameters(db_connection): # Create table with db_connection.cursor() as cursor: - cursor.execute( - f""" + cursor.execute(f""" IF OBJECT_ID('{table_name}', 'U') IS NOT NULL DROP TABLE {table_name} @@ -2032,8 +1995,7 @@ def 
test_executemany_concurrent_null_parameters(db_connection): col3 FLOAT, col4 DATETIME ) - """ - ) + """) db_connection.commit() # Execute multiple sequential insert operations @@ -2288,14 +2250,12 @@ def test_insert_data_for_join(cursor, db_connection): def test_join_operations(cursor): """Test join operations""" try: - cursor.execute( - """ + cursor.execute(""" SELECT e.name, d.department_name, p.project_name FROM #pytest_employees e JOIN #pytest_departments d ON e.department_id = d.department_id JOIN #pytest_projects p ON e.employee_id = p.employee_id - """ - ) + """) rows = cursor.fetchall() assert len(rows) == 3, "Join operation returned incorrect number of rows" assert rows[0] == [ @@ -2385,12 +2345,10 @@ def test_execute_stored_procedure_with_parameters(cursor): def test_execute_stored_procedure_without_parameters(cursor): """Test executing stored procedure without parameters""" try: - cursor.execute( - """ + cursor.execute(""" DECLARE @EmployeeID INT = 2 EXEC dbo.GetEmployeeProjects @EmployeeID - """ - ) + """) rows = cursor.fetchall() assert ( len(rows) == 1 @@ -2610,25 +2568,21 @@ def test_row_attribute_access(cursor, db_connection): """Test accessing row values by column name as attributes""" try: # Create test table with multiple columns - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_row_attr_test ( id INT PRIMARY KEY, name VARCHAR(50), email VARCHAR(100), age INT ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_row_attr_test (id, name, email, age) VALUES (1, 'John Doe', 'john@example.com', 30) - """ - ) + """) db_connection.commit() # Test attribute access @@ -2724,15 +2678,13 @@ def test_row_comparison_with_list(cursor, db_connection): def test_row_string_representation(cursor, db_connection): """Test Row string and repr representations""" try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_row_test ( id INT PRIMARY KEY, text_col 
NVARCHAR(50), null_col INT ) - """ - ) + """) db_connection.commit() cursor.execute( @@ -2765,15 +2717,13 @@ def test_row_string_representation(cursor, db_connection): def test_row_column_mapping(cursor, db_connection): """Test Row column name mapping""" try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_row_test ( FirstColumn INT PRIMARY KEY, Second_Column NVARCHAR(50), [Complex Name!] INT ) - """ - ) + """) db_connection.commit() cursor.execute( @@ -3256,12 +3206,10 @@ def test_execute_rowcount_chaining(cursor, db_connection): assert count == 1, "INSERT should affect 1 row" # Test multiple INSERT rowcount chaining - count = cursor.execute( - """ + count = cursor.execute(""" INSERT INTO #test_chaining (id, value) VALUES (2, 'test2'), (3, 'test3'), (4, 'test4') - """ - ).rowcount + """).rowcount assert count == 3, "Multiple INSERT should affect 3 rows" # Test UPDATE rowcount chaining @@ -3496,8 +3444,7 @@ def test_cursor_next_with_different_data_types(cursor, db_connection): """Test next() functionality with various data types""" try: # Create test table with various data types - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_next_types ( id INT, name NVARCHAR(50), @@ -3506,8 +3453,7 @@ def test_cursor_next_with_different_data_types(cursor, db_connection): created_date DATE, created_time DATETIME ) - """ - ) + """) db_connection.commit() # Insert test data with different types @@ -3699,16 +3645,14 @@ def test_execute_chaining_compatibility_examples(cursor, db_connection): """Test real-world chaining examples""" try: # Create users table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #users ( user_id INT IDENTITY(1,1) PRIMARY KEY, user_name NVARCHAR(50), last_logon DATETIME, status NVARCHAR(20) ) - """ - ) + """) db_connection.commit() # Insert test users @@ -4407,8 +4351,7 @@ def test_fetchval_different_data_types(cursor, db_connection): try: # Create test table with different data types drop_table_if_exists(cursor, 
"#pytest_fetchval_types") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_fetchval_types ( int_col INTEGER, float_col FLOAT, @@ -4420,17 +4363,14 @@ def test_fetchval_different_data_types(cursor, db_connection): date_col DATE, time_col TIME ) - """ - ) + """) # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_fetchval_types VALUES (123, 45.67, 89.12, 'ASCII text', N'Unicode text', 1, '2024-05-20 12:34:56', '2024-05-20', '12:34:56') - """ - ) + """) db_connection.commit() # Test different data types @@ -5728,25 +5668,21 @@ def test_cursor_rollback_data_consistency(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_rollback_orders") drop_table_if_exists(cursor, "#pytest_rollback_customers") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_rollback_customers ( id INTEGER PRIMARY KEY, name VARCHAR(50) ) - """ - ) + """) - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_rollback_orders ( id INTEGER PRIMARY KEY, customer_id INTEGER, amount DECIMAL(10,2), FOREIGN KEY (customer_id) REFERENCES #pytest_rollback_customers(id) ) - """ - ) + """) cursor.commit() # Insert initial data @@ -6228,32 +6164,26 @@ def test_tables_setup(cursor, db_connection): cursor.execute("DROP VIEW IF EXISTS pytest_tables_schema.test_view") # Create regular table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_tables_schema.regular_table ( id INT PRIMARY KEY, name VARCHAR(100) ) - """ - ) + """) # Create another table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_tables_schema.another_table ( id INT PRIMARY KEY, description VARCHAR(200) ) - """ - ) + """) # Create a view - cursor.execute( - """ + cursor.execute(""" CREATE VIEW pytest_tables_schema.test_view AS SELECT id, name FROM pytest_tables_schema.regular_table - """ - ) + """) db_connection.commit() except Exception as e: @@ -6605,14 +6535,12 @@ def test_emoji_round_trip(cursor, db_connection): "1🚀' OR '1'='1", ] - 
cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_emoji_test ( id INT IDENTITY PRIMARY KEY, content NVARCHAR(MAX) ); - """ - ) + """) db_connection.commit() for text in test_inputs: @@ -6764,16 +6692,14 @@ def test_empty_values_fetchmany(cursor, db_connection): try: # Create comprehensive test table drop_table_if_exists(cursor, "#pytest_fetchmany_empty") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_fetchmany_empty ( id INT, varchar_col VARCHAR(50), nvarchar_col NVARCHAR(50), binary_col VARBINARY(50) ) - """ - ) + """) db_connection.commit() # Insert multiple rows with empty values @@ -6898,8 +6824,7 @@ def test_batch_fetch_empty_values_no_assertion_failure(cursor, db_connection): try: # Create comprehensive test table drop_table_if_exists(cursor, "#pytest_batch_empty_assertions") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_batch_empty_assertions ( id INT, empty_varchar VARCHAR(100), @@ -6909,29 +6834,24 @@ def test_batch_fetch_empty_values_no_assertion_failure(cursor, db_connection): null_nvarchar NVARCHAR(100), null_binary VARBINARY(100) ) - """ - ) + """) db_connection.commit() # Insert rows with mix of empty and NULL values - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_batch_empty_assertions VALUES (1, '', '', 0x, NULL, NULL, NULL), (2, '', '', 0x, NULL, NULL, NULL), (3, '', '', 0x, NULL, NULL, NULL) - """ - ) + """) db_connection.commit() # Test fetchall - should not trigger any assertions about dataLen - cursor.execute( - """ + cursor.execute(""" SELECT empty_varchar, empty_nvarchar, empty_binary, null_varchar, null_nvarchar, null_binary FROM #pytest_batch_empty_assertions ORDER BY id - """ - ) + """) rows = cursor.fetchall() assert len(rows) == 3, "Should return 3 rows" @@ -6948,12 +6868,10 @@ def test_batch_fetch_empty_values_no_assertion_failure(cursor, db_connection): assert row[5] is None, f"Row {i+1} null_binary should be None" # Test fetchmany - should also not trigger 
assertions - cursor.execute( - """ + cursor.execute(""" SELECT empty_nvarchar, empty_binary FROM #pytest_batch_empty_assertions ORDER BY id - """ - ) + """) # Fetch in batches first_batch = cursor.fetchmany(2) @@ -6993,15 +6911,13 @@ def test_executemany_utf16_length_validation(cursor, db_connection): try: # Create test table with small column size to trigger validation drop_table_if_exists(cursor, "#pytest_utf16_validation") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_utf16_validation ( id INT, short_text NVARCHAR(5), -- Small column to test length validation medium_text NVARCHAR(10) -- Medium column for edge cases ) - """ - ) + """) db_connection.commit() # Test 1: Valid strings that should work on all platforms @@ -7147,14 +7063,12 @@ def test_binary_data_over_8000_bytes(cursor, db_connection): try: # Create test table with VARBINARY(MAX) to handle large data drop_table_if_exists(cursor, "#pytest_small_binary") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_small_binary ( id INT, large_binary VARBINARY(MAX) ) - """ - ) + """) # Test data that fits within both parameter and fetch limits (< 4096 bytes) medium_data = b"B" * 3000 # 3,000 bytes - under both limits @@ -7188,14 +7102,12 @@ def test_varbinarymax_insert_fetch(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_varbinarymax") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_varbinarymax ( id INT, binary_data VARBINARY(MAX) ) - """ - ) + """) # Prepare test data - use moderate sizes to guarantee LOB fetch path (line 867-868) efficiently test_data = [ @@ -7262,14 +7174,12 @@ def test_all_empty_binaries(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_all_empty_binary") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_all_empty_binary ( id INT, empty_binary VARBINARY(100) ) - """ - ) + """) # Insert multiple rows with only empty binary data test_data = [ @@ 
-7308,14 +7218,12 @@ def test_mixed_bytes_and_bytearray_types(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_mixed_binary_types") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_mixed_binary_types ( id INT, binary_data VARBINARY(100) ) - """ - ) + """) # Test data mixing bytes and bytearray for the same column test_data = [ @@ -7370,14 +7278,12 @@ def test_binary_mostly_small_one_large(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_mixed_size_binary") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_mixed_size_binary ( id INT, binary_data VARBINARY(MAX) ) - """ - ) + """) # Create large binary value within both parameter and fetch limits (< 4096 bytes) large_binary = b"X" * 3500 # 3,500 bytes - under both limits @@ -7437,14 +7343,12 @@ def test_varbinarymax_insert_fetch_null(cursor, db_connection): """Test insertion and retrieval of NULL value in VARBINARY(MAX) column.""" try: drop_table_if_exists(cursor, "#pytest_varbinarymax_null") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_varbinarymax_null ( id INT, binary_data VARBINARY(MAX) ) - """ - ) + """) # Insert a row with NULL for binary_data cursor.execute( @@ -7474,15 +7378,13 @@ def test_sql_double_type(cursor, db_connection): """Test SQL_DOUBLE type (FLOAT(53)) to cover line 3213 in dispatcher.""" try: drop_table_if_exists(cursor, "#pytest_double_type") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_double_type ( id INT PRIMARY KEY, double_col FLOAT(53), float_col FLOAT ) - """ - ) + """) # Insert test data with various double precision values test_data = [ @@ -7532,15 +7434,13 @@ def test_null_guid_type(cursor, db_connection): try: mssql_python.native_uuid = True drop_table_if_exists(cursor, "#pytest_null_guid") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_null_guid ( id INT PRIMARY KEY, guid_col UNIQUEIDENTIFIER, guid_nullable 
UNIQUEIDENTIFIER NULL ) - """ - ) + """) # Insert test data with NULL and non-NULL GUIDs test_guid = uuid.uuid4() @@ -7593,14 +7493,12 @@ def test_only_null_and_empty_binary(cursor, db_connection): try: # Create test table drop_table_if_exists(cursor, "#pytest_null_empty_binary") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_null_empty_binary ( id INT, binary_data VARBINARY(100) ) - """ - ) + """) # Test data with only NULL and empty values test_data = [ @@ -7923,8 +7821,7 @@ def test_money_smallmoney_insert_fetch(cursor, db_connection): """Test inserting and retrieving valid MONEY and SMALLMONEY values including boundaries and typical data""" try: drop_table_if_exists(cursor, "#pytest_money_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, @@ -7932,8 +7829,7 @@ def test_money_smallmoney_insert_fetch(cursor, db_connection): d DECIMAL(19,4), n NUMERIC(10,4) ) - """ - ) + """) db_connection.commit() # Max values @@ -8023,15 +7919,13 @@ def test_money_smallmoney_insert_fetch(cursor, db_connection): def test_money_smallmoney_null_handling(cursor, db_connection): """Test that NULL values for MONEY and SMALLMONEY are stored and retrieved correctly""" try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """ - ) + """) db_connection.commit() # Row with both NULLs @@ -8081,15 +7975,13 @@ def test_money_smallmoney_null_handling(cursor, db_connection): def test_money_smallmoney_roundtrip(cursor, db_connection): """Test inserting and retrieving MONEY and SMALLMONEY using decimal.Decimal roundtrip""" try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """ - ) + """) db_connection.commit() values = (decimal.Decimal("12345.6789"), decimal.Decimal("987.6543")) @@ -8113,15 +8005,13 @@ def 
test_money_smallmoney_boundaries(cursor, db_connection): """Test boundary values for MONEY and SMALLMONEY types are handled correctly""" try: drop_table_if_exists(cursor, "#pytest_money_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """ - ) + """) db_connection.commit() # Insert max boundary @@ -8161,15 +8051,13 @@ def test_money_smallmoney_boundaries(cursor, db_connection): def test_money_smallmoney_invalid_values(cursor, db_connection): """Test that invalid or out-of-range MONEY and SMALLMONEY values raise errors""" try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """ - ) + """) db_connection.commit() # Out of range MONEY @@ -8200,15 +8088,13 @@ def test_money_smallmoney_invalid_values(cursor, db_connection): def test_money_smallmoney_roundtrip_executemany(cursor, db_connection): """Test inserting and retrieving MONEY and SMALLMONEY using executemany with decimal.Decimal""" try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """ - ) + """) db_connection.commit() test_data = [ @@ -8242,15 +8128,13 @@ def test_money_smallmoney_roundtrip_executemany(cursor, db_connection): def test_money_smallmoney_executemany_null_handling(cursor, db_connection): """Test inserting NULLs into MONEY and SMALLMONEY using executemany""" try: - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_money_test ( id INT IDENTITY PRIMARY KEY, m MONEY, sm SMALLMONEY ) - """ - ) + """) db_connection.commit() rows = [ @@ -8308,14 +8192,12 @@ def test_uuid_insert_and_select_none(cursor, db_connection): table_name = "#pytest_uuid_nullable" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, name NVARCHAR(50) ) - """ 
- ) + """) db_connection.commit() # Insert a row with None for the UUID @@ -8341,14 +8223,12 @@ def test_insert_multiple_uuids(cursor, db_connection): try: mssql_python.native_uuid = True cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER PRIMARY KEY, description NVARCHAR(50) ) - """ - ) + """) db_connection.commit() # Prepare test data @@ -8387,14 +8267,12 @@ def test_fetchmany_uuids(cursor, db_connection): try: mssql_python.native_uuid = True cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER PRIMARY KEY, description NVARCHAR(50) ) - """ - ) + """) db_connection.commit() uuids_to_insert = {f"Item {i}": uuid.uuid4() for i in range(10)} @@ -8431,14 +8309,12 @@ def test_uuid_insert_with_none(cursor, db_connection): table_name = "#pytest_uuid_none" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, name NVARCHAR(50) ) - """ - ) + """) db_connection.commit() cursor.execute(f"INSERT INTO {table_name} (id, name) VALUES (?, ?)", [None, "Alice"]) @@ -8537,14 +8413,12 @@ def test_executemany_uuid_insert_and_select(cursor, db_connection): try: # Drop and create a temporary table for the test cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER PRIMARY KEY, description NVARCHAR(50) ) - """ - ) + """) db_connection.commit() # Generate data for insertion @@ -8594,14 +8468,12 @@ def test_executemany_uuid_roundtrip_fixed_value(cursor, db_connection): table_name = "#pytest_uuid_fixed" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, description NVARCHAR(50) ) - """ - ) + """) 
db_connection.commit() fixed_uuid = uuid.UUID("12345678-1234-5678-1234-567812345678") @@ -8642,8 +8514,7 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -8651,16 +8522,13 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """ - ) + """) db_connection.commit() # Test with default separator first @@ -8697,23 +8565,19 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """ - ) + """) db_connection.commit() # Test with default separator @@ -8752,14 +8616,12 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test values with default separator (.) 
@@ -8844,25 +8706,21 @@ def test_lowercase_attribute(cursor, db_connection): try: # Create a test table with mixed-case column names - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lowercase_test ( ID INT PRIMARY KEY, UserName VARCHAR(50), EMAIL_ADDRESS VARCHAR(100), PhoneNumber VARCHAR(20) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_lowercase_test (ID, UserName, EMAIL_ADDRESS, PhoneNumber) VALUES (1, 'JohnDoe', 'john@example.com', '555-1234') - """ - ) + """) db_connection.commit() # First test with lowercase=False (default) @@ -8917,14 +8775,12 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test values with default separator (.) @@ -9006,8 +8862,7 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -9015,16 +8870,13 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """ - ) + """) db_connection.commit() # Test with default separator first @@ -9061,23 +8913,19 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + 
cursor.execute(""" INSERT INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """ - ) + """) db_connection.commit() # Test with default separator @@ -9590,25 +9438,21 @@ def test_lowercase_attribute(cursor, db_connection): try: # Create a test table with mixed-case column names - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lowercase_test ( ID INT PRIMARY KEY, UserName VARCHAR(50), EMAIL_ADDRESS VARCHAR(100), PhoneNumber VARCHAR(20) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_lowercase_test (ID, UserName, EMAIL_ADDRESS, PhoneNumber) VALUES (1, 'JohnDoe', 'john@example.com', '555-1234') - """ - ) + """) db_connection.commit() # First test with lowercase=False (default) @@ -9663,14 +9507,12 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test values with default separator (.) 
@@ -9752,8 +9594,7 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -9761,16 +9602,13 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """ - ) + """) db_connection.commit() # Test with default separator first @@ -9807,23 +9645,19 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """ - ) + """) db_connection.commit() # Test with default separator @@ -9862,14 +9696,12 @@ def test_cursor_setinputsizes_basic(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_inputsizes ( string_col NVARCHAR(100), int_col INT ) - """ - ) + """) # Set input sizes for parameters cursor.setinputsizes([(mssql_python.SQL_WVARCHAR, 100, 0), (mssql_python.SQL_INTEGER, 0, 0)]) @@ -9895,15 +9727,13 @@ def test_cursor_setinputsizes_with_executemany_float(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_float") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_inputsizes_float ( id INT, name NVARCHAR(50), price REAL /* Use REAL instead of DECIMAL */ ) - """ - ) + """) # Prepare data with float values data = [(1, "Item 1", 10.99), 
(2, "Item 2", 20.50), (3, "Item 3", 30.75)] @@ -9940,14 +9770,12 @@ def test_cursor_setinputsizes_reset(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_reset") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_inputsizes_reset ( col1 NVARCHAR(100), col2 INT ) - """ - ) + """) # Set input sizes for parameters cursor.setinputsizes([(mssql_python.SQL_WVARCHAR, 100, 0), (mssql_python.SQL_INTEGER, 0, 0)]) @@ -9982,14 +9810,12 @@ def test_cursor_setinputsizes_override_inference(db_connection): # Create a test table with specific types cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_override") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_inputsizes_override ( small_int SMALLINT, big_text NVARCHAR(MAX) ) - """ - ) + """) # Set input sizes that override the default inference # For SMALLINT, use a valid precision value (5 is typical for SMALLINT) @@ -10045,15 +9871,13 @@ def test_setinputsizes_parameter_count_mismatch_fewer(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_mismatch") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_inputsizes_mismatch ( col1 INT, col2 NVARCHAR(100), col3 FLOAT ) - """ - ) + """) # Set fewer input sizes than parameters cursor.setinputsizes( @@ -10096,14 +9920,12 @@ def test_setinputsizes_parameter_count_mismatch_more(db_connection): # Create a test table cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_mismatch") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_inputsizes_mismatch ( col1 INT, col2 NVARCHAR(100) ) - """ - ) + """) # Set more input sizes than parameters cursor.setinputsizes( @@ -10138,8 +9960,7 @@ def test_setinputsizes_with_null_values(db_connection): # Create a test table with multiple data types cursor.execute("DROP TABLE IF EXISTS #test_inputsizes_null") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_inputsizes_null ( int_col INT, string_col 
NVARCHAR(100), @@ -10147,8 +9968,7 @@ def test_setinputsizes_with_null_values(db_connection): date_col DATE, binary_col VARBINARY(100) ) - """ - ) + """) # Set input sizes for all columns cursor.setinputsizes( @@ -10451,18 +10271,15 @@ def test_procedures_setup(cursor, db_connection): ) # Create test stored procedures - cursor.execute( - """ + cursor.execute(""" CREATE OR ALTER PROCEDURE pytest_proc_schema.test_proc1 AS BEGIN SELECT 1 AS result END - """ - ) + """) - cursor.execute( - """ + cursor.execute(""" CREATE OR ALTER PROCEDURE pytest_proc_schema.test_proc2 @param1 INT, @param2 VARCHAR(50) OUTPUT @@ -10471,8 +10288,7 @@ def test_procedures_setup(cursor, db_connection): SELECT @param2 = 'Output ' + CAST(@param1 AS VARCHAR(10)) RETURN @param1 END - """ - ) + """) db_connection.commit() except Exception as e: @@ -10590,8 +10406,7 @@ def test_procedures_with_parameters(cursor, db_connection): """Test that procedures() correctly reports parameter information""" try: # Create a simpler procedure with basic parameters - cursor.execute( - """ + cursor.execute(""" CREATE OR ALTER PROCEDURE pytest_proc_schema.test_params_proc @in1 INT, @in2 VARCHAR(50) @@ -10599,8 +10414,7 @@ def test_procedures_with_parameters(cursor, db_connection): BEGIN SELECT @in1 AS value1, @in2 AS value2 END - """ - ) + """) db_connection.commit() # Get procedure info @@ -10634,28 +10448,23 @@ def test_procedures_result_set_info(cursor, db_connection): """Test that procedures() reports information about result sets""" try: # Create procedures with different result set patterns - cursor.execute( - """ + cursor.execute(""" CREATE OR ALTER PROCEDURE pytest_proc_schema.test_no_results AS BEGIN DECLARE @x INT = 1 END - """ - ) + """) - cursor.execute( - """ + cursor.execute(""" CREATE OR ALTER PROCEDURE pytest_proc_schema.test_one_result AS BEGIN SELECT 1 AS col1, 'test' AS col2 END - """ - ) + """) - cursor.execute( - """ + cursor.execute(""" CREATE OR ALTER PROCEDURE 
pytest_proc_schema.test_multiple_results AS BEGIN @@ -10663,8 +10472,7 @@ def test_procedures_result_set_info(cursor, db_connection): SELECT 'test' AS result2 SELECT GETDATE() AS result3 END - """ - ) + """) db_connection.commit() # Get procedure info for all test procedures @@ -10746,18 +10554,15 @@ def test_foreignkeys_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_fk_schema.customers") # Create parent table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_fk_schema.customers ( customer_id INT PRIMARY KEY, customer_name VARCHAR(100) NOT NULL ) - """ - ) + """) # Create child table with foreign key - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_fk_schema.orders ( order_id INT PRIMARY KEY, order_date DATETIME NOT NULL, @@ -10766,23 +10571,18 @@ def test_foreignkeys_setup(cursor, db_connection): CONSTRAINT FK_Orders_Customers FOREIGN KEY (customer_id) REFERENCES pytest_fk_schema.customers (customer_id) ) - """ - ) + """) # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO pytest_fk_schema.customers (customer_id, customer_name) VALUES (1, 'Test Customer 1'), (2, 'Test Customer 2') - """ - ) + """) - cursor.execute( - """ + cursor.execute(""" INSERT INTO pytest_fk_schema.orders (order_id, order_date, customer_id, total_amount) VALUES (101, GETDATE(), 1, 150.00), (102, GETDATE(), 2, 250.50) - """ - ) + """) db_connection.commit() except Exception as e: @@ -11010,20 +10810,17 @@ def test_foreignkeys_multiple_column_fk(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_fk_schema.product_variants") # Create parent table with composite primary key - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_fk_schema.product_variants ( product_id INT NOT NULL, variant_id INT NOT NULL, variant_name VARCHAR(100) NOT NULL, PRIMARY KEY (product_id, variant_id) ) - """ - ) + """) # Create child table with composite foreign key - cursor.execute( - """ + cursor.execute(""" 
CREATE TABLE pytest_fk_schema.order_details ( order_id INT NOT NULL, product_id INT NOT NULL, @@ -11033,8 +10830,7 @@ def test_foreignkeys_multiple_column_fk(cursor, db_connection): CONSTRAINT FK_OrderDetails_ProductVariants FOREIGN KEY (product_id, variant_id) REFERENCES pytest_fk_schema.product_variants (product_id, variant_id) ) - """ - ) + """) db_connection.commit() @@ -11099,27 +10895,23 @@ def test_primarykeys_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_pk_schema.composite_pk_test") # Create table with simple primary key - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_pk_schema.single_pk_test ( id INT PRIMARY KEY, name VARCHAR(100) NOT NULL, description VARCHAR(200) NULL ) - """ - ) + """) # Create table with composite primary key - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_pk_schema.composite_pk_test ( dept_id INT NOT NULL, emp_id INT NOT NULL, hire_date DATE NOT NULL, CONSTRAINT PK_composite_test PRIMARY KEY (dept_id, emp_id) ) - """ - ) + """) db_connection.commit() except Exception as e: @@ -11430,15 +11222,13 @@ def test_rowcount(cursor, db_connection): cursor.execute("INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe3');") assert cursor.rowcount == 1, "Rowcount should be 1 after third insert" - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_test_rowcount (name) VALUES ('JohnDoe4'), ('JohnDoe5'), ('JohnDoe6'); - """ - ) + """) assert cursor.rowcount == 3, "Rowcount should be 3 after inserting multiple rows" cursor.execute("SELECT * FROM #pytest_test_rowcount;") @@ -11473,31 +11263,26 @@ def test_specialcolumns_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_special_schema.identity_test") # Create table with primary key (for rowIdColumns) - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_special_schema.rowid_test ( id INT PRIMARY KEY, name NVARCHAR(100) NOT NULL, unique_col NVARCHAR(100) UNIQUE, non_unique_col NVARCHAR(100) 
) - """ - ) + """) # Create table with rowversion column (for rowVerColumns) - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_special_schema.timestamp_test ( id INT PRIMARY KEY, name NVARCHAR(100) NOT NULL, last_updated ROWVERSION ) - """ - ) + """) # Create table with multiple unique identifiers - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_special_schema.multiple_unique_test ( id INT NOT NULL, code VARCHAR(10) NOT NULL, @@ -11505,19 +11290,16 @@ def test_specialcolumns_setup(cursor, db_connection): order_number VARCHAR(20) UNIQUE, CONSTRAINT PK_multiple_unique_test PRIMARY KEY (id, code) ) - """ - ) + """) # Create table with identity column - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_special_schema.identity_test ( id INT IDENTITY(1,1) PRIMARY KEY, name NVARCHAR(100) NOT NULL, last_modified DATETIME DEFAULT GETDATE() ) - """ - ) + """) db_connection.commit() except Exception as e: @@ -11636,14 +11418,12 @@ def test_rowid_columns_nullable(cursor, db_connection): """Test rowIdColumns with nullable parameter""" try: # First create a table with nullable unique column and non-nullable PK - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_special_schema.nullable_test ( id INT PRIMARY KEY, -- PK can't be nullable in SQL Server data NVARCHAR(100) NULL ) - """ - ) + """) db_connection.commit() # Test with nullable=True (default) @@ -11736,14 +11516,12 @@ def test_rowver_columns_nullable(cursor, db_connection): """Test rowVerColumns with nullable parameter (not expected to have effect)""" try: # First create a table with rowversion column - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_special_schema.nullable_rowver_test ( id INT PRIMARY KEY, ts ROWVERSION ) - """ - ) + """) db_connection.commit() # Test with nullable=True (default) @@ -11852,8 +11630,7 @@ def test_statistics_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_stats_schema.empty_stats_test") # 
Create test table with various indexes - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_stats_schema.stats_test ( id INT PRIMARY KEY, name VARCHAR(100) NOT NULL, @@ -11862,32 +11639,25 @@ def test_statistics_setup(cursor, db_connection): salary DECIMAL(10, 2) NULL, hire_date DATE NOT NULL ) - """ - ) + """) # Create a non-unique index - cursor.execute( - """ + cursor.execute(""" CREATE INDEX IX_stats_test_dept_date ON pytest_stats_schema.stats_test (department, hire_date) - """ - ) + """) # Create a unique index on multiple columns - cursor.execute( - """ + cursor.execute(""" CREATE UNIQUE INDEX UX_stats_test_name_dept ON pytest_stats_schema.stats_test (name, department) - """ - ) + """) # Create an empty table for testing - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_stats_schema.empty_stats_test ( id INT PRIMARY KEY, data VARCHAR(100) NULL ) - """ - ) + """) db_connection.commit() except Exception as e: @@ -12152,8 +11922,7 @@ def test_columns_setup(cursor, db_connection): cursor.execute("DROP TABLE IF EXISTS pytest_cols_schema.columns_special_test") # Create test table with various column types - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_cols_schema.columns_test ( id INT PRIMARY KEY, name NVARCHAR(100) NOT NULL, @@ -12165,12 +11934,10 @@ def test_columns_setup(cursor, db_connection): notes TEXT NULL, [computed_col] AS (name + ' - ' + CAST(id AS VARCHAR(10))) ) - """ - ) + """) # Create table with special column names and edge cases - fix the problematic column name - cursor.execute( - """ + cursor.execute(""" CREATE TABLE pytest_cols_schema.columns_special_test ( [ID] INT PRIMARY KEY, [User Name] NVARCHAR(100) NULL, @@ -12182,8 +11949,7 @@ def test_columns_setup(cursor, db_connection): [Column/With/Slashes] VARCHAR(20) NULL, [Column_With_Underscores] VARCHAR(20) NULL -- Changed from problematic nested brackets ) - """ - ) + """) db_connection.commit() except Exception as e: @@ -12647,25 +12413,21 @@ def 
test_lowercase_attribute(cursor, db_connection): try: # Create a test table with mixed-case column names - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lowercase_test ( ID INT PRIMARY KEY, UserName VARCHAR(50), EMAIL_ADDRESS VARCHAR(100), PhoneNumber VARCHAR(20) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_lowercase_test (ID, UserName, EMAIL_ADDRESS, PhoneNumber) VALUES (1, 'JohnDoe', 'john@example.com', '555-1234') - """ - ) + """) db_connection.commit() # First test with lowercase=False (default) @@ -12720,14 +12482,12 @@ def test_decimal_separator_function(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_separator_test ( id INT PRIMARY KEY, decimal_value DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test values with default separator (.) @@ -12809,8 +12569,7 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_multi_test ( id INT PRIMARY KEY, positive_value DECIMAL(10, 2), @@ -12818,16 +12577,13 @@ def test_decimal_separator_with_multiple_values(cursor, db_connection): zero_value DECIMAL(10, 2), small_value DECIMAL(10, 4) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT INTO #pytest_decimal_multi_test VALUES (1, 123.45, -67.89, 0.00, 0.0001) - """ - ) + """) db_connection.commit() # Test with default separator first @@ -12864,23 +12620,19 @@ def test_decimal_separator_calculations(cursor, db_connection): try: # Create test table - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_calc_test ( id INT PRIMARY KEY, value1 DECIMAL(10, 2), value2 DECIMAL(10, 2) ) - """ - ) + """) db_connection.commit() # Insert test data - cursor.execute( - """ + cursor.execute(""" INSERT 
INTO #pytest_decimal_calc_test VALUES (1, 10.25, 5.75) - """ - ) + """) db_connection.commit() # Test with default separator @@ -12917,14 +12669,12 @@ def test_executemany_with_uuids(cursor, db_connection): table_name = "#pytest_uuid_batch" try: cursor.execute(f"DROP TABLE IF EXISTS {table_name}") - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( id UNIQUEIDENTIFIER, description NVARCHAR(50) ) - """ - ) + """) db_connection.commit() # Prepare test data: mix of UUIDs and None @@ -13072,13 +12822,11 @@ def test_date_string_parameter_binding(cursor, db_connection): table_name = "#pytest_date_string" try: drop_table_if_exists(cursor, table_name) - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( a_column VARCHAR(20) ) - """ - ) + """) cursor.execute(f"INSERT INTO {table_name} (a_column) VALUES ('string1'), ('string2')") db_connection.commit() @@ -13105,13 +12853,11 @@ def test_time_string_parameter_binding(cursor, db_connection): table_name = "#pytest_time_string" try: drop_table_if_exists(cursor, table_name) - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( time_col VARCHAR(22) ) - """ - ) + """) cursor.execute(f"INSERT INTO {table_name} (time_col) VALUES ('prefix_14:30:45_suffix')") db_connection.commit() @@ -13136,13 +12882,11 @@ def test_datetime_string_parameter_binding(cursor, db_connection): table_name = "#pytest_datetime_string" try: drop_table_if_exists(cursor, table_name) - cursor.execute( - f""" + cursor.execute(f""" CREATE TABLE {table_name} ( datetime_col VARCHAR(33) ) - """ - ) + """) cursor.execute( f"INSERT INTO {table_name} (datetime_col) VALUES ('prefix_2025-08-12T14:30:45_suffix')" ) @@ -14006,14 +13750,12 @@ def test_column_metadata_error_handling(cursor): """Test column metadata retrieval error handling (Lines 1156-1167).""" # Execute a complex query that might stress metadata retrieval - cursor.execute( - """ + cursor.execute(""" SELECT CAST(1 as INT) as int_col, CAST('test' 
as NVARCHAR(100)) as nvarchar_col, CAST(NEWID() as UNIQUEIDENTIFIER) as guid_col - """ - ) + """) # This should exercise the metadata retrieval code paths # If there are any errors, they should be logged but not crash @@ -14129,14 +13871,12 @@ def test_row_uuid_processing_with_braces(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_uuid_braces") # Create table with UNIQUEIDENTIFIER column - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_uuid_braces ( id INT IDENTITY(1,1), guid_col UNIQUEIDENTIFIER ) - """ - ) + """) # Insert a GUID with braces (this is how SQL Server often returns them) test_guid = "12345678-1234-5678-9ABC-123456789ABC" @@ -14180,14 +13920,12 @@ def test_row_uuid_processing_sql_guid_type(cursor, db_connection): drop_table_if_exists(cursor, "#pytest_sql_guid_type") # Create table with UNIQUEIDENTIFIER column - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_sql_guid_type ( id INT, guid_col UNIQUEIDENTIFIER ) - """ - ) + """) # Insert test data test_guid = "ABCDEF12-3456-7890-ABCD-1234567890AB" @@ -14233,14 +13971,12 @@ def test_row_output_converter_overflow_error(cursor, db_connection): try: # Create a table with integer column drop_table_if_exists(cursor, "#pytest_overflow_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_overflow_test ( id INT, small_int TINYINT -- TINYINT can only hold 0-255 ) - """ - ) + """) # Insert a valid value first cursor.execute("INSERT INTO #pytest_overflow_test (id, small_int) VALUES (?, ?)", [1, 100]) @@ -14290,14 +14026,12 @@ def test_row_output_converter_general_exception(cursor, db_connection): try: # Create a table with string column drop_table_if_exists(cursor, "#pytest_exception_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_exception_test ( id INT, text_col VARCHAR(50) ) - """ - ) + """) # Insert test data cursor.execute( @@ -14348,14 +14082,12 @@ def test_row_cursor_log_method_availability(cursor, db_connection): try: 
# Create test data drop_table_if_exists(cursor, "#pytest_log_check") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_log_check ( id INT, value_col INT ) - """ - ) + """) cursor.execute("INSERT INTO #pytest_log_check (id, value_col) VALUES (?, ?)", [1, 42]) db_connection.commit() @@ -14383,8 +14115,7 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): """Test NULL handling for all numeric types to ensure processor functions handle NULLs correctly""" try: drop_table_if_exists(cursor, "#pytest_all_numeric_nulls") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_all_numeric_nulls ( int_col INT, bigint_col BIGINT, @@ -14394,8 +14125,7 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): real_col REAL, float_col FLOAT ) - """ - ) + """) db_connection.commit() # Insert row with all NULLs @@ -14437,16 +14167,14 @@ def test_lob_data_types(cursor, db_connection): """Test LOB (Large Object) data types to ensure LOB fallback paths are exercised""" try: drop_table_if_exists(cursor, "#pytest_lob_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lob_test ( id INT, text_lob VARCHAR(MAX), ntext_lob NVARCHAR(MAX), binary_lob VARBINARY(MAX) ) - """ - ) + """) db_connection.commit() # Create large data that will trigger LOB handling @@ -14479,14 +14207,12 @@ def test_lob_char_column_types(cursor, db_connection): """Test LOB fetching specifically for CHAR/VARCHAR columns (covers lines 3313-3314)""" try: drop_table_if_exists(cursor, "#pytest_lob_char") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lob_char ( id INT, char_lob VARCHAR(MAX) ) - """ - ) + """) db_connection.commit() # Create data large enough to trigger LOB path (>8000 bytes) @@ -14513,14 +14239,12 @@ def test_lob_wchar_column_types(cursor, db_connection): """Test LOB fetching specifically for WCHAR/NVARCHAR columns (covers lines 3358-3359)""" try: drop_table_if_exists(cursor, "#pytest_lob_wchar") - cursor.execute( - """ + 
cursor.execute(""" CREATE TABLE #pytest_lob_wchar ( id INT, wchar_lob NVARCHAR(MAX) ) - """ - ) + """) db_connection.commit() # Create unicode data large enough to trigger LOB path (>4000 characters for NVARCHAR) @@ -14547,14 +14271,12 @@ def test_lob_binary_column_types(cursor, db_connection): """Test LOB fetching specifically for BINARY/VARBINARY columns (covers lines 3384-3385)""" try: drop_table_if_exists(cursor, "#pytest_lob_binary") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lob_binary ( id INT, binary_lob VARBINARY(MAX) ) - """ - ) + """) db_connection.commit() # Create binary data large enough to trigger LOB path (>8000 bytes) @@ -14581,16 +14303,14 @@ def test_zero_length_complex_types(cursor, db_connection): """Test zero-length data for complex types (covers lines 3531-3533)""" try: drop_table_if_exists(cursor, "#pytest_zero_length") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_zero_length ( id INT, empty_varchar VARCHAR(100), empty_nvarchar NVARCHAR(100), empty_binary VARBINARY(100) ) - """ - ) + """) db_connection.commit() # Insert empty (non-NULL) values @@ -14618,14 +14338,12 @@ def test_guid_with_nulls(cursor, db_connection): """Test GUID type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_guid_nulls") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_guid_nulls ( id INT, guid_col UNIQUEIDENTIFIER ) - """ - ) + """) db_connection.commit() # Insert NULL GUID @@ -14652,14 +14370,12 @@ def test_datetimeoffset_with_nulls(cursor, db_connection): """Test DATETIMEOFFSET type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_dto_nulls") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_dto_nulls ( id INT, dto_col DATETIMEOFFSET ) - """ - ) + """) db_connection.commit() # Insert NULL DATETIMEOFFSET @@ -14686,14 +14402,12 @@ def test_decimal_conversion_edge_cases(cursor, db_connection): """Test DECIMAL/NUMERIC type conversion including edge cases""" try: 
drop_table_if_exists(cursor, "#pytest_decimal_edge") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_edge ( id INT, dec_col DECIMAL(18, 4) ) - """ - ) + """) db_connection.commit() # Insert various decimal values including edge cases @@ -14814,8 +14528,7 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): """Test NULL handling for all numeric types to ensure processor functions handle NULLs correctly""" try: drop_table_if_exists(cursor, "#pytest_all_numeric_nulls") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_all_numeric_nulls ( int_col INT, bigint_col BIGINT, @@ -14825,8 +14538,7 @@ def test_all_numeric_types_with_nulls(cursor, db_connection): real_col REAL, float_col FLOAT ) - """ - ) + """) db_connection.commit() # Insert row with all NULLs @@ -14868,16 +14580,14 @@ def test_lob_data_types(cursor, db_connection): """Test LOB (Large Object) data types to ensure LOB fallback paths are exercised""" try: drop_table_if_exists(cursor, "#pytest_lob_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lob_test ( id INT, text_lob VARCHAR(MAX), ntext_lob NVARCHAR(MAX), binary_lob VARBINARY(MAX) ) - """ - ) + """) db_connection.commit() # Create large data that will trigger LOB handling @@ -14910,14 +14620,12 @@ def test_lob_char_column_types(cursor, db_connection): """Test LOB fetching specifically for CHAR/VARCHAR columns (covers lines 3313-3314)""" try: drop_table_if_exists(cursor, "#pytest_lob_char") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lob_char ( id INT, char_lob VARCHAR(MAX) ) - """ - ) + """) db_connection.commit() # Create data large enough to trigger LOB path (>8000 bytes) @@ -14944,14 +14652,12 @@ def test_lob_wchar_column_types(cursor, db_connection): """Test LOB fetching specifically for WCHAR/NVARCHAR columns (covers lines 3358-3359)""" try: drop_table_if_exists(cursor, "#pytest_lob_wchar") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE 
#pytest_lob_wchar ( id INT, wchar_lob NVARCHAR(MAX) ) - """ - ) + """) db_connection.commit() # Create unicode data large enough to trigger LOB path (>4000 characters for NVARCHAR) @@ -14978,14 +14684,12 @@ def test_lob_binary_column_types(cursor, db_connection): """Test LOB fetching specifically for BINARY/VARBINARY columns (covers lines 3384-3385)""" try: drop_table_if_exists(cursor, "#pytest_lob_binary") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_lob_binary ( id INT, binary_lob VARBINARY(MAX) ) - """ - ) + """) db_connection.commit() # Create binary data large enough to trigger LOB path (>8000 bytes) @@ -15012,16 +14716,14 @@ def test_zero_length_complex_types(cursor, db_connection): """Test zero-length data for complex types (covers lines 3531-3533)""" try: drop_table_if_exists(cursor, "#pytest_zero_length") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_zero_length ( id INT, empty_varchar VARCHAR(100), empty_nvarchar NVARCHAR(100), empty_binary VARBINARY(100) ) - """ - ) + """) db_connection.commit() # Insert empty (non-NULL) values @@ -15049,14 +14751,12 @@ def test_guid_with_nulls(cursor, db_connection): """Test GUID type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_guid_nulls") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_guid_nulls ( id INT, guid_col UNIQUEIDENTIFIER ) - """ - ) + """) db_connection.commit() # Insert NULL GUID @@ -15083,14 +14783,12 @@ def test_datetimeoffset_with_nulls(cursor, db_connection): """Test DATETIMEOFFSET type with NULL values""" try: drop_table_if_exists(cursor, "#pytest_dto_nulls") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_dto_nulls ( id INT, dto_col DATETIMEOFFSET ) - """ - ) + """) db_connection.commit() # Insert NULL DATETIMEOFFSET @@ -15117,14 +14815,12 @@ def test_decimal_conversion_edge_cases(cursor, db_connection): """Test DECIMAL/NUMERIC type conversion including edge cases""" try: drop_table_if_exists(cursor, 
"#pytest_decimal_edge") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #pytest_decimal_edge ( id INT, dec_col DECIMAL(18, 4) ) - """ - ) + """) db_connection.commit() # Insert various decimal values including edge cases @@ -15245,16 +14941,14 @@ def test_fetchall_with_integrity_constraint(cursor, db_connection): try: # Setup table with unique constraint cursor.execute("DROP TABLE IF EXISTS #uniq_cons_test") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #uniq_cons_test ( id INTEGER NOT NULL IDENTITY, data VARCHAR(50) NULL, PRIMARY KEY (id), UNIQUE (data) ) - """ - ) + """) # Insert initial row - should work cursor.execute( @@ -15549,8 +15243,7 @@ def test_native_uuid_non_uuid_columns_unaffected(db_connection): mssql_python.native_uuid = False drop_table_if_exists(cursor, "#test_uuid_other_cols") - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #test_uuid_other_cols ( id UNIQUEIDENTIFIER, int_col INT, @@ -15558,8 +15251,7 @@ def test_native_uuid_non_uuid_columns_unaffected(db_connection): float_col FLOAT, bit_col BIT ) - """ - ) + """) test_uuid = uuid.uuid4() cursor.execute( "INSERT INTO #test_uuid_other_cols VALUES (?, ?, ?, ?, ?)", @@ -15880,14 +15572,12 @@ def test_executemany_uuid_output_sets_uuid_str_indices(conn_str): conn = mssql_python.connect(conn_str) cursor = conn.cursor() - cursor.execute( - """ + cursor.execute(""" CREATE TABLE #executemany_uuid_output ( id INT IDENTITY(1,1), guid UNIQUEIDENTIFIER DEFAULT NEWID() ) - """ - ) + """) # executemany with OUTPUT — produces a result set with a UUID column cursor.executemany( From 3c079d1b43d7af0180d026a3a148199d01f46a1b Mon Sep 17 00:00:00 2001 From: Jahnvi Thakkar Date: Mon, 2 Mar 2026 16:46:25 +0530 Subject: [PATCH 6/6] Adding concurrancy tests --- tests/test_004_cursor.py | 486 ++++++++++++++++++++++++--------------- 1 file changed, 304 insertions(+), 182 deletions(-) diff --git a/tests/test_004_cursor.py b/tests/test_004_cursor.py index 90e30159a..e178ba33c 100644 --- 
a/tests/test_004_cursor.py +++ b/tests/test_004_cursor.py @@ -14,9 +14,11 @@ import time as time_module import decimal from contextlib import closing +import threading import mssql_python import uuid import re +from unittest.mock import patch from conftest import is_azure_sql_connection # Setup test table @@ -15040,8 +15042,6 @@ def test_close(db_connection): def test_native_uuid_true_returns_uuid_objects(db_connection): """Test that with native_uuid=True, UNIQUEIDENTIFIER columns return uuid.UUID.""" - import uuid - original_value = mssql_python.native_uuid cursor = db_connection.cursor() @@ -15081,8 +15081,6 @@ def test_native_uuid_true_returns_uuid_objects(db_connection): def test_native_uuid_false_returns_strings(db_connection): """Test that with native_uuid=False, UNIQUEIDENTIFIER columns return str.""" - import uuid - original_value = mssql_python.native_uuid cursor = db_connection.cursor() @@ -15126,45 +15124,6 @@ def test_native_uuid_false_returns_strings(db_connection): db_connection.commit() -def test_native_uuid_false_string_operations(db_connection): - """Test that with native_uuid=False, string operations work on returned UUIDs.""" - import uuid - import json - - original_value = mssql_python.native_uuid - cursor = db_connection.cursor() - - try: - mssql_python.native_uuid = False - - drop_table_if_exists(cursor, "#test_uuid_str_ops") - cursor.execute("CREATE TABLE #test_uuid_str_ops (id UNIQUEIDENTIFIER)") - test_uuid = uuid.uuid4() - cursor.execute("INSERT INTO #test_uuid_str_ops VALUES (?)", [test_uuid]) - - cursor.execute("SELECT id FROM #test_uuid_str_ops") - row = cursor.fetchone() - guid_value = row[0] - - # These operations should all work without errors when native_uuid=False - assert isinstance(guid_value, str) - assert guid_value == str(test_uuid).upper() - assert guid_value.replace("-", "") == str(test_uuid).upper().replace("-", "") - assert guid_value.strip() == str(test_uuid).upper() - - # JSON serialization should work - payload = 
json.dumps({"id": guid_value}) - assert str(test_uuid).upper() in payload - - # String equality should work (pyodbc-compatible uppercase) - assert guid_value == str(test_uuid).upper() - - finally: - mssql_python.native_uuid = original_value - drop_table_if_exists(cursor, "#test_uuid_str_ops") - db_connection.commit() - - def test_native_uuid_null_handling(db_connection): """Test that NULL UNIQUEIDENTIFIER values remain None regardless of native_uuid setting.""" original_value = mssql_python.native_uuid @@ -15193,49 +15152,8 @@ def test_native_uuid_null_handling(db_connection): db_connection.commit() -def test_native_uuid_multiple_rows(db_connection): - """Test native_uuid=False with multiple UUID rows via fetchall and fetchmany.""" - import uuid - - original_value = mssql_python.native_uuid - cursor = db_connection.cursor() - - try: - mssql_python.native_uuid = False - - drop_table_if_exists(cursor, "#test_uuid_multi") - cursor.execute("CREATE TABLE #test_uuid_multi (id UNIQUEIDENTIFIER, seq INT)") - - test_uuids = [uuid.uuid4() for _ in range(10)] - for i, uid in enumerate(test_uuids): - cursor.execute("INSERT INTO #test_uuid_multi VALUES (?, ?)", [uid, i]) - db_connection.commit() - - # fetchall — all should be strings - cursor.execute("SELECT id, seq FROM #test_uuid_multi ORDER BY seq") - rows = cursor.fetchall() - assert len(rows) == 10 - for i, row in enumerate(rows): - assert isinstance(row[0], str), f"Row {i}: expected str, got {type(row[0])}" - assert row[0] == str(test_uuids[i]).upper() - - # fetchmany — should also return strings - cursor.execute("SELECT id, seq FROM #test_uuid_multi ORDER BY seq") - rows = cursor.fetchmany(5) - assert len(rows) == 5 - for i, row in enumerate(rows): - assert isinstance(row[0], str), f"Row {i}: expected str, got {type(row[0])}" - - finally: - mssql_python.native_uuid = original_value - drop_table_if_exists(cursor, "#test_uuid_multi") - db_connection.commit() - - def 
test_native_uuid_non_uuid_columns_unaffected(db_connection): """Test that native_uuid=False does not affect non-UUID columns.""" - import uuid - original_value = mssql_python.native_uuid cursor = db_connection.cursor() @@ -15277,8 +15195,6 @@ def test_native_uuid_non_uuid_columns_unaffected(db_connection): def test_native_uuid_setting_snapshot_at_execute(db_connection): """Test that native_uuid is snapshotted at execute() time, not fetch() time.""" - import uuid - original_value = mssql_python.native_uuid cursor = db_connection.cursor() @@ -15307,48 +15223,8 @@ def test_native_uuid_setting_snapshot_at_execute(db_connection): db_connection.commit() -def test_native_uuid_wide_result_set_performance(db_connection): - """Test UUID handling in wide result sets — only UUID columns should be affected.""" - import uuid - - original_value = mssql_python.native_uuid - cursor = db_connection.cursor() - - try: - mssql_python.native_uuid = False - - drop_table_if_exists(cursor, "#test_uuid_wide") - # Create a wide table: 1 UUID column + 30 varchar columns - cols = ", ".join([f"col{i} VARCHAR(50)" for i in range(1, 31)]) - cursor.execute(f"CREATE TABLE #test_uuid_wide (id UNIQUEIDENTIFIER, {cols})") - - test_uuid = uuid.uuid4() - values = [test_uuid] + [f"Value {i}" for i in range(1, 31)] - placeholders = ", ".join(["?"] * 31) - cursor.execute(f"INSERT INTO #test_uuid_wide VALUES ({placeholders})", values) - - cursor.execute("SELECT * FROM #test_uuid_wide") - row = cursor.fetchone() - - # UUID column should be str (uppercase, matching pyodbc / SQL Server format) - assert isinstance(row[0], str), f"UUID col: expected str, got {type(row[0])}" - assert row[0] == str(test_uuid).upper() - - # All other columns should remain str (unaffected) - for i in range(1, 31): - assert isinstance(row[i], str), f"Column {i}: expected str, got {type(row[i])}" - assert row[i] == f"Value {i}" - - finally: - mssql_python.native_uuid = original_value - drop_table_if_exists(cursor, "#test_uuid_wide") - 
db_connection.commit() - - def test_native_uuid_input_parameter_accepts_uuid_objects(db_connection): """Test that uuid.UUID objects are still accepted as input parameters regardless of native_uuid.""" - import uuid - original_value = mssql_python.native_uuid cursor = db_connection.cursor() @@ -15383,54 +15259,8 @@ def test_native_uuid_input_parameter_accepts_uuid_objects(db_connection): # ────────────────────────────────────────────────────────────────────────────── -def test_per_connection_native_uuid_false(conn_str): - """Test that connect(native_uuid=False) returns UUID columns as strings.""" - import uuid - - conn = mssql_python.connect(conn_str, native_uuid=False) - cursor = conn.cursor() - try: - drop_table_if_exists(cursor, "#test_conn_uuid_false") - cursor.execute( - "CREATE TABLE #test_conn_uuid_false (id UNIQUEIDENTIFIER, name NVARCHAR(50))" - ) - test_uuid = uuid.uuid4() - cursor.execute("INSERT INTO #test_conn_uuid_false VALUES (?, ?)", [test_uuid, "test"]) - - cursor.execute("SELECT id, name FROM #test_conn_uuid_false") - row = cursor.fetchone() - assert isinstance(row[0], str), f"Expected str, got {type(row[0])}" - assert row[0] == str(test_uuid).upper() - finally: - drop_table_if_exists(cursor, "#test_conn_uuid_false") - conn.close() - - -def test_per_connection_native_uuid_true(conn_str): - """Test that connect(native_uuid=True) returns UUID columns as uuid.UUID objects.""" - import uuid - - conn = mssql_python.connect(conn_str, native_uuid=True) - cursor = conn.cursor() - try: - drop_table_if_exists(cursor, "#test_conn_uuid_true") - cursor.execute("CREATE TABLE #test_conn_uuid_true (id UNIQUEIDENTIFIER, name NVARCHAR(50))") - test_uuid = uuid.uuid4() - cursor.execute("INSERT INTO #test_conn_uuid_true VALUES (?, ?)", [test_uuid, "test"]) - - cursor.execute("SELECT id, name FROM #test_conn_uuid_true") - row = cursor.fetchone() - assert isinstance(row[0], uuid.UUID), f"Expected uuid.UUID, got {type(row[0])}" - assert row[0] == test_uuid - finally: - 
drop_table_if_exists(cursor, "#test_conn_uuid_true") - conn.close() - - def test_per_connection_native_uuid_none_uses_module_default(conn_str): """Test that connect(native_uuid=None) defers to module-level setting.""" - import uuid - original_value = mssql_python.native_uuid conn = mssql_python.connect(conn_str, native_uuid=None) cursor = conn.cursor() @@ -15460,8 +15290,6 @@ def test_per_connection_native_uuid_none_uses_module_default(conn_str): def test_per_connection_overrides_module_level(conn_str): """Test that per-connection native_uuid overrides the module-level setting.""" - import uuid - original_value = mssql_python.native_uuid conn = None conn2 = None @@ -15508,8 +15336,6 @@ def test_per_connection_overrides_module_level(conn_str): def test_two_connections_different_native_uuid(conn_str): """Test that two simultaneous connections can have different native_uuid settings.""" - import uuid - original_value = mssql_python.native_uuid try: conn_str_mode = conn_str @@ -15553,8 +15379,6 @@ def test_two_connections_different_native_uuid(conn_str): def test_per_connection_native_uuid_invalid_type(conn_str): """Test that connect(native_uuid=) raises ValueError.""" - import pytest - with pytest.raises(ValueError, match="native_uuid must be a boolean"): mssql_python.connect(conn_str, native_uuid="false") @@ -15564,8 +15388,6 @@ def test_per_connection_native_uuid_invalid_type(conn_str): def test_executemany_uuid_output_sets_uuid_str_indices(conn_str): """Test that executemany with OUTPUT clause computes _uuid_str_indices.""" - import uuid as _uuid - original = mssql_python.native_uuid try: mssql_python.native_uuid = False @@ -15583,8 +15405,8 @@ def test_executemany_uuid_output_sets_uuid_str_indices(conn_str): cursor.executemany( "INSERT INTO #executemany_uuid_output (guid) OUTPUT INSERTED.guid VALUES (?)", [ - (_uuid.UUID("11111111-1111-1111-1111-111111111111"),), - (_uuid.UUID("22222222-2222-2222-2222-222222222222"),), + 
(uuid.UUID("11111111-1111-1111-1111-111111111111"),), + (uuid.UUID("22222222-2222-2222-2222-222222222222"),), ], ) @@ -15608,3 +15430,303 @@ def test_executemany_uuid_output_sets_uuid_str_indices(conn_str): conn.close() finally: mssql_python.native_uuid = original + + +def test_executemany_no_result_set_clears_uuid_str_indices(conn_str): + """Test that executemany without OUTPUT clears description and uuid state. + + Covers the ``else`` branch (cursor.description is None) inside executemany(). + """ + original = mssql_python.native_uuid + try: + mssql_python.native_uuid = False + conn = mssql_python.connect(conn_str) + cursor = conn.cursor() + + cursor.execute(""" + CREATE TABLE #executemany_no_output ( + id INT, + guid UNIQUEIDENTIFIER + ) + """) + + # Plain INSERT — no OUTPUT clause, no result set produced + cursor.executemany( + "INSERT INTO #executemany_no_output (id, guid) VALUES (?, ?)", + [ + (1, uuid.UUID("11111111-1111-1111-1111-111111111111")), + (2, uuid.UUID("22222222-2222-2222-2222-222222222222")), + ], + ) + + # description should be None + assert cursor.description is None, "Plain INSERT should have no description" + # _uuid_str_indices should be None + assert cursor._uuid_str_indices is None + # rowcount should reflect the inserted rows + assert cursor.rowcount == 2 + + cursor.execute("DROP TABLE IF EXISTS #executemany_no_output") + cursor.close() + conn.close() + finally: + mssql_python.native_uuid = original + + +def test_executemany_describe_col_exception_sets_description_none(conn_str): + """Test that executemany handles DDBCSQLDescribeCol raising an exception. + + The except handler that sets + self.description = None when the C++ binding raises during column + metadata retrieval. 
+ """ + original = mssql_python.native_uuid + try: + mssql_python.native_uuid = False + conn = mssql_python.connect(conn_str) + cursor = conn.cursor() + + cursor.execute(""" + CREATE TABLE #executemany_except_test ( + id INT, + guid UNIQUEIDENTIFIER + ) + """) + + # Capture the real DDBCSQLDescribeCol so we can patch only during executemany + real_describe = mssql_python.cursor.ddbc_bindings.DDBCSQLDescribeCol + call_count = 0 + + def describe_raises(*args, **kwargs): + """Raise on the first call (inside executemany), delegate otherwise.""" + nonlocal call_count + call_count += 1 + raise RuntimeError("Simulated DDBCSQLDescribeCol failure") + + # executemany with OUTPUT — would normally produce a result set, but we + # force DDBCSQLDescribeCol to raise so the except branch is taken. + with patch.object( + mssql_python.cursor.ddbc_bindings, + "DDBCSQLDescribeCol", + side_effect=describe_raises, + ): + cursor.executemany( + "INSERT INTO #executemany_except_test (id, guid) OUTPUT INSERTED.guid VALUES (?, ?)", + [ + (1, uuid.UUID("11111111-1111-1111-1111-111111111111")), + ], + ) + + # The except branch should have set description to None + assert ( + cursor.description is None + ), "description should be None after DDBCSQLDescribeCol raises" + assert cursor._uuid_str_indices is None + assert call_count >= 1, "DDBCSQLDescribeCol mock should have been called" + + cursor.execute("DROP TABLE IF EXISTS #executemany_except_test") + cursor.close() + conn.close() + finally: + mssql_python.native_uuid = original + + +# ────────────────────────────────────────────────────────────────────────────── +# native_uuid concurrency & thread-safety tests +# ────────────────────────────────────────────────────────────────────────────── + + +def test_native_uuid_concurrent_connections_isolation(conn_str): + """Multiple threads with different per-connection native_uuid execute simultaneously. 
+ + Verifies that per-connection native_uuid settings are fully isolated: + each thread's results match its own connection's setting regardless of + what other threads are doing concurrently. + """ + NUM_THREADS = 6 + ITERATIONS = 5 + errors = [] + barrier = threading.Barrier(NUM_THREADS) + + def worker(thread_id, native_uuid_setting): + """Each thread creates its own connection and verifies return types.""" + try: + conn = mssql_python.connect(conn_str, native_uuid=native_uuid_setting) + cursor = conn.cursor() + table = f"#concurrent_uuid_{thread_id}" + + try: + cursor.execute(f"CREATE TABLE {table} (id UNIQUEIDENTIFIER)") + test_uuid = uuid.uuid4() + cursor.execute(f"INSERT INTO {table} VALUES (?)", [test_uuid]) + + # Synchronize — all threads start querying at the same time + barrier.wait(timeout=10) + + for _ in range(ITERATIONS): + cursor.execute(f"SELECT id FROM {table}") + row = cursor.fetchone() + if native_uuid_setting: + if not isinstance(row[0], uuid.UUID): + errors.append( + f"Thread {thread_id}: expected uuid.UUID, " f"got {type(row[0])}" + ) + else: + if not isinstance(row[0], str): + errors.append( + f"Thread {thread_id}: expected str, " f"got {type(row[0])}" + ) + + cursor.execute(f"DROP TABLE IF EXISTS {table}") + finally: + conn.close() + except Exception as e: + errors.append(f"Thread {thread_id}: {e}") + + threads = [] + for i in range(NUM_THREADS): + # Alternate True / False across threads + setting = i % 2 == 0 + t = threading.Thread(target=worker, args=(i, setting)) + threads.append(t) + + for t in threads: + t.start() + for t in threads: + t.join(timeout=30) + + assert not errors, f"Concurrent connection isolation errors: {errors}" + + +def test_native_uuid_snapshot_under_concurrent_modification(conn_str): + """Snapshot-at-execute holds even when another thread modifies the module setting. + + Thread A executes with native_uuid=False, then thread B toggles the + module setting to True *before* thread A fetches. 
Thread A must still + receive strings because the setting was snapshotted at execute() time. + """ + original = mssql_python.native_uuid + errors = [] + executed_event = threading.Event() # A signals after execute() + toggled_event = threading.Event() # B signals after toggling + + try: + mssql_python.native_uuid = False + conn = mssql_python.connect(conn_str) # native_uuid=None → uses module + cursor = conn.cursor() + + cursor.execute("CREATE TABLE #snapshot_conc (id UNIQUEIDENTIFIER)") + test_uuid = uuid.uuid4() + cursor.execute("INSERT INTO #snapshot_conc VALUES (?)", [test_uuid]) + + def thread_a(): + """Execute with False, wait for toggle, then fetch.""" + try: + cursor.execute("SELECT id FROM #snapshot_conc") + executed_event.set() # signal: execute() done + toggled_event.wait(timeout=10) # wait for B to toggle + row = cursor.fetchone() + if not isinstance(row[0], str): + errors.append( + f"Thread A: snapshot broken — expected str, " f"got {type(row[0])}" + ) + except Exception as e: + errors.append(f"Thread A: {e}") + + def thread_b(): + """Wait for A to execute, then toggle module setting.""" + try: + executed_event.wait(timeout=10) + mssql_python.native_uuid = True # toggle after execute + toggled_event.set() + except Exception as e: + errors.append(f"Thread B: {e}") + + ta = threading.Thread(target=thread_a) + tb = threading.Thread(target=thread_b) + ta.start() + tb.start() + ta.join(timeout=15) + tb.join(timeout=15) + + assert not errors, f"Snapshot-under-concurrency errors: {errors}" + + cursor.execute("DROP TABLE IF EXISTS #snapshot_conc") + conn.close() + finally: + mssql_python.native_uuid = original + + +def test_native_uuid_concurrent_toggle_consistency(conn_str): + """One thread rapidly toggles module-level native_uuid while others query. + + Each querying thread must get a *consistent* result for each execute/fetch + cycle — either all uuid.UUID or all str within a single cursor, never a + mix. 
This validates that the snapshot-at-execute design prevents torn reads. + """ + original = mssql_python.native_uuid + NUM_READERS = 4 + ITERATIONS = 10 + errors = [] + stop_event = threading.Event() + + def toggler(): + """Rapidly flip the module-level native_uuid flag.""" + try: + while not stop_event.is_set(): + mssql_python.native_uuid = True + mssql_python.native_uuid = False + except Exception as e: + errors.append(f"Toggler: {e}") + + def reader(reader_id): + """Open a connection and repeatedly execute + fetch, checking consistency.""" + try: + conn = mssql_python.connect(conn_str) # native_uuid=None → module + cursor = conn.cursor() + table = f"#toggle_reader_{reader_id}" + + try: + cursor.execute(f"CREATE TABLE {table} (id UNIQUEIDENTIFIER)") + uuids = [uuid.uuid4() for _ in range(3)] + for u in uuids: + cursor.execute(f"INSERT INTO {table} VALUES (?)", [u]) + + for _ in range(ITERATIONS): + cursor.execute(f"SELECT id FROM {table}") + rows = cursor.fetchall() + types = {type(r[0]) for r in rows} + + # All rows in a single fetch must be the same type + if len(types) != 1: + errors.append( + f"Reader {reader_id}: mixed types in single " f"fetch: {types}" + ) + + cursor.execute(f"DROP TABLE IF EXISTS {table}") + finally: + conn.close() + except Exception as e: + errors.append(f"Reader {reader_id}: {e}") + + try: + toggle_thread = threading.Thread(target=toggler, daemon=True) + toggle_thread.start() + + reader_threads = [] + for i in range(NUM_READERS): + t = threading.Thread(target=reader, args=(i,)) + reader_threads.append(t) + + for t in reader_threads: + t.start() + for t in reader_threads: + t.join(timeout=30) + + stop_event.set() + toggle_thread.join(timeout=5) + + assert not errors, f"Concurrent toggle consistency errors: {errors}" + finally: + stop_event.set() + mssql_python.native_uuid = original