Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 19 additions & 0 deletions mssql_python/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -339,6 +339,24 @@ def lowercase(self, value: bool) -> None:
with _settings_lock:
_settings.lowercase = value

@property
def native_uuid(self) -> bool:
    """Whether UNIQUEIDENTIFIER columns come back as uuid.UUID objects.

    False (the default) returns them as str, matching pyodbc behavior;
    set to True to receive native uuid.UUID objects instead.
    """
    return _settings.native_uuid

@native_uuid.setter
def native_uuid(self, value: bool) -> None:
    """Validate and store the native_uuid setting under the settings lock."""
    if isinstance(value, bool):
        with _settings_lock:
            _settings.native_uuid = value
    else:
        raise ValueError("native_uuid must be a boolean value")


# Replace the current module with our custom module class
old_module: types.ModuleType = sys.modules[__name__]
Expand All @@ -357,3 +375,4 @@ def lowercase(self, value: bool) -> None:

# Initialize property values
lowercase: bool = _settings.lowercase
native_uuid: bool = _settings.native_uuid
13 changes: 13 additions & 0 deletions mssql_python/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,7 @@ def __init__(
autocommit: bool = False,
attrs_before: Optional[Dict[int, Union[int, str, bytes]]] = None,
timeout: int = 0,
native_uuid: Optional[bool] = None,
**kwargs: Any,
) -> None:
"""
Expand All @@ -219,6 +220,9 @@ def __init__(
connecting, such as SQL_ATTR_LOGIN_TIMEOUT,
SQL_ATTR_ODBC_CURSORS, and SQL_ATTR_PACKET_SIZE.
timeout (int): Login timeout in seconds. 0 means no timeout.
native_uuid (bool, optional): Controls whether UNIQUEIDENTIFIER columns return
uuid.UUID objects (True) or str (False) for cursors created from this connection.
None (default) defers to the module-level ``mssql_python.native_uuid`` setting.
**kwargs: Additional key/value pairs for the connection string.

Returns:
Expand All @@ -236,7 +240,16 @@ def __init__(
>>> import mssql_python as ms
>>> conn = ms.connect("Server=myserver;Database=mydb",
... attrs_before={ms.SQL_ATTR_LOGIN_TIMEOUT: 30})

>>> # Return native uuid.UUID objects instead of strings
>>> conn = ms.connect("Server=myserver;Database=mydb", native_uuid=True)
"""
# Store per-connection native_uuid override.
# None means "use module-level mssql_python.native_uuid".
if native_uuid is not None and not isinstance(native_uuid, bool):
raise ValueError("native_uuid must be a boolean value or None")
self._native_uuid = native_uuid

self.connection_str = self._construct_connection_string(connection_str, **kwargs)
self._attrs_before = attrs_before or {}

Expand Down
86 changes: 73 additions & 13 deletions mssql_python/cursor.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,10 @@ def __init__(self, connection: "Connection", timeout: int = 0) -> None:

self._cached_column_map = None
self._cached_converter_map = None
self._uuid_str_indices = None # Pre-computed UUID column indices for str conversion
# Cache the effective native_uuid setting for this cursor's connection.
# Resolution order: connection._native_uuid (if not None) → module-level setting.
self._conn_native_uuid = getattr(self.connection, "_native_uuid", None)
self._next_row_index = 0 # internal: index of the next row the driver will return (0-based)
self._has_result_set = False # Track if we have an active result set
self._skip_increment_for_next_fetch = (
Expand Down Expand Up @@ -1009,6 +1013,32 @@ def _build_converter_map(self):

return converter_map

def _compute_uuid_str_indices(self):
    """
    Determine which result-set columns need uuid.UUID → uppercase-str
    conversion, based on the effective native_uuid setting.

    The connection-level override (self._conn_native_uuid) wins when it
    is not None; otherwise the module-level setting applies.

    Returns:
        tuple of int or None: indices of UUID-typed columns to stringify,
        or None when no conversion is needed (no result set, native_uuid
        is True, or no UUID columns) — the None fast path adds zero
        per-row overhead.
    """
    description = self.description
    if not description:
        return None

    override = self._conn_native_uuid
    effective = override if override is not None else get_settings().native_uuid
    if effective:
        # Native UUID objects requested: nothing to convert per row.
        return None

    uuid_columns = tuple(
        idx
        for idx, col in enumerate(description)
        if col and col[1] is uuid.UUID
    )
    return uuid_columns or None

def _get_column_and_converter_maps(self):
"""
Get column map and converter map for Row construction (thread-safe).
Expand Down Expand Up @@ -1429,20 +1459,13 @@ def execute( # pylint: disable=too-many-locals,too-many-branches,too-many-state
col_desc[0]: i for i, col_desc in enumerate(self.description)
}
self._cached_converter_map = self._build_converter_map()
self._uuid_str_indices = self._compute_uuid_str_indices()
else:
self.rowcount = ddbc_bindings.DDBCSQLRowCount(self.hstmt)
self._clear_rownumber()
self._cached_column_map = None
self._cached_converter_map = None

# After successful execution, initialize description if there are results
column_metadata = []
try:
ddbc_bindings.DDBCSQLDescribeCol(self.hstmt, column_metadata)
self._initialize_description(column_metadata)
except Exception as e:
# If describe fails, it's likely there are no results (e.g., for INSERT)
self.description = None
self._uuid_str_indices = None

self._reset_inputsizes() # Reset input sizes after execution
# Return self for method chaining
Expand Down Expand Up @@ -2273,14 +2296,29 @@ def executemany( # pylint: disable=too-many-locals,too-many-branches,too-many-s
check_error(ddbc_sql_const.SQL_HANDLE_STMT.value, self.hstmt, ret)
self.rowcount = ddbc_bindings.DDBCSQLRowCount(self.hstmt)
self.last_executed_stmt = operation
self._initialize_description()

# Fetch column metadata (e.g. for INSERT … OUTPUT)
column_metadata = []
try:
ddbc_bindings.DDBCSQLDescribeCol(self.hstmt, column_metadata)
self._initialize_description(column_metadata)
except Exception: # pylint: disable=broad-exception-caught
self.description = None

if self.description:
self.rowcount = -1
self._reset_rownumber()
self._cached_column_map = {
col_desc[0]: i for i, col_desc in enumerate(self.description)
}
self._cached_converter_map = self._build_converter_map()
self._uuid_str_indices = self._compute_uuid_str_indices()
else:
self.rowcount = ddbc_bindings.DDBCSQLRowCount(self.hstmt)
self._clear_rownumber()
self._cached_column_map = None
self._cached_converter_map = None
self._uuid_str_indices = None
finally:
# Reset input sizes after execution
self._reset_inputsizes()
Expand Down Expand Up @@ -2328,7 +2366,13 @@ def fetchone(self) -> Union[None, Row]:

# Get column and converter maps
column_map, converter_map = self._get_column_and_converter_maps()
return Row(row_data, column_map, cursor=self, converter_map=converter_map)
return Row(
row_data,
column_map,
cursor=self,
converter_map=converter_map,
uuid_str_indices=self._uuid_str_indices,
)
except Exception as e:
# On error, don't increment rownumber - rethrow the error
raise e
Expand Down Expand Up @@ -2386,8 +2430,15 @@ def fetchmany(self, size: Optional[int] = None) -> List[Row]:
column_map, converter_map = self._get_column_and_converter_maps()

# Convert raw data to Row objects
uuid_idx = self._uuid_str_indices
return [
Row(row_data, column_map, cursor=self, converter_map=converter_map)
Row(
row_data,
column_map,
cursor=self,
converter_map=converter_map,
uuid_str_indices=uuid_idx,
)
for row_data in rows_data
]
except Exception as e:
Expand Down Expand Up @@ -2439,8 +2490,15 @@ def fetchall(self) -> List[Row]:
column_map, converter_map = self._get_column_and_converter_maps()

# Convert raw data to Row objects
uuid_idx = self._uuid_str_indices
return [
Row(row_data, column_map, cursor=self, converter_map=converter_map)
Row(
row_data,
column_map,
cursor=self,
converter_map=converter_map,
uuid_str_indices=uuid_idx,
)
for row_data in rows_data
]
except Exception as e:
Expand All @@ -2466,6 +2524,7 @@ def nextset(self) -> Union[bool, None]:
# Clear cached column and converter maps for the new result set
self._cached_column_map = None
self._cached_converter_map = None
self._uuid_str_indices = None

# Skip to the next result set
ret = ddbc_bindings.DDBCSQLMoreResults(self.hstmt)
Expand All @@ -2491,6 +2550,7 @@ def nextset(self) -> Union[bool, None]:
col_desc[0]: i for i, col_desc in enumerate(self.description)
}
self._cached_converter_map = self._build_converter_map()
self._uuid_str_indices = self._compute_uuid_str_indices()
except Exception as e: # pylint: disable=broad-exception-caught
# If describe fails, there might be no results in this result set
self.description = None
Expand Down
22 changes: 18 additions & 4 deletions mssql_python/db_connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ def connect(
autocommit: bool = False,
attrs_before: Optional[Dict[int, Union[int, str, bytes]]] = None,
timeout: int = 0,
native_uuid: Optional[bool] = None,
**kwargs: Any,
) -> Connection:
"""
Expand All @@ -22,10 +23,18 @@ def connect(
Args:
connection_str (str): The connection string to connect to.
autocommit (bool): If True, causes a commit to be performed after each SQL statement.
TODO: Add the following parameters to the function signature:
attrs_before (dict, optional): A dictionary of connection attributes to set before
connecting.
timeout (int): The timeout for the connection attempt, in seconds.
readonly (bool): If True, the connection is set to read-only.
attrs_before (dict): A dictionary of connection attributes to set before connecting.
native_uuid (bool, optional): Controls whether UNIQUEIDENTIFIER columns return
uuid.UUID objects (True) or str (False) for this connection.
- True: UNIQUEIDENTIFIER columns return uuid.UUID objects.
- False: UNIQUEIDENTIFIER columns return str (pyodbc-compatible).
- None (default): Uses the module-level ``mssql_python.native_uuid`` setting (False).

This per-connection override is useful for incremental adoption of native UUIDs:
connections that are ready can pass native_uuid=True, while the default (False)
preserves pyodbc-compatible string behavior.
Keyword Args:
**kwargs: Additional key/value pairs for the connection string.
Below attributes are not implemented in the internal driver:
Expand All @@ -44,6 +53,11 @@ def connect(
transactions, and closing the connection.
"""
conn = Connection(
connection_str, autocommit=autocommit, attrs_before=attrs_before, timeout=timeout, **kwargs
connection_str,
autocommit=autocommit,
attrs_before=attrs_before,
timeout=timeout,
native_uuid=native_uuid,
**kwargs,
)
return conn
6 changes: 5 additions & 1 deletion mssql_python/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -360,13 +360,17 @@ class Settings:
Settings class for mssql_python package configuration.

This class holds global settings that affect the behavior of the package,
including lowercase column names, decimal separator.
including lowercase column names, decimal separator, and UUID handling.
"""

def __init__(self) -> None:
    # Column names keep their original casing unless this is enabled.
    self.lowercase: bool = False
    # Separator is pre-determined at import time — deliberately no
    # locale access here.
    self.decimal_separator: str = _default_decimal_separator
    # UNIQUEIDENTIFIER result type: str when False (the default, which
    # matches pyodbc for seamless migration), uuid.UUID when True.
    self.native_uuid: bool = False
Comment on lines +371 to +373
Copy link

Copilot AI Mar 2, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

native_uuid is initialized to False, which changes the default UNIQUEIDENTIFIER result type from uuid.UUID to str for existing users (previously UUIDs were always returned as uuid.UUID based on the type mapping). If this is intentional, it should be treated as a breaking change (documented prominently and aligned with the package’s versioning/release notes); otherwise, consider defaulting to True and letting users opt into pyodbc-style strings via native_uuid=False.

Suggested change
# or str (False). Default False matches pyodbc behavior for seamless migration.
# Set to True to return native uuid.UUID objects.
self.native_uuid: bool = False
# or str (False). Default True preserves the historical behavior of returning
# native uuid.UUID objects. Set to False to opt into pyodbc-style string results.
self.native_uuid: bool = True

Copilot uses AI. Check for mistakes.


# Global settings instance
Expand Down
20 changes: 6 additions & 14 deletions mssql_python/mssql_python.pyi
Original file line number Diff line number Diff line change
Expand Up @@ -129,21 +129,11 @@ class Row:

def __init__(
self,
cursor: "Cursor",
description: List[
Tuple[
str,
Any,
Optional[int],
Optional[int],
Optional[int],
Optional[int],
Optional[bool],
]
],
values: List[Any],
column_map: Optional[Dict[str, int]] = None,
settings_snapshot: Optional[Dict[str, Any]] = None,
column_map: Dict[str, int],
cursor: Optional["Cursor"] = None,
converter_map: Optional[List[Any]] = None,
uuid_str_indices: Optional[Tuple[int, ...]] = None,
) -> None: ...
def __getitem__(self, index: int) -> Any: ...
def __getattr__(self, name: str) -> Any: ...
Expand Down Expand Up @@ -247,6 +237,7 @@ class Connection:
autocommit: bool = False,
attrs_before: Optional[Dict[int, Union[int, str, bytes]]] = None,
timeout: int = 0,
native_uuid: Optional[bool] = None,
**kwargs: Any,
) -> None: ...

Expand Down Expand Up @@ -289,6 +280,7 @@ def connect(
autocommit: bool = False,
attrs_before: Optional[Dict[int, Union[int, str, bytes]]] = None,
timeout: int = 0,
native_uuid: Optional[bool] = None,
**kwargs: Any,
) -> Connection: ...

Expand Down
34 changes: 33 additions & 1 deletion mssql_python/row.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
"""

import decimal
import uuid as _uuid
from typing import Any
from mssql_python.helpers import get_settings
from mssql_python.logging import logger
Expand All @@ -26,14 +27,19 @@ class Row:
print(row.column_name) # Access by column name (case sensitivity varies)
"""

def __init__(self, values, column_map, cursor=None, converter_map=None):
def __init__(self, values, column_map, cursor=None, converter_map=None, uuid_str_indices=None):
"""
Initialize a Row object with values and pre-built column map.
Args:
values: List of values for this row
column_map: Pre-built column name to index mapping (shared across rows)
cursor: Optional cursor reference (for backward compatibility and lowercase access)
converter_map: Pre-computed converter map (shared across rows for performance)
uuid_str_indices: Tuple of column indices whose uuid.UUID values should be
converted to uppercase str. Pre-computed once per result set when
native_uuid=False (the default). The uppercase format matches pyodbc
and SQL Server's native text representation.
None means no conversion (native_uuid=True).
"""
# Apply output converters if available using pre-computed converter map
if converter_map:
Expand All @@ -48,9 +54,35 @@ def __init__(self, values, column_map, cursor=None, converter_map=None):
else:
self._values = values

# Convert UUID columns to str when native_uuid=False (the default).
# uuid_str_indices is pre-computed once at execute() time, so this is
# O(num_uuid_columns) per row — zero cost when native_uuid=True.
if uuid_str_indices:
self._stringify_uuids(uuid_str_indices)

self._column_map = column_map
self._cursor = cursor

def _stringify_uuids(self, indices):
"""
Convert uuid.UUID values at the given column indices to uppercase str in-place.

This is only called when native_uuid=False. The uppercase format matches
the behavior of pyodbc and SQL Server's native UNIQUEIDENTIFIER text
representation, ensuring seamless migration. It operates directly on
self._values to avoid creating an extra list copy.
"""
vals = self._values
# If values are still the original list (no converters), we need a mutable copy
if not isinstance(vals, list):
vals = list(vals)
self._values = vals

for i in indices:
v = vals[i]
if v is not None and isinstance(v, _uuid.UUID):
vals[i] = str(v).upper()

def _apply_output_converters(self, values, cursor):
"""
Apply output converters to raw values.
Expand Down
Loading
Loading