Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "hatchling.build"

[project]
name = "slopometry"
version = "2026.3.4"
version = "2026.3.11"
description = "Opinionated code quality metrics for code agents and humans"
readme = "README.md"
requires-python = ">=3.13"
Expand Down
96 changes: 20 additions & 76 deletions src/slopometry/core/complexity_analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import time
from concurrent.futures import ProcessPoolExecutor, as_completed
from pathlib import Path
from typing import Any

from slopometry.core.code_analyzer import CodeAnalyzer, _analyze_single_file
from slopometry.core.models.complexity import (
Expand All @@ -13,6 +14,7 @@
ExtendedComplexityMetrics,
FileAnalysisResult,
)
from slopometry.core.models.smell import SMELL_REGISTRY
from slopometry.core.python_feature_analyzer import PythonFeatureAnalyzer, _count_loc
from slopometry.core.settings import settings

Expand Down Expand Up @@ -214,33 +216,14 @@ def _calculate_delta(
current_metrics.str_type_percentage - baseline_metrics.str_type_percentage
)

delta.orphan_comment_change = current_metrics.orphan_comment_count - baseline_metrics.orphan_comment_count
delta.untracked_todo_change = current_metrics.untracked_todo_count - baseline_metrics.untracked_todo_count
delta.inline_import_change = current_metrics.inline_import_count - baseline_metrics.inline_import_count
delta.dict_get_with_default_change = (
current_metrics.dict_get_with_default_count - baseline_metrics.dict_get_with_default_count
)
delta.hasattr_getattr_change = (
current_metrics.hasattr_getattr_count - baseline_metrics.hasattr_getattr_count
)
delta.nonempty_init_change = current_metrics.nonempty_init_count - baseline_metrics.nonempty_init_count
delta.test_skip_change = current_metrics.test_skip_count - baseline_metrics.test_skip_count
delta.swallowed_exception_change = (
current_metrics.swallowed_exception_count - baseline_metrics.swallowed_exception_count
)
delta.type_ignore_change = current_metrics.type_ignore_count - baseline_metrics.type_ignore_count
delta.dynamic_execution_change = (
current_metrics.dynamic_execution_count - baseline_metrics.dynamic_execution_count
)
delta.single_method_class_change = (
current_metrics.single_method_class_count - baseline_metrics.single_method_class_count
)
delta.deep_inheritance_change = (
current_metrics.deep_inheritance_count - baseline_metrics.deep_inheritance_count
)
delta.passthrough_wrapper_change = (
current_metrics.passthrough_wrapper_count - baseline_metrics.passthrough_wrapper_count
)
for name in SMELL_REGISTRY:
count_field = f"{name}_count"
change_field = f"{name}_change"
setattr(
delta,
change_field,
getattr(current_metrics, count_field) - getattr(baseline_metrics, count_field),
)

return delta

Expand All @@ -261,7 +244,8 @@ def _build_files_by_loc(self, python_files: list[Path], target_dir: Path) -> dic
_, code_loc = _count_loc(content)
relative_path = self._get_relative_path(file_path, target_dir)
files_by_loc[relative_path] = code_loc
except (OSError, UnicodeDecodeError):
except (OSError, UnicodeDecodeError) as e:
logger.warning(f"Skipping unreadable file {file_path}: {e}")
continue
return files_by_loc

Expand Down Expand Up @@ -433,6 +417,13 @@ def analyze_extended_complexity(self, directory: Path | None = None) -> Extended
any_type_percentage = (feature_stats.any_type_count / total_type_refs * 100.0) if total_type_refs > 0 else 0.0
str_type_percentage = (feature_stats.str_type_count / total_type_refs * 100.0) if total_type_refs > 0 else 0.0

smell_kwargs: dict[str, Any] = {}
for defn in SMELL_REGISTRY.values():
smell_kwargs[defn.count_field] = getattr(feature_stats, defn.count_field)
smell_kwargs[defn.files_field] = sorted(
[self._get_relative_path(p, target_dir) for p in getattr(feature_stats, defn.files_field)]
)

return ExtendedComplexityMetrics(
total_complexity=total_complexity,
average_complexity=average_complexity,
Expand Down Expand Up @@ -461,58 +452,11 @@ def analyze_extended_complexity(self, directory: Path | None = None) -> Extended
files_by_complexity=files_by_complexity,
files_by_effort=files_by_effort,
files_with_parse_errors=files_with_parse_errors,
orphan_comment_count=feature_stats.orphan_comment_count,
untracked_todo_count=feature_stats.untracked_todo_count,
inline_import_count=feature_stats.inline_import_count,
dict_get_with_default_count=feature_stats.dict_get_with_default_count,
hasattr_getattr_count=feature_stats.hasattr_getattr_count,
nonempty_init_count=feature_stats.nonempty_init_count,
test_skip_count=feature_stats.test_skip_count,
swallowed_exception_count=feature_stats.swallowed_exception_count,
type_ignore_count=feature_stats.type_ignore_count,
dynamic_execution_count=feature_stats.dynamic_execution_count,
orphan_comment_files=sorted(
[self._get_relative_path(p, target_dir) for p in feature_stats.orphan_comment_files]
),
untracked_todo_files=sorted(
[self._get_relative_path(p, target_dir) for p in feature_stats.untracked_todo_files]
),
inline_import_files=sorted(
[self._get_relative_path(p, target_dir) for p in feature_stats.inline_import_files]
),
dict_get_with_default_files=sorted(
[self._get_relative_path(p, target_dir) for p in feature_stats.dict_get_with_default_files]
),
hasattr_getattr_files=sorted(
[self._get_relative_path(p, target_dir) for p in feature_stats.hasattr_getattr_files]
),
nonempty_init_files=sorted(
[self._get_relative_path(p, target_dir) for p in feature_stats.nonempty_init_files]
),
test_skip_files=sorted([self._get_relative_path(p, target_dir) for p in feature_stats.test_skip_files]),
swallowed_exception_files=sorted(
[self._get_relative_path(p, target_dir) for p in feature_stats.swallowed_exception_files]
),
type_ignore_files=sorted([self._get_relative_path(p, target_dir) for p in feature_stats.type_ignore_files]),
dynamic_execution_files=sorted(
[self._get_relative_path(p, target_dir) for p in feature_stats.dynamic_execution_files]
),
single_method_class_count=feature_stats.single_method_class_count,
deep_inheritance_count=feature_stats.deep_inheritance_count,
passthrough_wrapper_count=feature_stats.passthrough_wrapper_count,
single_method_class_files=sorted(
[self._get_relative_path(p, target_dir) for p in feature_stats.single_method_class_files]
),
deep_inheritance_files=sorted(
[self._get_relative_path(p, target_dir) for p in feature_stats.deep_inheritance_files]
),
passthrough_wrapper_files=sorted(
[self._get_relative_path(p, target_dir) for p in feature_stats.passthrough_wrapper_files]
),
total_loc=feature_stats.total_loc,
code_loc=feature_stats.code_loc,
files_by_loc={
self._get_relative_path(p, target_dir): loc
for p, loc in self._build_files_by_loc(python_files, target_dir).items()
},
**smell_kwargs,
)
22 changes: 15 additions & 7 deletions src/slopometry/core/hook_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -367,15 +367,25 @@ def handle_stop_event(session_id: str, parsed_input: "StopInput | SubagentStopIn
logger.debug(f"Failed to get modified source files: {e}")
edited_files = set()

# Smell feedback is stable (based on code state, not session activity)
# Smell feedback: split into code-based (stable) and context-derived (unstable)
# Context-derived smells (e.g., unread_related_tests) change with every transcript
# read and must NOT be included in the cache hash to avoid repeated triggers
if current_metrics:
scoped_smells = scope_smells_for_session(
current_metrics, delta, edited_files, stats.working_directory, stats.context_coverage
)
smell_feedback, has_smells, _ = format_code_smell_feedback(scoped_smells, session_id, stats.working_directory)
if has_smells:
feedback_parts.append(smell_feedback)
cache_stable_parts.append(smell_feedback)

code_smells = [s for s in scoped_smells if s.name != "unread_related_tests"]
context_smells = [s for s in scoped_smells if s.name == "unread_related_tests"]

code_feedback, has_code_smells, _ = format_code_smell_feedback(code_smells, session_id)
if has_code_smells:
feedback_parts.append(code_feedback)
cache_stable_parts.append(code_feedback)

context_smell_feedback, has_context_smells, _ = format_code_smell_feedback(context_smells, session_id)
if has_context_smells:
feedback_parts.append(context_smell_feedback)

# Context coverage - informational but NOT stable (changes with every Read/Glob/Grep)
# Excluded from cache hash to avoid invalidation on tool calls
Expand Down Expand Up @@ -656,14 +666,12 @@ def scope_smells_for_session(
def format_code_smell_feedback(
scoped_smells: list[ScopedSmell],
session_id: str | None = None,
working_directory: str | None = None,
) -> tuple[str, bool, bool]:
"""Format pre-classified smell data into feedback output.

Args:
scoped_smells: Pre-classified smells from scope_smells_for_session
session_id: Session ID for generating the smell-details command
working_directory: Path to working directory (unused, kept for caller compatibility)

Returns:
Tuple of (formatted feedback string, has_smells, has_blocking_smells)
Expand Down
18 changes: 18 additions & 0 deletions src/slopometry/core/language_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,24 @@ def get_combined_ignore_dirs(languages: list[ProjectLanguage] | None = None) ->
return ignore_dirs


def is_source_file(file_path: Path | str, languages: list[ProjectLanguage] | None = None) -> bool:
    """Check if a file path matches a supported source extension.

    Args:
        file_path: Path to check
        languages: List of languages to match against, or None for all supported

    Returns:
        True if the file has a recognized source extension
    """
    # None means "match against every supported language", otherwise resolve
    # each requested language to its config.
    candidates = (
        get_all_supported_configs()
        if languages is None
        else [get_language_config(lang) for lang in languages]
    )
    for config in candidates:
        if config.matches_extension(file_path):
            return True
    return False


def should_ignore_path(file_path: Path | str, languages: list[ProjectLanguage] | None = None) -> bool:
"""Check if a file path should be ignored based on language configs.

Expand Down
103 changes: 16 additions & 87 deletions src/slopometry/core/models/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ class SmellCounts(BaseModel):
deep_inheritance: int = 0
passthrough_wrapper: int = 0
sys_path_manipulation: int = 0
relative_import: int = 0


class ComplexityMetrics(BaseModel):
Expand Down Expand Up @@ -131,25 +132,13 @@ class ComplexityDelta(BaseModel):
deep_inheritance_change: int = 0
passthrough_wrapper_change: int = 0
sys_path_manipulation_change: int = 0
relative_import_change: int = 0

def get_smell_changes(self) -> dict[str, int]:
"""Return smell name to change value mapping for direct access."""
return {
"orphan_comment": self.orphan_comment_change,
"untracked_todo": self.untracked_todo_change,
"inline_import": self.inline_import_change,
"dict_get_with_default": self.dict_get_with_default_change,
"hasattr_getattr": self.hasattr_getattr_change,
"nonempty_init": self.nonempty_init_change,
"test_skip": self.test_skip_change,
"swallowed_exception": self.swallowed_exception_change,
"type_ignore": self.type_ignore_change,
"dynamic_execution": self.dynamic_execution_change,
"single_method_class": self.single_method_class_change,
"deep_inheritance": self.deep_inheritance_change,
"passthrough_wrapper": self.passthrough_wrapper_change,
"sys_path_manipulation": self.sys_path_manipulation_change,
}
from slopometry.core.models.smell import SMELL_REGISTRY

return {name: getattr(self, f"{name}_change") for name in SMELL_REGISTRY}


class ExtendedComplexityMetrics(ComplexityMetrics):
Expand Down Expand Up @@ -246,6 +235,10 @@ class ExtendedComplexityMetrics(ComplexityMetrics):
default=0,
description="sys.path mutations bypass the package system — restructure package boundaries and use absolute imports from installed packages instead",
)
relative_import_count: int = Field(
default=0,
description="Prefer absolute imports for clarity and refactor-safety; relative imports create implicit coupling to package structure",
)

# LOC metrics (for file filtering in QPE)
total_loc: int = Field(default=0, description="Total lines of code across all files")
Expand All @@ -270,87 +263,23 @@ class ExtendedComplexityMetrics(ComplexityMetrics):
)
passthrough_wrapper_files: list[str] = Field(default_factory=list, description="Files with pass-through wrappers")
sys_path_manipulation_files: list[str] = Field(default_factory=list, description="Files with sys.path mutations")
relative_import_files: list[str] = Field(default_factory=list, description="Files with relative imports")

def get_smell_counts(self) -> SmellCounts:
"""Return smell counts as a typed model for QPE and display."""
return SmellCounts(**{smell.name: smell.count for smell in self.get_smells()})

def get_smells(self) -> list["SmellData"]:
"""Return all smell data as structured objects with direct field access."""
# Import here to avoid circular imports at runtime
from slopometry.core.models.smell import SmellData
from slopometry.core.models.smell import SMELL_REGISTRY, SmellData

return [
SmellData(
name="orphan_comment",
count=self.orphan_comment_count,
files=self.orphan_comment_files,
),
SmellData(
name="untracked_todo",
count=self.untracked_todo_count,
files=self.untracked_todo_files,
),
SmellData(
name="swallowed_exception",
count=self.swallowed_exception_count,
files=self.swallowed_exception_files,
),
SmellData(
name="test_skip",
count=self.test_skip_count,
files=self.test_skip_files,
),
SmellData(
name="type_ignore",
count=self.type_ignore_count,
files=self.type_ignore_files,
),
SmellData(
name="dynamic_execution",
count=self.dynamic_execution_count,
files=self.dynamic_execution_files,
),
SmellData(
name="inline_import",
count=self.inline_import_count,
files=self.inline_import_files,
),
SmellData(
name="dict_get_with_default",
count=self.dict_get_with_default_count,
files=self.dict_get_with_default_files,
),
SmellData(
name="hasattr_getattr",
count=self.hasattr_getattr_count,
files=self.hasattr_getattr_files,
),
SmellData(
name="nonempty_init",
count=self.nonempty_init_count,
files=self.nonempty_init_files,
),
SmellData(
name="single_method_class",
count=self.single_method_class_count,
files=self.single_method_class_files,
),
SmellData(
name="deep_inheritance",
count=self.deep_inheritance_count,
files=self.deep_inheritance_files,
),
SmellData(
name="passthrough_wrapper",
count=self.passthrough_wrapper_count,
files=self.passthrough_wrapper_files,
),
SmellData(
name="sys_path_manipulation",
count=self.sys_path_manipulation_count,
files=self.sys_path_manipulation_files,
),
name=defn.internal_name,
count=getattr(self, defn.count_field),
files=getattr(self, defn.files_field),
)
for defn in SMELL_REGISTRY.values()
]

def get_smell_files(self) -> dict[str, list[str]]:
Expand Down
9 changes: 9 additions & 0 deletions src/slopometry/core/models/smell.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,15 @@ class SmellDefinition(BaseModel):
count_field="sys_path_manipulation_count",
files_field="sys_path_manipulation_files",
),
"relative_import": SmellDefinition(
internal_name="relative_import",
label="Relative Imports",
category=SmellCategory.PYTHON,
weight=0.03,
guidance="Prefer absolute imports for clarity and refactor-safety; relative imports create implicit coupling to package structure",
count_field="relative_import_count",
files_field="relative_import_files",
),
}


Expand Down
Loading