diff --git a/.commitlintrc.json b/.commitlintrc.json
index eca3a08..18fb2ea 100644
--- a/.commitlintrc.json
+++ b/.commitlintrc.json
@@ -6,7 +6,7 @@
"always",
[
"build",
- "chore",
+ "chore",
"ci",
"docs",
"feat",
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..ebf50e1
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,29 @@
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.5.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: check-yaml
+ exclude: ^mkdocs\.yml$
+ - id: check-added-large-files
+ - id: check-toml
+
+ - repo: https://github.com/psf/black
+ rev: 23.12.1
+ hooks:
+ - id: black
+ language_version: python3.12
+
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.1.9
+ hooks:
+ - id: ruff
+ args: [--fix, --exit-non-zero-on-fix]
+
+ - repo: https://github.com/alessandrojcm/commitlint-pre-commit-hook
+ rev: v9.11.0
+ hooks:
+ - id: commitlint
+ stages: [commit-msg]
+ additional_dependencies: ['@commitlint/cli@^18.6.1', '@commitlint/config-conventional@^18.6.2']
diff --git a/README.md b/README.md
index ddf32c8..4c7b233 100644
--- a/README.md
+++ b/README.md
@@ -75,4 +75,4 @@ For detailed guidelines, see [CONTRIBUTING.md](./CONTRIBUTING.md).
## License
-AGPL v3: see [file](./LICENSE)
\ No newline at end of file
+AGPL v3: see [file](./LICENSE)
diff --git a/docs/articles/devs/architecture/ChainOfResponsibilityPattern.md b/docs/articles/devs/architecture/ChainOfResponsibilityPattern.md
index 7c35760..a95707f 100644
--- a/docs/articles/devs/architecture/ChainOfResponsibilityPattern.md
+++ b/docs/articles/devs/architecture/ChainOfResponsibilityPattern.md
@@ -89,17 +89,17 @@ All component types follow the same abstract base class pattern (replace `Compon
class ComponentBase(ABC):
def __init__(self, next_component: Optional['ComponentBase'] = None):
self.next_component = next_component
-
+
@abstractmethod
def can_handle(self, version_or_data) -> bool:
"""Determine if this component can handle the given version/data."""
pass
-
+
def set_next(self, component: 'ComponentBase') -> 'ComponentBase':
"""Set the next component in the chain."""
self.next_component = component
return component
-
+
# Delegation methods follow the same pattern:
def operation(self, data):
"""Perform operation or delegate to next component."""
@@ -117,30 +117,30 @@ All component types use identical factory patterns for chain construction (repla
class ComponentFactory:
_component_registry: Dict[str, Type[ComponentBase]] = {}
_version_order: List[str] = [] # Newest to oldest
-
+
@classmethod
def create_component_chain(cls, target_version: Optional[str] = None) -> ComponentBase:
"""Create component chain from target version to oldest."""
-
+
# Determine target version (latest if not specified)
if target_version is None:
target_version = cls._version_order[0]
-
+
# Create chain starting from target version down to oldest
target_index = cls._version_order.index(target_version)
chain_versions = cls._version_order[target_index:]
-
+
# Create components in order (newest to oldest)
components = []
for version in chain_versions:
component_class = cls._component_registry[version]
component = component_class()
components.append(component)
-
+
# Link components (each points to the next older one)
for i in range(len(components) - 1):
components[i].set_next(components[i + 1])
-
+
return components[0] # Return head of chain
```
@@ -155,7 +155,7 @@ Each component type implements delegation for unchanged concerns:
```python
class V121PackageAccessor(HatchPkgAccessorBase):
"""v1.2.1 accessor - handles dual entry points, delegates dependencies."""
-
+
def get_entry_points(self, metadata: Dict[str, Any]) -> Dict[str, Any]:
"""Handle dual entry point access for v1.2.1."""
# v1.2.1-specific logic for dual entry points
@@ -163,7 +163,7 @@ class V121PackageAccessor(HatchPkgAccessorBase):
'mcp_server': metadata.get('mcp_server', {}),
'hatch_mcp_server': metadata.get('hatch_mcp_server', {})
}
-
+
def get_dependencies(self, metadata: Dict[str, Any]) -> Dict[str, Any]:
"""Delegate dependency access to v1.2.0."""
if self.next_accessor:
@@ -172,12 +172,12 @@ class V121PackageAccessor(HatchPkgAccessorBase):
class V120PackageAccessor(HatchPkgAccessorBase):
"""v1.2.0 accessor - handles unified dependencies, delegates basic fields."""
-
+
def get_dependencies(self, metadata: Dict[str, Any]) -> Dict[str, Any]:
"""Handle unified dependency structure for v1.2.0."""
# v1.2.0-specific logic for unified dependencies
return metadata.get('dependencies', {})
-
+
def get_name(self, metadata: Dict[str, Any]) -> str:
"""Delegate basic field access to v1.1.0."""
if self.next_accessor:
@@ -223,11 +223,11 @@ class ValidatorFactory:
# Import and register v1.2.1 validator (newest first)
from hatch_validator.package.v1_2_1.validator import Validator as V121Validator
cls.register_validator('1.2.1', V121Validator)
-
+
# Import and register v1.2.0 validator
from hatch_validator.package.v1_2_0.validator import Validator as V120Validator
cls.register_validator('1.2.0', V120Validator)
-
+
# Import and register v1.1.0 validator
from hatch_validator.package.v1_1_0.validator import Validator as V110Validator
cls.register_validator('1.1.0', V110Validator)
@@ -362,14 +362,14 @@ To add support for a new schema version (e.g., v1.3.0):
```python
class V130PackageAccessor(HatchPkgAccessorBase):
"""v1.3.0 accessor - handles new features, delegates unchanged concerns."""
-
+
def can_handle(self, schema_version: str) -> bool:
return schema_version == "1.3.0"
-
+
def get_new_feature(self, metadata: Dict[str, Any]) -> Any:
"""Handle v1.3.0-specific new feature."""
return metadata.get('new_feature', {})
-
+
# All others are automatically delegated from HatchPkgAccessorBase implementation
```
@@ -423,11 +423,11 @@ def operation(self, data):
return self._handle_operation(data)
except Exception as e:
logger.warning(f"Component {self.__class__.__name__} failed: {e}")
-
+
# Delegate to next component
if self.next_component:
return self.next_component.operation(data)
-
+
# No component could handle the operation
raise NotImplementedError(f"No component can handle operation for {data}")
```
diff --git a/docs/articles/devs/architecture/ComponentTypes.md b/docs/articles/devs/architecture/ComponentTypes.md
index 8faa0d1..55d1411 100644
--- a/docs/articles/devs/architecture/ComponentTypes.md
+++ b/docs/articles/devs/architecture/ComponentTypes.md
@@ -86,12 +86,12 @@ Package Accessors provide unified access to package metadata across schema versi
class HatchPkgAccessor(ABC):
def __init__(self, next_accessor: Optional['HatchPkgAccessor'] = None):
self.next_accessor = next_accessor
-
+
@abstractmethod
def can_handle(self, schema_version: str) -> bool:
"""Determine if this accessor can handle the schema version."""
pass
-
+
def get_dependencies(self, metadata: Dict[str, Any]) -> Dict[str, Any]:
"""Get dependencies or delegate to next accessor."""
if self.next_accessor:
@@ -147,12 +147,12 @@ Registry Accessors enable consistent registry data access regardless of registry
class RegistryAccessorBase(ABC):
def __init__(self, successor: Optional['RegistryAccessorBase'] = None):
self._successor = successor
-
+
@abstractmethod
def can_handle(self, registry_data: Dict[str, Any]) -> bool:
"""Check if this accessor can handle the registry data."""
pass
-
+
def handle_request(self, registry_data: Dict[str, Any]) -> Optional['RegistryAccessorBase']:
"""Handle request using chain of responsibility pattern."""
if self.can_handle(registry_data):
@@ -189,7 +189,7 @@ class PackageService:
# Create package accessor chain
schema_version = metadata.get("package_schema_version")
self._accessor = HatchPkgAccessorFactory.create_accessor_chain(schema_version)
-
+
def get_dependencies(self) -> Dict[str, Any]:
"""Use accessor chain for version-agnostic dependency access."""
return self._accessor.get_dependencies(self._metadata)
@@ -204,7 +204,7 @@ class RegistryService:
if registry_data:
# Create registry accessor chain
self._accessor = RegistryAccessorFactory.create_accessor_for_data(registry_data)
-
+
def package_exists(self, package_name: str) -> bool:
"""Use accessor chain for version-agnostic registry operations."""
return self._accessor.package_exists(self._registry_data, package_name)
@@ -216,20 +216,20 @@ class RegistryService:
class HatchPackageValidator:
def validate_package(self, package_path: Path) -> Tuple[bool, Dict[str, Any]]:
"""Use validator chain for version-agnostic validation."""
-
+
# Load metadata and detect schema version
with open(package_path / "hatch_metadata.json", 'r') as f:
metadata = json.load(f)
-
+
schema_version = metadata.get("package_schema_version")
-
+
# Create validator chain
validator = ValidatorFactory.create_validator_chain(schema_version)
-
+
# Execute validation through chain
context = ValidationContext(registry_data=self.registry_data)
is_valid, errors = validator.validate(metadata, context)
-
+
return is_valid, self._format_results(is_valid, errors, metadata)
```
@@ -246,16 +246,16 @@ class DependencyValidation:
# Use package accessor for version-agnostic dependency access
package_service = PackageService(metadata)
dependencies = package_service.get_dependencies()
-
+
# Use registry accessor for dependency existence validation
registry_service = RegistryService(context.registry_data)
-
+
errors = []
for dep_type, deps in dependencies.items():
for dep in deps:
if not registry_service.package_exists(dep['name']):
errors.append(f"Dependency {dep['name']} not found in registry")
-
+
return len(errors) == 0, errors
```
@@ -271,18 +271,18 @@ Factory classes coordinate component creation:
# Coordinated factory usage
def create_validation_system(metadata: Dict[str, Any], registry_data: Dict[str, Any]):
"""Create coordinated validation system with all component types."""
-
+
schema_version = metadata.get("package_schema_version")
-
+
# Create coordinated chains
validator = ValidatorFactory.create_validator_chain(schema_version)
package_accessor = HatchPkgAccessorFactory.create_accessor_chain(schema_version)
registry_accessor = RegistryAccessorFactory.create_accessor_for_data(registry_data)
-
+
# Create coordinated services
package_service = PackageService(metadata)
registry_service = RegistryService(registry_data)
-
+
return {
'validator': validator,
'package_service': package_service,
diff --git a/docs/articles/devs/architecture/SchemaIntegration.md b/docs/articles/devs/architecture/SchemaIntegration.md
index bb7d8b7..2487f63 100644
--- a/docs/articles/devs/architecture/SchemaIntegration.md
+++ b/docs/articles/devs/architecture/SchemaIntegration.md
@@ -145,11 +145,11 @@ The system automatically detects schema versions from metadata:
def detect_schema_version(metadata: Dict[str, Any]) -> str:
"""Detect schema version from package metadata."""
schema_version = metadata.get("package_schema_version")
-
+
if not schema_version:
# Fallback to default version for legacy packages
return "1.1.0"
-
+
# Normalize version format (remove 'v' prefix if present)
return schema_version.lstrip('v')
```
@@ -163,23 +163,23 @@ class ValidatorFactory:
@classmethod
def create_validator_chain(cls, target_version: Optional[str] = None) -> Validator:
"""Create validator chain based on detected schema version."""
-
+
if target_version is None:
# Use latest available version
target_version = cls._version_order[0]
-
+
# Normalize version format
target_version = target_version.lstrip('v')
-
+
# Create chain from target version to oldest
target_index = cls._version_order.index(target_version)
chain_versions = cls._version_order[target_index:]
-
+
# Build and link chain
validators = [cls._validator_registry[v]() for v in chain_versions]
for i in range(len(validators) - 1):
validators[i].set_next(validators[i + 1])
-
+
return validators[0]
```
@@ -213,14 +213,14 @@ Validators use schema information for validation:
class SchemaValidation:
def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
"""Validate metadata against appropriate schema."""
-
+
schema_version = metadata.get("package_schema_version", "1.1.0")
-
+
# Get appropriate schema from cache/repository
schema = get_package_schema(schema_version)
if not schema:
return False, [f"Schema not available for version {schema_version}"]
-
+
# Validate metadata against schema
try:
jsonschema.validate(metadata, schema)
@@ -237,16 +237,16 @@ Package accessors adapt to schema structure changes:
class V120PackageAccessor(HatchPkgAccessorBase):
def get_dependencies(self, metadata: Dict[str, Any]) -> Dict[str, Any]:
"""Access dependencies based on v1.2.0 schema structure."""
-
+
# v1.2.0 introduced unified dependencies structure
dependencies = metadata.get('dependencies', {})
-
+
# Validate structure matches expected schema
expected_types = ['hatch', 'python', 'system', 'docker']
for dep_type in dependencies:
if dep_type not in expected_types:
logger.warning(f"Unexpected dependency type: {dep_type}")
-
+
return dependencies
```
@@ -258,16 +258,16 @@ Registry accessors handle registry schema evolution:
class V110RegistryAccessor(RegistryAccessorBase):
def can_handle(self, registry_data: Dict[str, Any]) -> bool:
"""Check if this accessor can handle the registry schema."""
-
+
# Check for v1.1.0 registry schema indicators
schema_version = registry_data.get('registry_schema_version', '')
if schema_version.startswith('1.1.'):
return True
-
+
# Check for CrackingShells registry structure
if 'repositories' in registry_data:
return True
-
+
return False
```
@@ -293,40 +293,40 @@ The schema update process is coordinated across components:
```python
def update_schemas() -> bool:
"""Coordinate schema updates across the system."""
-
+
# 1. Fetch latest schema information from GitHub
fetcher = SchemaFetcher()
latest_info = fetcher.get_latest_schema_info()
-
+
if not latest_info:
logger.warning("Failed to fetch latest schema info")
return False
-
+
# 2. Check for new schema versions
cache = SchemaCache()
current_info = cache.get_info()
-
+
updated = False
for schema_type in ['package', 'registry']:
current_version = current_info.get(f"latest_{schema_type}_version")
latest_version = latest_info.get(f"latest_{schema_type}_version")
-
+
if current_version != latest_version:
# 3. Download new schema
schema_url = latest_info[schema_type]['url']
schema_data = fetcher.download_schema(schema_url)
-
+
if schema_data:
# 4. Update cache
cache.save_schema(schema_type, schema_data, latest_version)
cache.save_schema(schema_type, schema_data) # Also save as latest
updated = True
logger.info(f"Updated {schema_type} schema to {latest_version}")
-
+
# 5. Update cache metadata
if updated:
cache.update_info(latest_info)
-
+
return updated
```
@@ -339,10 +339,10 @@ class ValidatorFactory:
@classmethod
def _ensure_validators_loaded(cls) -> None:
"""Ensure validators are loaded and schemas are current."""
-
+
# Check for schema updates before loading validators
schema_retriever.get_schema("package", "latest") # Triggers update check
-
+
# Load validators based on available schemas
if not cls._validator_registry:
# Auto-discover and register validators
@@ -358,7 +358,7 @@ The system gracefully handles network failures:
```python
def get_schema_with_fallback(schema_type: str, version: str) -> Optional[Dict[str, Any]]:
"""Get schema with network failure fallback."""
-
+
try:
# Try to get latest schema (may trigger network request)
return schema_retriever.get_schema(schema_type, version, force_update=True)
@@ -379,7 +379,7 @@ The system handles schema validation errors gracefully:
```python
def validate_with_fallback(metadata: Dict, schema_version: str) -> Tuple[bool, List[str]]:
"""Validate with schema fallback."""
-
+
# Try validation with specific schema version
schema = get_package_schema(schema_version)
if schema:
@@ -388,7 +388,7 @@ def validate_with_fallback(metadata: Dict, schema_version: str) -> Tuple[bool, L
return True, []
except jsonschema.ValidationError as e:
return False, [f"Schema validation error: {e.message}"]
-
+
# Fallback to latest schema if specific version unavailable
latest_schema = get_package_schema("latest")
if latest_schema:
@@ -398,7 +398,7 @@ def validate_with_fallback(metadata: Dict, schema_version: str) -> Tuple[bool, L
return True, []
except jsonschema.ValidationError as e:
return False, [f"Schema validation error (latest): {e.message}"]
-
+
# No schema available - skip schema validation
logger.error("No schema available for validation")
return True, ["Schema validation skipped - no schema available"]
@@ -411,17 +411,17 @@ The system recovers from cache corruption:
```python
def load_schema_with_recovery(schema_type: str, version: str) -> Optional[Dict[str, Any]]:
"""Load schema with corruption recovery."""
-
+
try:
return cache.load_schema(schema_type, version)
except (json.JSONDecodeError, IOError) as e:
logger.warning(f"Cache corruption detected: {e}")
-
+
# Remove corrupted cache file
cache_path = cache.get_schema_path(schema_type, version)
if cache_path.exists():
cache_path.unlink()
-
+
# Trigger fresh download
return schema_retriever.get_schema(schema_type, version, force_update=True)
```
@@ -436,16 +436,16 @@ Schemas are loaded only when needed:
class LazySchemaLoader:
def __init__(self):
self._schema_cache = {}
-
+
def get_schema(self, schema_type: str, version: str) -> Optional[Dict[str, Any]]:
"""Get schema with lazy loading."""
-
+
cache_key = f"{schema_type}:{version}"
-
+
if cache_key not in self._schema_cache:
# Load schema on first access
self._schema_cache[cache_key] = schema_retriever.get_schema(schema_type, version)
-
+
return self._schema_cache[cache_key]
```
@@ -458,7 +458,7 @@ import threading
def background_schema_update():
"""Update schemas in background thread."""
-
+
def update_worker():
try:
schema_retriever.get_schema("package", "latest", force_update=True)
@@ -466,7 +466,7 @@ def background_schema_update():
logger.info("Background schema update completed")
except Exception as e:
logger.error(f"Background schema update failed: {e}")
-
+
# Start background update
update_thread = threading.Thread(target=update_worker, daemon=True)
update_thread.start()
diff --git a/docs/articles/devs/contribution_guidelines/ExtendingChainOfResponsibility.md b/docs/articles/devs/contribution_guidelines/ExtendingChainOfResponsibility.md
index ee0b3b8..22ae862 100644
--- a/docs/articles/devs/contribution_guidelines/ExtendingChainOfResponsibility.md
+++ b/docs/articles/devs/contribution_guidelines/ExtendingChainOfResponsibility.md
@@ -42,23 +42,23 @@ from hatch_validator.core.pkg_accessor_base import HatchPkgAccessorBase
class V130PackageAccessor(HatchPkgAccessorBase):
"""Package accessor for schema version 1.3.0."""
-
+
def can_handle(self, schema_version: str) -> bool:
"""Check if this accessor can handle the schema version."""
return schema_version in ["1.3.0", "v1.3.0"]
-
+
def get_new_feature(self, metadata: Dict[str, Any]) -> Any:
"""Handle v1.3.0-specific new feature."""
# Implement new functionality specific to v1.3.0
return metadata.get('new_feature_field', {})
-
+
def get_dependencies(self, metadata: Dict[str, Any]) -> Dict[str, Any]:
"""Delegate dependency access to v1.2.1."""
# v1.3.0 doesn't change dependency structure, delegate to v1.2.1
if self.next_accessor:
return self.next_accessor.get_dependencies(metadata)
raise NotImplementedError("Dependencies accessor not implemented")
-
+
def get_entry_points(self, metadata: Dict[str, Any]) -> Dict[str, Any]:
"""Handle entry points - may delegate or implement new logic."""
# If v1.3.0 changes entry point structure, implement here
@@ -80,57 +80,57 @@ from hatch_validator.core.validation_context import ValidationContext
class V130Validator(Validator):
"""Validator for schema version 1.3.0."""
-
+
def can_handle(self, schema_version: str) -> bool:
"""Check if this validator can handle the schema version."""
return schema_version in ["1.3.0", "v1.3.0"]
-
+
def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
"""Validate v1.3.0 package or delegate to next validator."""
-
+
schema_version = metadata.get("package_schema_version", "")
-
+
if not self.can_handle(schema_version):
# Delegate to next validator in chain
if self.next_validator:
return self.next_validator.validate(metadata, context)
return False, [f"Unsupported schema version: {schema_version}"]
-
+
# Perform v1.3.0-specific validation
errors = []
-
+
# Validate new features specific to v1.3.0
new_feature_valid, new_feature_errors = self._validate_new_feature(metadata, context)
errors.extend(new_feature_errors)
-
+
# Delegate unchanged validation concerns to previous validators
if self.next_validator:
# Delegate dependency validation to v1.2.1
dep_valid, dep_errors = self.next_validator.validate_dependencies(metadata, context)
errors.extend(dep_errors)
-
+
# Delegate entry point validation to v1.2.1
entry_valid, entry_errors = self.next_validator.validate_entry_points(metadata, context)
errors.extend(entry_errors)
-
+
return len(errors) == 0, errors
-
+
def _validate_new_feature(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
"""Validate v1.3.0-specific new feature."""
errors = []
-
+
new_feature = metadata.get('new_feature_field')
if new_feature is not None:
# Implement validation logic for new feature
if not isinstance(new_feature, dict):
errors.append("new_feature_field must be a dictionary")
-
+
# Add more specific validation rules
required_fields = ['field1', 'field2']
for field in required_fields:
if field not in new_feature:
errors.append(f"new_feature_field missing required field: {field}")
-
+
return len(errors) == 0, errors
```
@@ -145,20 +145,20 @@ from hatch_validator.registry.registry_accessor_base import RegistryAccessorBase
class V130RegistryAccessor(RegistryAccessorBase):
"""Registry accessor for schema version 1.3.0."""
-
+
def can_handle(self, registry_data: Dict[str, Any]) -> bool:
"""Check if this accessor can handle the registry data."""
schema_version = registry_data.get('registry_schema_version', '')
return schema_version.startswith('1.3.')
-
- def package_exists(self, registry_data: Dict[str, Any], package_name: str,
+
+ def package_exists(self, registry_data: Dict[str, Any], package_name: str,
repo_name: Optional[str] = None) -> bool:
"""Check package existence with v1.3.0 registry format."""
-
+
# If v1.3.0 doesn't change registry structure, delegate
if self._successor:
return self._successor.package_exists(registry_data, package_name, repo_name)
-
+
# Otherwise, implement v1.3.0-specific logic here
return False
```
@@ -177,13 +177,13 @@ class HatchPkgAccessorFactory:
# Register v1.3.0 accessor (newest first)
from hatch_validator.package.v1_3_0.accessor import V130PackageAccessor
cls.register_accessor('1.3.0', V130PackageAccessor)
-
+
# Register existing accessors
from hatch_validator.package.v1_2_1.accessor import V121PackageAccessor
cls.register_accessor('1.2.1', V121PackageAccessor)
-
+
# ... register other versions
-
+
# Update version order (newest to oldest)
cls._version_order = ['1.3.0', '1.2.1', '1.2.0', '1.1.0']
@@ -196,10 +196,10 @@ class ValidatorFactory:
# Register v1.3.0 validator (newest first)
from hatch_validator.package.v1_3_0.validator import V130Validator
cls.register_validator('1.3.0', V130Validator)
-
+
# Register existing validators
# ... register other versions
-
+
# Update version order (newest to oldest)
cls._version_order = ['1.3.0', '1.2.1', '1.2.0', '1.1.0']
```
@@ -262,11 +262,11 @@ def get_field_with_fallback(self, metadata: Dict[str, Any], field_name: str) ->
return self._get_field(metadata, field_name)
except Exception as e:
logger.warning(f"Error handling {field_name}: {e}")
-
+
# Delegate to next accessor
if self.next_accessor:
return self.next_accessor.get_field_with_fallback(metadata, field_name)
-
+
raise NotImplementedError(f"Field {field_name} not handled by any accessor")
```
@@ -275,10 +275,10 @@ def get_field_with_fallback(self, metadata: Dict[str, Any], field_name: str) ->
```python
def validate_chain_integrity(chain_head: ComponentBase) -> bool:
"""Validate that chain covers all required functionality."""
-
+
current = chain_head
covered_versions = set()
-
+
while current:
# Check that component implements required methods
required_methods = ['can_handle', 'get_dependencies', 'get_entry_points']
@@ -286,20 +286,20 @@ def validate_chain_integrity(chain_head: ComponentBase) -> bool:
if not hasattr(current, method):
logger.error(f"Component {current.__class__.__name__} missing method: {method}")
return False
-
+
# Track covered versions
if hasattr(current, 'SUPPORTED_VERSION'):
covered_versions.add(current.SUPPORTED_VERSION)
-
+
current = getattr(current, 'next_accessor', None) or getattr(current, 'next_validator', None)
-
+
# Ensure all required versions are covered
required_versions = {'1.1.0', '1.2.0', '1.2.1'}
if not required_versions.issubset(covered_versions):
missing = required_versions - covered_versions
logger.error(f"Chain missing support for versions: {missing}")
return False
-
+
return True
```
@@ -331,17 +331,17 @@ Ensure all components are registered correctly:
```python
def register_all_components():
"""Register all components with their factories."""
-
+
# Package accessors
HatchPkgAccessorFactory.register_accessor('1.3.0', V130PackageAccessor)
HatchPkgAccessorFactory.register_accessor('1.2.1', V121PackageAccessor)
# ... register all versions
-
+
# Validators
ValidatorFactory.register_validator('1.3.0', V130Validator)
ValidatorFactory.register_validator('1.2.1', V121Validator)
# ... register all versions
-
+
# Registry accessors (if applicable)
RegistryAccessorFactory.register_accessor('1.3.0', V130RegistryAccessor)
# ... register all versions
@@ -354,17 +354,17 @@ Ensure new components maintain backward compatibility:
```python
def test_backward_compatibility():
"""Test that new components work with older packages."""
-
+
# v1.3.0 chain should handle v1.2.1 packages
chain = HatchPkgAccessorFactory.create_accessor_chain("1.3.0")
-
+
v121_metadata = {
"package_schema_version": "1.2.1",
"name": "old-package",
"mcp_server": {"command": "server"},
"hatch_mcp_server": {"command": "hatch-server"}
}
-
+
# Should delegate to v1.2.1 accessor
entry_points = chain.get_entry_points(v121_metadata)
assert "mcp_server" in entry_points
diff --git a/docs/articles/users/integration/ProgrammaticUsage.md b/docs/articles/users/integration/ProgrammaticUsage.md
index a2171f8..c71851c 100644
--- a/docs/articles/users/integration/ProgrammaticUsage.md
+++ b/docs/articles/users/integration/ProgrammaticUsage.md
@@ -22,38 +22,38 @@ The Dependency Installation Orchestrator uses PackageService to access package m
from hatch_validator.package.package_service import PackageService
class DependencyInstallerOrchestrator:
- def _resolve_package_location(self, package_path_or_name: str,
+ def _resolve_package_location(self, package_path_or_name: str,
version_constraint: Optional[str] = None,
force_download: bool = False) -> Tuple[Path, Dict[str, Any]]:
"""Resolve package location and load metadata using PackageService."""
-
+
if Path(package_path_or_name).exists():
# Local package - load metadata directly
metadata_path = Path(package_path_or_name) / "hatch_metadata.json"
with open(metadata_path, 'r') as f:
metadata = json.load(f)
-
+
# PackageService automatically handles schema version detection
self.package_service = PackageService(metadata)
return Path(package_path_or_name), metadata
-
+
else:
# Remote package - resolve through registry
compatible_version = self.registry_service.find_compatible_version(
package_path_or_name, version_constraint)
-
+
location = self.registry_service.get_package_uri(
package_path_or_name, compatible_version)
-
+
downloaded_path = self.package_loader.download_package(
- location, package_path_or_name, compatible_version,
+ location, package_path_or_name, compatible_version,
force_download=force_download)
-
+
# Load metadata and initialize PackageService
metadata_path = downloaded_path / "hatch_metadata.json"
with open(metadata_path, 'r') as f:
metadata = json.load(f)
-
+
# Version-agnostic metadata access
self.package_service = PackageService(metadata)
return downloaded_path, metadata
@@ -66,10 +66,10 @@ The orchestrator accesses dependencies without knowing the schema version:
```python
def _build_dependency_graph(self, package_path: Path, metadata: Dict[str, Any]) -> Dict[str, Any]:
"""Build dependency graph using version-agnostic PackageService."""
-
+
# PackageService abstracts schema version differences
all_deps = self.package_service.get_dependencies()
-
+
# Process dependencies by type - works with any schema version
install_plan = {
"dependencies_to_install": {},
@@ -78,13 +78,13 @@ def _build_dependency_graph(self, package_path: Path, metadata: Dict[str, Any])
"metadata": metadata
}
}
-
+
# Handle all dependency types uniformly
for dep_type, dependencies in all_deps.items():
if dependencies:
install_plan["dependencies_to_install"][dep_type] = dependencies
self.logger.info(f"Found {len(dependencies)} {dep_type} dependencies")
-
+
return install_plan
```
@@ -99,32 +99,32 @@ def _build_dependency_graph(self, package_path: Path, metadata: Dict[str, Any])
The orchestrator uses RegistryService for version-agnostic registry operations:
```python
-def install_dependencies(self, package_path_or_name: str, env_path: Path,
+def install_dependencies(self, package_path_or_name: str, env_path: Path,
env_name: str, existing_packages: Dict[str, str],
version_constraint: Optional[str] = None,
- force_download: bool = False,
+ force_download: bool = False,
auto_approve: bool = False) -> Tuple[bool, List[Dict[str, Any]]]:
"""Install dependencies using version-agnostic registry operations."""
-
+
try:
# Step 1: Resolve package location using RegistryService
package_path, metadata = self._resolve_package_location(
package_path_or_name, version_constraint, force_download)
-
+
# Step 2: Check package existence without schema awareness
if not Path(package_path_or_name).exists():
if not self.registry_service.package_exists(package_path_or_name):
raise DependencyInstallationError(
f"Package {package_path_or_name} does not exist in registry")
-
+
# Step 3: Build dependency graph using PackageService
install_plan = self._build_dependency_graph(package_path, metadata)
-
+
# Step 4: Execute installation plan
installed_packages = self._execute_install_plan(install_plan, env_path, env_name)
-
+
return True, installed_packages
-
+
except Exception as e:
self.logger.error(f"Dependency installation failed: {e}")
raise DependencyInstallationError(f"Installation failed: {e}") from e
@@ -140,11 +140,11 @@ The Environment Manager initializes RegistryService for version-agnostic registr
from hatch_validator.registry.registry_service import RegistryService
class HatchEnvironmentManager:
- def __init__(self, cache_dir: Path = None, cache_ttl: int = 3600,
- simulation_mode: bool = False,
+ def __init__(self, cache_dir: Path = None, cache_ttl: int = 3600,
+ simulation_mode: bool = False,
local_registry_cache_path: Optional[Path] = None):
"""Initialize environment manager with registry service."""
-
+
# Initialize registry retriever and get registry data
self.retriever = RegistryRetriever(
cache_ttl=cache_ttl,
@@ -153,10 +153,10 @@ class HatchEnvironmentManager:
local_registry_cache_path=local_registry_cache_path
)
self.registry_data = self.retriever.get_registry()
-
+
# Initialize RegistryService with automatic schema detection
self.registry_service = RegistryService(self.registry_data)
-
+
# Initialize dependency orchestrator with registry service
self.dependency_orchestrator = DependencyInstallerOrchestrator(
package_loader=self.package_loader,
@@ -172,19 +172,19 @@ The environment manager handles registry updates transparently:
```python
def refresh_registry_data(self, force_refresh: bool = False) -> None:
"""Refresh registry data and update services."""
-
+
self.logger.info("Refreshing registry data...")
try:
# Get updated registry data
self.registry_data = self.retriever.get_registry(force_refresh=force_refresh)
-
+
# Update RegistryService with new data - automatic schema detection
self.registry_service = RegistryService(self.registry_data)
-
+
# Update orchestrator with new registry service
self.dependency_orchestrator.registry_service = self.registry_service
self.dependency_orchestrator.registry_data = self.registry_data
-
+
self.logger.info("Registry data refreshed successfully")
except Exception as e:
self.logger.error(f"Failed to refresh registry data: {e}")
@@ -196,16 +196,16 @@ def refresh_registry_data(self, force_refresh: bool = False) -> None:
The environment manager uses the dependency orchestrator for package installation:
```python
-def install_package(self, package_path_or_name: str, env_name: str,
+def install_package(self, package_path_or_name: str, env_name: str,
version_constraint: Optional[str] = None,
- force_download: bool = False,
+ force_download: bool = False,
auto_approve: bool = False) -> bool:
"""Install package using version-agnostic services."""
-
+
try:
env_path = self.get_environment_path(env_name)
existing_packages = self.get_installed_packages(env_name)
-
+
# Use dependency orchestrator with version-agnostic services
success, installed_packages = self.dependency_orchestrator.install_dependencies(
package_path_or_name=package_path_or_name,
@@ -216,13 +216,13 @@ def install_package(self, package_path_or_name: str, env_name: str,
force_download=force_download,
auto_approve=auto_approve
)
-
+
if success:
self._save_environments()
self.logger.info(f"Successfully installed package in environment {env_name}")
-
+
return success
-
+
except Exception as e:
self.logger.error(f"Package installation failed: {e}")
return False
@@ -239,32 +239,32 @@ from hatch_validator import HatchPackageValidator
def main():
"""Main CLI entry point with validation integration."""
-
+
# Initialize environment manager for registry data
env_manager = HatchEnvironmentManager()
-
+
# Parse command line arguments
args = parse_arguments()
-
+
if args.command == "validate":
package_path = Path(args.package_dir).resolve()
-
+
# Create validator with registry data from environment manager
validator = HatchPackageValidator(
version="latest",
allow_local_dependencies=True,
registry_data=env_manager.registry_data # Version-agnostic registry data
)
-
+
# Validate package - automatic schema version detection
is_valid, validation_results = validator.validate_package(package_path)
-
+
if is_valid:
print(f"Package validation SUCCESSFUL: {package_path}")
return 0
else:
print(f"Package validation FAILED: {package_path}")
-
+
# Print detailed validation results
if validation_results and isinstance(validation_results, dict):
for category, result in validation_results.items():
@@ -273,7 +273,7 @@ def main():
print(f"\n{category.replace('_', ' ').title()} errors:")
for error in result['errors']:
print(f" - {error}")
-
+
return 1
```
@@ -284,37 +284,37 @@ The CLI implements comprehensive error handling for validation results:
```python
def handle_validation_results(validation_results: Dict[str, Any]) -> int:
"""Handle and display validation results."""
-
+
if validation_results.get('valid', False):
print("ā Package validation successful")
return 0
-
+
print("ā Package validation failed")
-
+
# Handle schema validation errors
if not validation_results.get('metadata_schema', {}).get('valid', True):
print("\nSchema Validation Errors:")
for error in validation_results['metadata_schema'].get('errors', []):
print(f" - {error}")
-
+
# Handle entry point validation errors
if not validation_results.get('entry_point', {}).get('valid', True):
print("\nEntry Point Validation Errors:")
for error in validation_results['entry_point'].get('errors', []):
print(f" - {error}")
-
+
# Handle dependency validation errors
if not validation_results.get('dependencies', {}).get('valid', True):
print("\nDependency Validation Errors:")
for error in validation_results['dependencies'].get('errors', []):
print(f" - {error}")
-
+
# Handle tools validation errors
if not validation_results.get('tools', {}).get('valid', True):
print("\nTools Validation Errors:")
for error in validation_results['tools'].get('errors', []):
print(f" - {error}")
-
+
return 1
```
@@ -327,16 +327,16 @@ def handle_validation_results(validation_results: Dict[str, Any]) -> int:
```python
class ServiceManager:
"""Centralized management of Hatch-Validator services."""
-
+
def __init__(self, registry_data: Dict[str, Any]):
self.registry_service = RegistryService(registry_data)
self.package_service = None # Initialized per package
-
+
def load_package(self, metadata: Dict[str, Any]) -> PackageService:
"""Load package with version-agnostic service."""
self.package_service = PackageService(metadata)
return self.package_service
-
+
def validate_package(self, package_path: Path) -> Tuple[bool, Dict[str, Any]]:
"""Validate package using integrated services."""
validator = HatchPackageValidator(
@@ -354,22 +354,22 @@ class ServiceManager:
```python
def safe_package_operation(package_path: Path, registry_data: Dict[str, Any]) -> bool:
"""Perform package operations with graceful error handling."""
-
+
try:
# Primary operation with full services
service = PackageService()
with open(package_path / "hatch_metadata.json", 'r') as f:
metadata = json.load(f)
-
+
service.load_metadata(metadata)
dependencies = service.get_dependencies()
return True
-
+
except ValueError as e:
# Handle schema version issues
logger.warning(f"Schema version issue: {e}")
return False
-
+
except Exception as e:
# Handle other errors gracefully
logger.error(f"Package operation failed: {e}")
@@ -383,40 +383,40 @@ def safe_package_operation(package_path: Path, registry_data: Dict[str, Any]) ->
```python
class OptimizedPackageProcessor:
"""Optimized package processing with service reuse."""
-
+
def __init__(self, registry_data: Dict[str, Any]):
# Initialize registry service once
self.registry_service = RegistryService(registry_data)
-
+
# Initialize validator once
self.validator = HatchPackageValidator(
version="latest",
allow_local_dependencies=True,
registry_data=registry_data
)
-
+
def process_packages(self, package_paths: List[Path]) -> List[Dict[str, Any]]:
"""Process multiple packages efficiently."""
results = []
-
+
for package_path in package_paths:
# Reuse validator instance
is_valid, validation_results = self.validator.validate_package(package_path)
-
+
# Create new PackageService per package
with open(package_path / "hatch_metadata.json", 'r') as f:
metadata = json.load(f)
-
+
package_service = PackageService(metadata)
dependencies = package_service.get_dependencies()
-
+
results.append({
'path': package_path,
'valid': is_valid,
'dependencies': dependencies,
'validation_results': validation_results
})
-
+
return results
```
diff --git a/docs/articles/users/validation/SchemaManagement.md b/docs/articles/users/validation/SchemaManagement.md
index 624e188..814ce59 100644
--- a/docs/articles/users/validation/SchemaManagement.md
+++ b/docs/articles/users/validation/SchemaManagement.md
@@ -117,7 +117,7 @@ schema_info = fetcher.get_latest_schema_info()
# "url": "https://github.com/.../schemas-package-v1.2.1/hatch_pkg_metadata_schema.json"
# },
# "registry": {
-# "version": "v1.1.0",
+# "version": "v1.1.0",
# "url": "https://github.com/.../schemas-registry-v1.1.0/hatch_all_pkg_metadata_schema.json"
# }
# }
diff --git a/docs/resources/diagrams/chain_of_responsibility_simplified.puml b/docs/resources/diagrams/chain_of_responsibility_simplified.puml
index 02238d6..b6727ab 100644
--- a/docs/resources/diagrams/chain_of_responsibility_simplified.puml
+++ b/docs/resources/diagrams/chain_of_responsibility_simplified.puml
@@ -13,7 +13,7 @@ package "Core Pattern" {
+ validate(data): result
+ set_next(validator): void
}
-
+
abstract class RegistryAccessorBase {
+ can_handle(data): bool
+ process(data): result
@@ -23,7 +23,7 @@ package "Core Pattern" {
+ get_data(metadata): data
+ set_next(accessor): void
}
-
+
}
package "Version Implementations" {
@@ -31,32 +31,32 @@ package "Version Implementations" {
+ can_handle(): bool
+ validate(): result
}
-
+
class V1_2_0_Validator {
+ can_handle(): bool
+ validate(): result
}
-
+
class V1_1_0_Validator {
+ can_handle(): bool
+ validate(): result
}
-
+
class V1_2_1_PackageAccessor {
+ can_handle(): bool
+ get_data(): data
}
-
+
class V1_2_0_PackageAccessor {
+ can_handle(): bool
+ get_data(): data
}
-
+
class V1_1_0_PackageAccessor {
+ can_handle(): bool
+ get_data(): data
}
-
+
class V1_1_0_RegistryAccessor {
+ can_handle(): bool
+ process(): result
diff --git a/docs/resources/diagrams/data_access_patterns_focus.puml b/docs/resources/diagrams/data_access_patterns_focus.puml
index a8a4a5a..1aaaca3 100644
--- a/docs/resources/diagrams/data_access_patterns_focus.puml
+++ b/docs/resources/diagrams/data_access_patterns_focus.puml
@@ -25,21 +25,21 @@ alt v1.2.1 specific handling
PA121 -> PA121 : extract_dual_entry_points()
PA121 -> PA120 : get_dependencies(metadata)
activate PA120
-
+
note over PA120 : **Version Check**\nif schema_version in ["1.2.0", "1.2.1"]:\n handle unified dependencies\nelse: delegate
-
+
PA120 -> PA120 : extract_unified_dependencies()
PA120 --> PA121 : dependency_data
deactivate PA120
-
+
else direct delegation
PA121 -> PA120 : get_dependencies(metadata)
activate PA120
PA120 -> PA110 : get_dependencies(metadata)
activate PA110
-
+
note over PA110 : **Terminal Handler**\nImplements all base\nmetadata extraction
-
+
PA110 -> PA110 : extract_basic_dependencies()
PA110 --> PA120 : dependency_data
deactivate PA110
diff --git a/docs/resources/diagrams/factory_pattern_focus.puml b/docs/resources/diagrams/factory_pattern_focus.puml
index d95e3c5..3b26b7d 100644
--- a/docs/resources/diagrams/factory_pattern_focus.puml
+++ b/docs/resources/diagrams/factory_pattern_focus.puml
@@ -26,7 +26,7 @@ package "Factory Classes" {
+ {static} register_validator(version: str, validator_class: Type[Validator]): void
+ {static} _build_chain(validators: List[Validator]): Validator
}
-
+
class HatchPkgAccessorFactory {
- {static} _accessor_registry: Dict[str, Type[HatchPkgAccessor]]
- {static} _version_order: List[str] = ["1.2.1", "1.2.0", "1.1.0"]
@@ -34,7 +34,7 @@ package "Factory Classes" {
+ {static} register_accessor(version: str, accessor_class: Type[HatchPkgAccessor]): void
+ {static} _build_chain(accessors: List[HatchPkgAccessor]): HatchPkgAccessor
}
-
+
class RegistryAccessorFactory {
//Dynamic Creation: Detects registry format and creates appropriate accessor instance//
..
@@ -52,11 +52,11 @@ package "Created Components" {
class V121Validator
class V120Validator
class V110Validator
-
+
class V121PackageAccessor
class V120PackageAccessor
class V110PackageAccessor
-
+
class V110RegistryAccessor
}
@@ -75,12 +75,12 @@ package "Chain Structure" {
head: V121Validator
V121 -> V120 -> V110
}
-
+
object "Package Accessor Chain" as PAC {
head: V121PackageAccessor
V121 -> V120 -> V110
}
-
+
object "Registry Accessor" as RA {
single: V110RegistryAccessor
}
diff --git a/docs/resources/diagrams/strategy_pattern_focus.puml b/docs/resources/diagrams/strategy_pattern_focus.puml
index b02a09a..79ba57c 100644
--- a/docs/resources/diagrams/strategy_pattern_focus.puml
+++ b/docs/resources/diagrams/strategy_pattern_focus.puml
@@ -24,7 +24,7 @@ package "Strategy Interfaces" {
class V120DependencyValidation {
+ validate_dependencies(): result
}
-
+
class V110DependencyValidation {
+ validate_dependencies(): result
}
@@ -48,7 +48,7 @@ package "Strategy Interfaces" {
class V121ToolsValidation {
+ validate_tools(): result
}
-
+
class V110ToolsValidation {
+ validate_tools(): result
}
@@ -56,7 +56,7 @@ package "Strategy Interfaces" {
abstract class SchemaValidationStrategy {
+ {abstract} validate_schema(metadata: Dict, context: ValidationContext): Tuple[bool, List[str]]
}
-
+
class V110SchemaValidation {
+ validate_schema(): result
}
@@ -87,12 +87,12 @@ package "Context Classes" {
- tools_strategy: ToolsValidationStrategy
+ validate(metadata, context): result
}
-
+
class V120Validator {
- dependency_strategy: DependencyValidationStrategy
+ validate(metadata, context): result
}
-
+
class V110Validator {
//Terminal Validator: Implements all strategies. No delegation needed.//
..
diff --git a/docs/resources/images/chain_of_responsibility_delegation_sequence.svg b/docs/resources/images/chain_of_responsibility_delegation_sequence.svg
index 3b0b3f3..42ec114 100644
--- a/docs/resources/images/chain_of_responsibility_delegation_sequence.svg
+++ b/docs/resources/images/chain_of_responsibility_delegation_sequence.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
diff --git a/docs/resources/images/chain_of_responsibility_simplified.svg b/docs/resources/images/chain_of_responsibility_simplified.svg
index b664a98..127fa41 100644
--- a/docs/resources/images/chain_of_responsibility_simplified.svg
+++ b/docs/resources/images/chain_of_responsibility_simplified.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
diff --git a/docs/resources/images/component_architecture_simplified.svg b/docs/resources/images/component_architecture_simplified.svg
index 96f04cf..73c019c 100644
--- a/docs/resources/images/component_architecture_simplified.svg
+++ b/docs/resources/images/component_architecture_simplified.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
diff --git a/docs/resources/images/data_access_patterns_focus.svg b/docs/resources/images/data_access_patterns_focus.svg
index 6dff983..0cb726d 100644
--- a/docs/resources/images/data_access_patterns_focus.svg
+++ b/docs/resources/images/data_access_patterns_focus.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
diff --git a/docs/resources/images/factory_pattern_focus.svg b/docs/resources/images/factory_pattern_focus.svg
index e468645..fe154ce 100644
--- a/docs/resources/images/factory_pattern_focus.svg
+++ b/docs/resources/images/factory_pattern_focus.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
diff --git a/docs/resources/images/strategy_pattern_focus.svg b/docs/resources/images/strategy_pattern_focus.svg
index 028026e..174b570 100644
--- a/docs/resources/images/strategy_pattern_focus.svg
+++ b/docs/resources/images/strategy_pattern_focus.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
diff --git a/docs/resources/images/validation_execution_focus.svg b/docs/resources/images/validation_execution_focus.svg
index 58e811e..1a1553a 100644
--- a/docs/resources/images/validation_execution_focus.svg
+++ b/docs/resources/images/validation_execution_focus.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
diff --git a/docs/resources/images/version_agnostic_access_simplified.svg b/docs/resources/images/version_agnostic_access_simplified.svg
index dd91914..b396f5f 100644
--- a/docs/resources/images/version_agnostic_access_simplified.svg
+++ b/docs/resources/images/version_agnostic_access_simplified.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
diff --git a/hatch_validator/__init__.py b/hatch_validator/__init__.py
index fda0369..b508afa 100644
--- a/hatch_validator/__init__.py
+++ b/hatch_validator/__init__.py
@@ -14,51 +14,53 @@
DependencyValidationStrategy,
ToolsValidationStrategy,
EntryPointValidationStrategy,
- SchemaValidationStrategy
+ SchemaValidationStrategy,
)
from hatch_validator.core.validator_factory import ValidatorFactory
# Package validator
-from hatch_validator.package_validator import HatchPackageValidator, PackageValidationError
+from hatch_validator.package_validator import (
+ HatchPackageValidator,
+ PackageValidationError,
+)
# Schema handling components
from hatch_validator.schemas.schema_fetcher import SchemaFetcher
from hatch_validator.schemas.schema_cache import SchemaCache
from hatch_validator.schemas.schemas_retriever import (
SchemaRetriever,
- get_package_schema,
- get_registry_schema
+ get_package_schema,
+ get_registry_schema,
)
# Registry Access
from hatch_validator.registry.registry_service import RegistryService
-from hatch_validator.registry.v1_1_0.registry_accessor import RegistryAccessor as V110RegistryAccessor
+from hatch_validator.registry.v1_1_0.registry_accessor import (
+ RegistryAccessor as V110RegistryAccessor,
+)
# Version-specific implementations will be imported when needed via the factory
__all__ = [
# Core validation framework
- 'ValidationContext',
- 'Validator',
- 'ValidationStrategy',
- 'DependencyValidationStrategy',
- 'ToolsValidationStrategy',
- 'EntryPointValidationStrategy',
- 'SchemaValidationStrategy',
- 'ValidatorFactory',
-
+ "ValidationContext",
+ "Validator",
+ "ValidationStrategy",
+ "DependencyValidationStrategy",
+ "ToolsValidationStrategy",
+ "EntryPointValidationStrategy",
+ "SchemaValidationStrategy",
+ "ValidatorFactory",
# Package validator
- 'HatchPackageValidator',
- 'PackageValidationError',
-
+ "HatchPackageValidator",
+ "PackageValidationError",
# Schema handling components
- 'SchemaRetriever',
- 'SchemaFetcher',
- 'SchemaCache',
- 'get_package_schema',
- 'get_registry_schema',
-
+ "SchemaRetriever",
+ "SchemaFetcher",
+ "SchemaCache",
+ "get_package_schema",
+ "get_registry_schema",
# Registry Access
- 'RegistryService',
- 'V110RegistryAccessor'
-]
\ No newline at end of file
+ "RegistryService",
+ "V110RegistryAccessor",
+]
diff --git a/hatch_validator/core/pkg_accessor_base.py b/hatch_validator/core/pkg_accessor_base.py
index 3c25e18..f71c3db 100644
--- a/hatch_validator/core/pkg_accessor_base.py
+++ b/hatch_validator/core/pkg_accessor_base.py
@@ -8,17 +8,19 @@
from typing import Any, Dict, Optional
from pathlib import Path
+
class HatchPkgAccessor(ABC):
"""Abstract base class for metadata accessors in the Chain of Responsibility pattern.
-
+
Each accessor in the chain can either handle the access for a specific
schema version or pass the request to the next accessor in the chain. The base class
provides default delegation methods for each specific metadata concern,
allowing concrete accessors to override only the concerns that have changed in their version.
"""
- def __init__(self, next_accessor: Optional['HatchPkgAccessor'] = None):
+
+ def __init__(self, next_accessor: Optional["HatchPkgAccessor"] = None):
"""Initialize the accessor with an optional next accessor in the chain.
-
+
Args:
next_accessor (HatchPkgAccessor, optional): Next accessor in the chain. Defaults to None.
"""
@@ -27,18 +29,18 @@ def __init__(self, next_accessor: Optional['HatchPkgAccessor'] = None):
@abstractmethod
def can_handle(self, schema_version: str) -> bool:
"""Determine if this accessor can handle the given schema version.
-
+
Args:
schema_version (str): Schema version to check
-
+
Returns:
bool: True if this accessor can handle the schema version
"""
pass
- def set_next(self, accessor: 'HatchPkgAccessor') -> 'HatchPkgAccessor':
+ def set_next(self, accessor: "HatchPkgAccessor") -> "HatchPkgAccessor":
"""Set the next accessor in the chain.
-
+
Args:
accessor (HatchPkgAccessor): Next accessor to set
@@ -50,23 +52,27 @@ def set_next(self, accessor: 'HatchPkgAccessor') -> 'HatchPkgAccessor':
def get_dependencies(self, metadata: Dict[str, Any]) -> Any:
"""Get dependencies from metadata.
-
+
Default behavior: delegate to next accessor in chain if available.
-
+
Args:
metadata (Dict[str, Any]): Package metadata
-
+
Returns:
Any: Dependencies structure
-
+
Raises:
NotImplementedError: If there is no next accessor and this method is not overridden
"""
if self.next_accessor:
return self.next_accessor.get_dependencies(metadata)
- raise NotImplementedError("Dependency accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Dependency accessor not implemented for this schema version"
+ )
- def is_local_dependency(self, metadata: Dict[str, Any], root_dir: Optional[Path] = None) -> bool:
+ def is_local_dependency(
+ self, metadata: Dict[str, Any], root_dir: Optional[Path] = None
+ ) -> bool:
"""Check if a Hatch dependency is local.
Default behavior: delegate to next accessor in chain if available.
@@ -81,25 +87,29 @@ def is_local_dependency(self, metadata: Dict[str, Any], root_dir: Optional[Path]
"""
if self.next_accessor:
return self.next_accessor.is_local_dependency(metadata, root_dir)
- raise NotImplementedError("Local dependency accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Local dependency accessor not implemented for this schema version"
+ )
def get_entry_point(self, metadata: Dict[str, Any]) -> Any:
"""Get entry point from metadata.
-
+
Default behavior: delegate to next accessor in chain if available.
-
+
Args:
metadata (Dict[str, Any]): Package metadata
-
+
Returns:
Any: Entry point value
-
+
Raises:
NotImplementedError: If there is no next accessor and this method is not overridden
"""
if self.next_accessor:
return self.next_accessor.get_entry_point(metadata)
- raise NotImplementedError("Entry point accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Entry point accessor not implemented for this schema version"
+ )
def get_mcp_entry_point(self, metadata: Dict[str, Any]) -> Any:
"""Get MCP entry point from metadata.
@@ -117,8 +127,9 @@ def get_mcp_entry_point(self, metadata: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_mcp_entry_point(metadata)
- raise NotImplementedError("MCP entry point accessor not implemented for this schema version"
- )
+ raise NotImplementedError(
+ "MCP entry point accessor not implemented for this schema version"
+ )
def get_hatch_mcp_entry_point(self, metadata: Dict[str, Any]) -> Any:
"""Get Hatch MCP entry point from metadata.
@@ -136,25 +147,29 @@ def get_hatch_mcp_entry_point(self, metadata: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_hatch_mcp_entry_point(metadata)
- raise NotImplementedError("Hatch MCP entry point accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Hatch MCP entry point accessor not implemented for this schema version"
+ )
def get_tools(self, metadata: Dict[str, Any]) -> Any:
"""Get tools from metadata.
-
+
Default behavior: delegate to next accessor in chain if available.
-
+
Args:
metadata (Dict[str, Any]): Package metadata
-
+
Returns:
Any: Tools structure
-
+
Raises:
NotImplementedError: If there is no next accessor and this method is not overridden
"""
if self.next_accessor:
return self.next_accessor.get_tools(metadata)
- raise NotImplementedError("Tools accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Tools accessor not implemented for this schema version"
+ )
def get_package_schema_version(self, metadata: Dict[str, Any]) -> Any:
"""Get package schema version from metadata.
@@ -170,7 +185,9 @@ def get_package_schema_version(self, metadata: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_package_schema_version(metadata)
- raise NotImplementedError("Package schema version accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Package schema version accessor not implemented for this schema version"
+ )
def get_name(self, metadata: Dict[str, Any]) -> Any:
"""Get package name from metadata.
@@ -186,7 +203,9 @@ def get_name(self, metadata: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_name(metadata)
- raise NotImplementedError("Name accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Name accessor not implemented for this schema version"
+ )
def get_version(self, metadata: Dict[str, Any]) -> Any:
"""Get package version from metadata.
@@ -202,7 +221,9 @@ def get_version(self, metadata: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_version(metadata)
- raise NotImplementedError("Version accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Version accessor not implemented for this schema version"
+ )
def get_description(self, metadata: Dict[str, Any]) -> Any:
"""Get package description from metadata.
@@ -218,7 +239,9 @@ def get_description(self, metadata: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_description(metadata)
- raise NotImplementedError("Description accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Description accessor not implemented for this schema version"
+ )
def get_tags(self, metadata: Dict[str, Any]) -> Any:
"""Get tags from metadata.
@@ -234,7 +257,9 @@ def get_tags(self, metadata: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_tags(metadata)
- raise NotImplementedError("Tags accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Tags accessor not implemented for this schema version"
+ )
def get_author(self, metadata: Dict[str, Any]) -> Any:
"""Get author from metadata.
@@ -250,7 +275,9 @@ def get_author(self, metadata: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_author(metadata)
- raise NotImplementedError("Author accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Author accessor not implemented for this schema version"
+ )
def get_contributors(self, metadata: Dict[str, Any]) -> Any:
"""Get contributors from metadata.
@@ -266,7 +293,9 @@ def get_contributors(self, metadata: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_contributors(metadata)
- raise NotImplementedError("Contributors accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Contributors accessor not implemented for this schema version"
+ )
def get_license(self, metadata: Dict[str, Any]) -> Any:
"""Get license from metadata.
@@ -282,7 +311,9 @@ def get_license(self, metadata: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_license(metadata)
- raise NotImplementedError("License accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "License accessor not implemented for this schema version"
+ )
def get_repository(self, metadata: Dict[str, Any]) -> Any:
"""Get repository from metadata.
@@ -298,7 +329,9 @@ def get_repository(self, metadata: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_repository(metadata)
- raise NotImplementedError("Repository accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Repository accessor not implemented for this schema version"
+ )
def get_documentation(self, metadata: Dict[str, Any]) -> Any:
"""Get documentation from metadata.
@@ -314,7 +347,9 @@ def get_documentation(self, metadata: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_documentation(metadata)
- raise NotImplementedError("Documentation accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Documentation accessor not implemented for this schema version"
+ )
def get_compatibility(self, metadata: Dict[str, Any]) -> Any:
"""Get compatibility from metadata.
@@ -330,7 +365,9 @@ def get_compatibility(self, metadata: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_compatibility(metadata)
- raise NotImplementedError("Compatibility accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Compatibility accessor not implemented for this schema version"
+ )
def get_citations(self, metadata: Dict[str, Any]) -> Any:
"""Get citations from metadata.
@@ -346,7 +383,9 @@ def get_citations(self, metadata: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_citations(metadata)
- raise NotImplementedError("Citations accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Citations accessor not implemented for this schema version"
+ )
def get_python_dependency_channel(self, dependency: Dict[str, Any]) -> Any:
"""Get channel from a Python dependency.
@@ -366,4 +405,6 @@ def get_python_dependency_channel(self, dependency: Dict[str, Any]) -> Any:
"""
if self.next_accessor:
return self.next_accessor.get_python_dependency_channel(dependency)
- raise NotImplementedError("Python dependency channel accessor not implemented for this schema version")
+ raise NotImplementedError(
+ "Python dependency channel accessor not implemented for this schema version"
+ )
diff --git a/hatch_validator/core/pkg_accessor_factory.py b/hatch_validator/core/pkg_accessor_factory.py
index bcc21d8..691b248 100644
--- a/hatch_validator/core/pkg_accessor_factory.py
+++ b/hatch_validator/core/pkg_accessor_factory.py
@@ -11,22 +11,26 @@
logger = logging.getLogger("hatch.pkg_accessor_factory")
+
class HatchPkgAccessorFactory:
"""Factory class for creating package accessor chains.
-
+
This factory creates the appropriate accessor chain based on the target
schema version, setting up the Chain of Responsibility pattern correctly.
The factory maintains a registry of available accessors and constructs
chains that enable proper delegation between versions.
"""
+
# Registry of available accessor versions (newest to oldest)
_accessor_registry: Dict[str, Type[HatchPkgAccessor]] = {}
_version_order: List[str] = []
@classmethod
- def register_accessor(cls, version: str, accessor_class: Type[HatchPkgAccessor]) -> None:
+ def register_accessor(
+ cls, version: str, accessor_class: Type[HatchPkgAccessor]
+ ) -> None:
"""Register an accessor class for a specific schema version.
-
+
Args:
version (str): Schema version (e.g., "1.1.0", "1.2.0")
accessor_class (Type[HatchPkgAccessor]): Accessor class for the version
@@ -41,7 +45,7 @@ def register_accessor(cls, version: str, accessor_class: Type[HatchPkgAccessor])
@classmethod
def get_supported_versions(cls) -> List[str]:
"""Get list of supported schema versions.
-
+
Returns:
List[str]: List of supported versions ordered newest to oldest
"""
@@ -54,49 +58,66 @@ def _ensure_accessors_loaded(cls) -> None:
if not cls._accessor_registry:
# Import and register available accessors
try:
- from hatch_validator.package.v1_1_0.accessor import HatchPkgAccessor as V110HatchPkgAccessor
+ from hatch_validator.package.v1_1_0.accessor import (
+ HatchPkgAccessor as V110HatchPkgAccessor,
+ )
+
cls.register_accessor("1.1.0", V110HatchPkgAccessor)
except ImportError as e:
logger.warning(f"Could not load v1.1.0 accessor: {e}")
try:
- from hatch_validator.package.v1_2_0.accessor import HatchPkgAccessor as V120HatchPkgAccessor
+ from hatch_validator.package.v1_2_0.accessor import (
+ HatchPkgAccessor as V120HatchPkgAccessor,
+ )
+
cls.register_accessor("1.2.0", V120HatchPkgAccessor)
except ImportError as e:
logger.warning(f"Could not load v1.2.0 accessor: {e}")
try:
- from hatch_validator.package.v1_2_1.accessor import HatchPkgAccessor as V121HatchPkgAccessor
+ from hatch_validator.package.v1_2_1.accessor import (
+ HatchPkgAccessor as V121HatchPkgAccessor,
+ )
+
cls.register_accessor("1.2.1", V121HatchPkgAccessor)
except ImportError as e:
logger.warning(f"Could not load v1.2.1 accessor: {e}")
try:
- from hatch_validator.package.v1_2_2.accessor import HatchPkgAccessor as V122HatchPkgAccessor
+ from hatch_validator.package.v1_2_2.accessor import (
+ HatchPkgAccessor as V122HatchPkgAccessor,
+ )
+
cls.register_accessor("1.2.2", V122HatchPkgAccessor)
except ImportError as e:
logger.warning(f"Could not load v1.2.2 accessor: {e}")
try:
- from hatch_validator.package.v2_0_0.accessor import HatchPkgAccessor as V200HatchPkgAccessor
+ from hatch_validator.package.v2_0_0.accessor import (
+ HatchPkgAccessor as V200HatchPkgAccessor,
+ )
+
cls.register_accessor("2.0.0", V200HatchPkgAccessor)
except ImportError as e:
logger.warning(f"Could not load v2.0.0 accessor: {e}")
@classmethod
- def create_accessor_chain(cls, target_version: Optional[str] = None) -> HatchPkgAccessor:
+ def create_accessor_chain(
+ cls, target_version: Optional[str] = None
+ ) -> HatchPkgAccessor:
"""Create appropriate accessor chain based on target version.
-
+
Creates a chain of accessors ordered from newest to oldest schema versions.
Each accessor in the chain can handle its specific version and delegate
to older versions for unchanged access concerns.
-
+
Args:
- target_version (str, optional): Specific schema version to target.
+ target_version (str, optional): Specific schema version to target.
If None, uses the latest available version. Defaults to None.
-
+
Returns:
HatchPkgAccessor: Head of the accessor chain
-
+
Raises:
ValueError: If the target version is not supported or no accessors are available
"""
@@ -108,8 +129,10 @@ def create_accessor_chain(cls, target_version: Optional[str] = None) -> HatchPkg
target_version = cls._version_order[0] # Latest version
elif target_version not in cls._accessor_registry:
- raise ValueError(f"Unsupported schema version: {target_version}. "
- f"Supported versions: {cls._version_order}")
+ raise ValueError(
+ f"Unsupported schema version: {target_version}. "
+ f"Supported versions: {cls._version_order}"
+ )
logger.info(f"Creating accessor chain for target version: {target_version}")
# Create chain starting from target version down to oldest
@@ -127,7 +150,9 @@ def create_accessor_chain(cls, target_version: Optional[str] = None) -> HatchPkg
# Link accessors (each points to the next older one)
for i in range(len(accessors) - 1):
accessors[i].set_next(accessors[i + 1])
- logger.debug(f"Linked accessor {chain_versions[i]} -> {chain_versions[i+1]}")
+ logger.debug(
+ f"Linked accessor {chain_versions[i]} -> {chain_versions[i+1]}"
+ )
head_accessor = accessors[0]
logger.info(f"Accessor chain created successfully, head: {target_version}")
diff --git a/hatch_validator/core/validation_context.py b/hatch_validator/core/validation_context.py
index 86aa5bb..7c8424e 100644
--- a/hatch_validator/core/validation_context.py
+++ b/hatch_validator/core/validation_context.py
@@ -10,7 +10,7 @@
class ValidationContext:
"""Context object that carries validation state through the validator chain.
-
+
This context provides a consistent interface for passing validation resources
and state between validators and strategies in the chain.
@@ -18,11 +18,16 @@ class ValidationContext:
registry data, and flags for local dependencies and schema updates. Additional
data can be stored and retrieved using the `set_data` and `get_data` methods.
"""
-
- def __init__(self, package_dir: Optional[Path] = None, registry_data: Optional[Dict] = None,
- allow_local_dependencies: bool = True, force_schema_update: bool = False):
+
+ def __init__(
+ self,
+ package_dir: Optional[Path] = None,
+ registry_data: Optional[Dict] = None,
+ allow_local_dependencies: bool = True,
+ force_schema_update: bool = False,
+ ):
"""Initialize validation context.
-
+
Args:
package_dir (Path, optional): Path to the package being validated. Defaults to None.
registry_data (Dict, optional): Registry data for dependency validation. Defaults to None.
@@ -34,23 +39,23 @@ def __init__(self, package_dir: Optional[Path] = None, registry_data: Optional[D
self.allow_local_dependencies = allow_local_dependencies
self.force_schema_update = force_schema_update
self.additional_data = {}
-
+
def set_data(self, key: str, value: Any) -> None:
"""Set additional data in the context.
-
+
Args:
key (str): Key for the data
value (Any): Value to store
"""
self.additional_data[key] = value
-
+
def get_data(self, key: str, default: Any = None) -> Any:
"""Get additional data from the context.
-
+
Args:
key (str): Key for the data
default (Any, optional): Default value if key not found. Defaults to None.
-
+
Returns:
Any: Value associated with the key or default
"""
diff --git a/hatch_validator/core/validation_strategy.py b/hatch_validator/core/validation_strategy.py
index 212ca19..672cbe0 100644
--- a/hatch_validator/core/validation_strategy.py
+++ b/hatch_validator/core/validation_strategy.py
@@ -9,34 +9,40 @@
from .validation_context import ValidationContext
+
class ValidationError(Exception):
"""Custom exception for validation errors."""
+
pass
+
class ValidationStrategy(ABC):
"""Base interface for all validation strategies.
-
+
This serves as a marker interface for validation strategies and provides
common functionality that all strategies might need.
"""
+
pass
class DependencyValidationStrategy(ValidationStrategy):
"""Strategy interface for validating package dependencies.
-
+
Different schema versions may have different dependency structures,
so this strategy allows for version-specific dependency validation logic.
"""
-
+
@abstractmethod
- def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def validate_dependencies(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate dependencies according to specific schema version.
-
+
Args:
metadata (Dict): Package metadata containing dependency information
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether dependency validation was successful
@@ -47,19 +53,21 @@ def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> T
class ToolsValidationStrategy(ValidationStrategy):
"""Strategy interface for validating tool declarations.
-
+
Validates that tools declared in metadata actually exist in the entry point file
and are properly accessible.
"""
-
+
@abstractmethod
- def validate_tools(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def validate_tools(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate tools according to specific schema version.
-
+
Args:
metadata (Dict): Package metadata containing tool declarations
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether tool validation was successful
@@ -70,18 +78,20 @@ def validate_tools(self, metadata: Dict, context: ValidationContext) -> Tuple[bo
class EntryPointValidationStrategy(ValidationStrategy):
"""Strategy interface for validating entry point files.
-
+
Validates that the entry point specified in metadata exists and is accessible.
"""
-
+
@abstractmethod
- def validate_entry_point(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def validate_entry_point(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate entry point according to specific schema version.
-
+
Args:
metadata (Dict): Package metadata containing entry point information
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether entry point validation was successful
@@ -92,19 +102,21 @@ def validate_entry_point(self, metadata: Dict, context: ValidationContext) -> Tu
class SchemaValidationStrategy(ValidationStrategy):
"""Strategy interface for validating metadata against JSON schema.
-
+
Validates that the package metadata conforms to the JSON schema for
the specific schema version.
"""
-
+
@abstractmethod
- def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def validate_schema(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate metadata against JSON schema for specific version.
-
+
Args:
metadata (Dict): Package metadata to validate against schema
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether schema validation was successful
diff --git a/hatch_validator/core/validator_base.py b/hatch_validator/core/validator_base.py
index 0627ace..4dbf45f 100644
--- a/hatch_validator/core/validator_base.py
+++ b/hatch_validator/core/validator_base.py
@@ -12,130 +12,146 @@
class Validator(ABC):
"""Abstract base class for validators in the Chain of Responsibility pattern.
-
+
Each validator in the chain can either handle the validation for a specific
version or pass the request to the next validator in the chain. The base class
provides default delegation methods for each specific validation concern,
allowing concrete validators to override only the validation concerns that
have changed in their version.
"""
-
- def __init__(self, next_validator: Optional['Validator'] = None):
+
+ def __init__(self, next_validator: Optional["Validator"] = None):
"""Initialize the validator with an optional next validator in the chain.
-
+
Args:
next_validator (Validator, optional): Next validator in the chain. Defaults to None.
"""
self.next_validator = next_validator
-
+
@abstractmethod
- def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def validate(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate metadata or delegate to next validator in chain.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources and state
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether validation was successful
- List[str]: List of validation errors
"""
pass
-
+
@abstractmethod
def can_handle(self, schema_version: str) -> bool:
"""Determine if this validator can handle the given schema version.
-
+
Args:
schema_version (str): Schema version to check
-
+
Returns:
bool: True if this validator can handle the schema version
"""
pass
-
- def set_next(self, validator: 'Validator') -> 'Validator':
+
+ def set_next(self, validator: "Validator") -> "Validator":
"""Set the next validator in the chain.
-
+
Args:
validator (Validator): Next validator to set
-
+
Returns:
Validator: The validator that was set as next
"""
self.next_validator = validator
return validator
-
- def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_schema(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate metadata against schema.
-
+
Default behavior: delegate to next validator in chain if available.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
-
+
Raises:
NotImplementedError: If there is no next validator and this method is not overridden
"""
if self.next_validator:
return self.next_validator.validate_schema(metadata, context)
- raise NotImplementedError("Schema validation not implemented for this validator")
-
- def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ raise NotImplementedError(
+ "Schema validation not implemented for this validator"
+ )
+
+ def validate_dependencies(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate package dependencies.
-
+
Default behavior: delegate to next validator in chain if available.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
-
+
Raises:
NotImplementedError: If there is no next validator and this method is not overridden
"""
if self.next_validator:
return self.next_validator.validate_dependencies(metadata, context)
- raise NotImplementedError("Dependency validation not implemented for this validator")
-
- def validate_entry_point(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ raise NotImplementedError(
+ "Dependency validation not implemented for this validator"
+ )
+
+ def validate_entry_point(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate package entry point.
-
+
Default behavior: delegate to next validator in chain if available.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
-
+
Raises:
NotImplementedError: If there is no next validator and this method is not overridden
"""
if self.next_validator:
return self.next_validator.validate_entry_point(metadata, context)
- raise NotImplementedError("Entry point validation not implemented for this validator")
-
- def validate_tools(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ raise NotImplementedError(
+ "Entry point validation not implemented for this validator"
+ )
+
+ def validate_tools(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate package tools.
-
+
Default behavior: delegate to next validator in chain if available.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
-
+
Raises:
NotImplementedError: If there is no next validator and this method is not overridden
"""
diff --git a/hatch_validator/core/validator_factory.py b/hatch_validator/core/validator_factory.py
index 55e9dca..dc26bcb 100644
--- a/hatch_validator/core/validator_factory.py
+++ b/hatch_validator/core/validator_factory.py
@@ -14,21 +14,21 @@
class ValidatorFactory:
"""Factory class for creating schema validator chains.
-
+
This factory creates the appropriate validator chain based on the target
schema version, setting up the Chain of Responsibility pattern correctly.
The factory maintains a registry of available validators and constructs
chains that enable proper delegation between versions.
"""
-
+
# Registry of available validator versions (newest to oldest)
_validator_registry: Dict[str, Type[Validator]] = {}
_version_order: List[str] = []
-
+
@classmethod
def register_validator(cls, version: str, validator_class: Type[Validator]) -> None:
"""Register a validator class for a specific schema version.
-
+
Args:
version (str): Schema version (e.g., "1.1.0", "1.2.0")
validator_class (Type[Validator]): Validator class for the version
@@ -39,87 +39,104 @@ def register_validator(cls, version: str, validator_class: Type[Validator]) -> N
# Sort versions in descending order (newest first)
cls._version_order.sort(reverse=True)
logger.debug(f"Registered validator for version {version}")
-
+
@classmethod
def get_supported_versions(cls) -> List[str]:
"""Get list of supported schema versions.
-
+
Returns:
List[str]: List of supported versions ordered newest to oldest
"""
cls._ensure_validators_loaded()
return cls._version_order.copy()
-
+
@classmethod
def _ensure_validators_loaded(cls) -> None:
"""Ensure all available validators are loaded and registered."""
if not cls._validator_registry:
# Import and register available validators
try:
- from hatch_validator.package.v1_1_0.validator import Validator as V110Validator
+ from hatch_validator.package.v1_1_0.validator import (
+ Validator as V110Validator,
+ )
+
cls.register_validator("1.1.0", V110Validator)
except ImportError as e:
logger.warning(f"Could not load v1.1.0 validator: {e}")
- # Future versions can be added here:
+ # Future versions can be added here:
try:
- from hatch_validator.package.v1_2_0.validator import Validator as V120Validator
+ from hatch_validator.package.v1_2_0.validator import (
+ Validator as V120Validator,
+ )
+
cls.register_validator("1.2.0", V120Validator)
except ImportError as e:
logger.warning(f"Could not load v1.2.0 validator: {e}")
try:
- from hatch_validator.package.v1_2_1.validator import Validator as V121Validator
+ from hatch_validator.package.v1_2_1.validator import (
+ Validator as V121Validator,
+ )
+
cls.register_validator("1.2.1", V121Validator)
except ImportError as e:
logger.warning(f"Could not load v1.2.1 validator: {e}")
try:
- from hatch_validator.package.v1_2_2.validator import Validator as V122Validator
+ from hatch_validator.package.v1_2_2.validator import (
+ Validator as V122Validator,
+ )
+
cls.register_validator("1.2.2", V122Validator)
except ImportError as e:
logger.warning(f"Could not load v1.2.2 validator: {e}")
try:
- from hatch_validator.package.v2_0_0.validator import Validator as V200Validator
+ from hatch_validator.package.v2_0_0.validator import (
+ Validator as V200Validator,
+ )
+
cls.register_validator("2.0.0", V200Validator)
except ImportError as e:
logger.warning(f"Could not load v2.0.0 validator: {e}")
-
+
@classmethod
def create_validator_chain(cls, target_version: Optional[str] = None) -> Validator:
"""Create appropriate validator chain based on target version.
-
+
Creates a chain of validators ordered from newest to oldest schema versions.
Each validator in the chain can handle its specific version and delegate
to older versions for unchanged validation concerns.
-
+
Args:
- target_version (str, optional): Specific schema version to target.
+ target_version (str, optional): Specific schema version to target.
If None, uses the latest available version. Defaults to None.
-
+
Returns:
Validator: Head of the validator chain
-
+
Raises:
ValueError: If the target version is not supported or no validators are available
"""
cls._ensure_validators_loaded()
-
+
if not cls._validator_registry:
raise ValueError("No validators available")
-
+
# Determine target version
if target_version is None:
target_version = cls._version_order[0] # Latest version
elif target_version not in cls._validator_registry:
- raise ValueError(f"Unsupported schema version: {target_version}. "
- f"Supported versions: {cls._version_order}")
-
+ raise ValueError(
+ f"Unsupported schema version: {target_version}. "
+ f"Supported versions: {cls._version_order}"
+ )
+
logger.info(f"Creating validator chain for target version: {target_version}")
- # Create chain starting from target version down to oldest
+ # Create chain starting from target version down to oldest
target_index = cls._version_order.index(target_version)
chain_versions = cls._version_order[target_index:]
-
+
# Create validators in order (newest to oldest)
validators = []
for version in chain_versions:
@@ -127,12 +144,14 @@ def create_validator_chain(cls, target_version: Optional[str] = None) -> Validat
validator = validator_class()
validators.append(validator)
logger.debug(f"Created validator for version {version}")
-
+
# Link validators (each points to the next older one)
for i in range(len(validators) - 1):
validators[i].set_next(validators[i + 1])
- logger.debug(f"Linked validator {chain_versions[i]} -> {chain_versions[i+1]}")
-
+ logger.debug(
+ f"Linked validator {chain_versions[i]} -> {chain_versions[i+1]}"
+ )
+
head_validator = validators[0]
logger.info(f"Validator chain created successfully, head: {target_version}")
return head_validator
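For completeness, a minimal sketch of driving the validator factory end to end (not part of the patch); the metadata is deliberately incomplete, so the call is expected to return errors rather than pass.

```python
# Illustrative sketch only -- not part of the patch. The metadata is intentionally
# minimal and would fail schema validation; the API calls match the classes above.
from hatch_validator.core.validator_factory import ValidatorFactory
from hatch_validator.core.validation_context import ValidationContext

metadata = {"package_schema_version": "1.1.0", "name": "example-package"}
context = ValidationContext(registry_data={})  # hypothetical registry payload

validator = ValidatorFactory.create_validator_chain("1.1.0")
is_valid, errors = validator.validate(metadata, context)
print(is_valid, errors)  # expected: False plus a list of validation errors
```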
diff --git a/hatch_validator/package/package_service.py b/hatch_validator/package/package_service.py
index 5b2ad16..b8e09b7 100644
--- a/hatch_validator/package/package_service.py
+++ b/hatch_validator/package/package_service.py
@@ -6,13 +6,14 @@
import logging
from pathlib import Path
-from typing import Optional, Dict, Any, List
+from typing import Optional, Dict, Any
from hatch_validator.core.pkg_accessor_factory import HatchPkgAccessorFactory
from hatch_validator.core.pkg_accessor_base import HatchPkgAccessor
logger = logging.getLogger("hatch.package_service")
+
class PackageService:
"""Service for package metadata operations.
@@ -21,6 +22,7 @@ class PackageService:
This service uses the accessor chain pattern to handle different
package schema versions automatically.
"""
+
def __init__(self, metadata: Optional[Dict[str, Any]] = None):
"""Initialize the package service.
@@ -42,15 +44,19 @@ def load_metadata(self, metadata: Dict[str, Any]) -> None:
ValueError: If no accessor can handle the package metadata.
"""
self._metadata = metadata
- schema_version = metadata.get("hatch_schema_version") or metadata.get("package_schema_version")
+ schema_version = metadata.get("hatch_schema_version") or metadata.get(
+ "package_schema_version"
+ )
if not schema_version:
- raise ValueError("Missing schema version in metadata. Expected 'hatch_schema_version' or 'package_schema_version'.")
-
+ raise ValueError(
+ "Missing schema version in metadata. Expected 'hatch_schema_version' or 'package_schema_version'."
+ )
+
self._accessor = HatchPkgAccessorFactory.create_accessor_chain(schema_version)
if not self._accessor:
raise ValueError(f"No accessor found for schema version: {schema_version}")
-
+
logger.debug(f"Loaded package metadata with schema version: {schema_version}")
def is_loaded(self) -> bool:
@@ -92,7 +98,9 @@ def get_dependencies(self) -> Dict[str, Any]:
raise ValueError("Package metadata is not loaded.")
return self._accessor.get_dependencies(self._metadata)
- def is_local_dependency(self, dep: Dict[str, Any], root_dir: Optional[Path] = None) -> bool:
+ def is_local_dependency(
+ self, dep: Dict[str, Any], root_dir: Optional[Path] = None
+ ) -> bool:
"""Check if a dependency is local.
Args:
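A short sketch of the service in use (not part of the patch); the metadata dict is hypothetical but uses the v1.1.0 field names handled by the accessor shown next.

```python
# Illustrative sketch only -- not part of the patch. The metadata is hypothetical
# v1.1.0-style content; the service resolves fields through the accessor chain.
from hatch_validator.package.package_service import PackageService

metadata = {
    "package_schema_version": "1.1.0",
    "name": "example-package",
    "hatch_dependencies": [],
    "python_dependencies": [],
}

service = PackageService(metadata)
print(service.is_loaded())         # True once metadata is loaded
print(service.get_dependencies())  # {"hatch": [], "python": []} for v1.1.0
```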
diff --git a/hatch_validator/package/v1_1_0/accessor.py b/hatch_validator/package/v1_1_0/accessor.py
index de90f38..c455922 100644
--- a/hatch_validator/package/v1_1_0/accessor.py
+++ b/hatch_validator/package/v1_1_0/accessor.py
@@ -1,13 +1,17 @@
from pathlib import Path
from typing import Optional
-from hatch_validator.core.pkg_accessor_base import HatchPkgAccessor as HatchPkgAccessorBase
+from hatch_validator.core.pkg_accessor_base import (
+ HatchPkgAccessor as HatchPkgAccessorBase,
+)
+
class HatchPkgAccessor(HatchPkgAccessorBase):
"""Metadata accessor for Hatch package schema version 1.1.0.
Adapts access to metadata fields for the v1.1.0 schema structure.
"""
+
def can_handle(self, schema_version: str) -> bool:
"""Check if this accessor can handle schema version 1.1.0.
@@ -27,8 +31,8 @@ def get_dependencies(self, metadata):
dict: Dict with 'hatch' and 'python' keys for dependencies
"""
return {
- 'hatch': metadata.get('hatch_dependencies', []),
- 'python': metadata.get('python_dependencies', [])
+ "hatch": metadata.get("hatch_dependencies", []),
+ "python": metadata.get("python_dependencies", []),
}
def is_local_dependency(self, dep, root_dir: Optional[Path] = None):
@@ -40,11 +44,11 @@ def is_local_dependency(self, dep, root_dir: Optional[Path] = None):
Returns:
bool: True if dependency type is 'local'
"""
- internal_type = dep.get('type')
- return internal_type.get('type') == 'local'
+ internal_type = dep.get("type")
+ return internal_type.get("type") == "local"
def get_entry_point(self, metadata):
- return metadata.get('entry_point')
+ return metadata.get("entry_point")
def get_mcp_entry_point(self, metadata):
"""Until v1.2.1, MCP entry point is the same as the main entry point.
@@ -71,40 +75,40 @@ def get_hatch_mcp_entry_point(self, metadata):
return self.get_entry_point(metadata)
def get_tools(self, metadata):
- return metadata.get('tools', [])
+ return metadata.get("tools", [])
def get_package_schema_version(self, metadata):
- return metadata.get('package_schema_version')
+ return metadata.get("package_schema_version")
def get_name(self, metadata):
- return metadata.get('name')
+ return metadata.get("name")
def get_version(self, metadata):
- return metadata.get('version')
+ return metadata.get("version")
def get_description(self, metadata):
- return metadata.get('description')
+ return metadata.get("description")
def get_tags(self, metadata):
- return metadata.get('tags', [])
+ return metadata.get("tags", [])
def get_author(self, metadata):
- return metadata.get('author')
+ return metadata.get("author")
def get_contributors(self, metadata):
- return metadata.get('contributors', [])
+ return metadata.get("contributors", [])
def get_license(self, metadata):
- return metadata.get('license')
+ return metadata.get("license")
def get_repository(self, metadata):
- return metadata.get('repository')
+ return metadata.get("repository")
def get_documentation(self, metadata):
- return metadata.get('documentation')
+ return metadata.get("documentation")
def get_compatibility(self, metadata):
- return metadata.get('compatibility', {})
+ return metadata.get("compatibility", {})
def get_citations(self, metadata):
- return metadata.get('citations', {})
+ return metadata.get("citations", {})
diff --git a/hatch_validator/package/v1_1_0/dependency_validation.py b/hatch_validator/package/v1_1_0/dependency_validation.py
index c5a5e96..20acdc8 100644
--- a/hatch_validator/package/v1_1_0/dependency_validation.py
+++ b/hatch_validator/package/v1_1_0/dependency_validation.py
@@ -3,16 +3,17 @@
This module implements dependency validation using the decoupled utility modules
for graph operations, version constraints, and registry interactions.
"""
-import json
import logging
-from typing import Dict, List, Tuple, Optional, Set
+from typing import Dict, List, Tuple
from pathlib import Path
-from hatch_validator.core.validation_strategy import DependencyValidationStrategy, ValidationError
+from hatch_validator.core.validation_strategy import (
+ DependencyValidationStrategy,
+)
from hatch_validator.core.validation_context import ValidationContext
from hatch_validator.utils.hatch_dependency_graph import HatchDependencyGraphBuilder
from hatch_validator.utils.version_utils import VersionConstraintValidator
-from hatch_validator.registry.registry_service import RegistryService, RegistryError
+from hatch_validator.registry.registry_service import RegistryService
from hatch_validator.package.package_service import PackageService
logger = logging.getLogger("hatch.dependency_validation_v1_1_0")
@@ -21,32 +22,35 @@
class DependencyValidation(DependencyValidationStrategy):
"""Strategy for validating dependencies according to v1.1.0 schema using utility modules.
-
+
This implementation uses the decoupled utility modules for:
- Graph operations (cycle detection, topological sorting)
- Version constraint validation
- Registry interactions
"""
+
def __init__(self):
"""Initialize the dependency validation strategy."""
self.version_validator = VersionConstraintValidator()
self.registry_service = None
-
- def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_dependencies(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate dependencies according to v1.1.0 schema using utility modules.
-
+
In v1.1.0, dependencies are stored in separate arrays:
- hatch_dependencies: Array of Hatch package dependencies
- python_dependencies: Array of Python package dependencies
-
+
Args:
metadata (Dict): Package metadata containing dependency information
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether dependency validation was successful
- - List[str]: List of dependency validation errors """
+ - List[str]: List of dependency validation errors"""
# Initialize package service from the context if available
package_service = context.get_data("package_service", None)
if package_service is None:
@@ -55,45 +59,54 @@ def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> T
# Store package service for use in helper methods
self.package_service = package_service
-
+
# Initialize registry service from the context if available
# Get registry data from context
registry_data = context.registry_data
registry_service = context.get_data("registry_service", None)
-
+
# Check if registry data is missing
if registry_data is None:
logger.error("No registry data available for dependency validation")
return False, ["No registry data available for dependency validation"]
-
+
if registry_service is None:
# Create a registry service with the provided data
registry_service = RegistryService(registry_data)
-
+
# Store registry service for use in helper methods
self.registry_service = registry_service
-
+
errors = []
is_valid = True
-
-
+
# Use package_service for all metadata access
deps = package_service.get_dependencies()
- hatch_dependencies = deps.get('hatch', [])
- python_dependencies = deps.get('python', [])
-
- logger.debug(f"Validating v1.1.0 dependencies - Hatch: {len(hatch_dependencies)}, Python: {len(python_dependencies)}")
-
+ hatch_dependencies = deps.get("hatch", [])
+ python_dependencies = deps.get("python", [])
+
+ logger.debug(
+ f"Validating v1.1.0 dependencies - Hatch: {len(hatch_dependencies)}, Python: {len(python_dependencies)}"
+ )
+
# Early check for local dependencies if they're not allowed
if not context.allow_local_dependencies:
- local_deps = [dep for dep in hatch_dependencies if package_service.is_local_dependency(dep)]
+ local_deps = [
+ dep
+ for dep in hatch_dependencies
+ if package_service.is_local_dependency(dep)
+ ]
if local_deps:
for dep in local_deps:
- logger.error(f"Local dependency '{dep.get('name')}' not allowed in this context")
- errors.append(f"Local dependency '{dep.get('name')}' not allowed in this context")
+ logger.error(
+ f"Local dependency '{dep.get('name')}' not allowed in this context"
+ )
+ errors.append(
+ f"Local dependency '{dep.get('name')}' not allowed in this context"
+ )
is_valid = False
return is_valid, errors
-
+
# Validate Hatch dependencies
if hatch_dependencies:
hatch_valid, hatch_errors = self._validate_hatch_dependencies(
@@ -102,7 +115,7 @@ def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> T
if not hatch_valid:
errors.extend(hatch_errors)
is_valid = False
-
+
# Validate Python dependencies format
if python_dependencies:
python_valid, python_errors = self._validate_python_dependencies(
@@ -111,39 +124,42 @@ def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> T
if not python_valid:
errors.extend(python_errors)
is_valid = False
-
+
return is_valid, errors
-
- def _validate_hatch_dependencies(self, hatch_dependencies: List[Dict],
- context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def _validate_hatch_dependencies(
+ self, hatch_dependencies: List[Dict], context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate Hatch package dependencies.
-
+
Args:
hatch_dependencies (List[Dict]): List of Hatch dependency definitions
context (ValidationContext): Validation context
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
errors = []
is_valid = True
-
+
# Step 1: Validate individual dependencies
for dep in hatch_dependencies:
dep_valid, dep_errors = self._validate_single_hatch_dependency(dep, context)
if not dep_valid:
errors.extend(dep_errors)
is_valid = False
-
+
# Step 2: Build dependency graph and check for cycles
try:
hatch_dep_graph_builder = HatchDependencyGraphBuilder(
package_service=self.package_service,
- registry_service=self.registry_service
+ registry_service=self.registry_service,
+ )
+ dependency_graph = hatch_dep_graph_builder.build_dependency_graph(
+ hatch_dependencies, context
)
- dependency_graph = hatch_dep_graph_builder.build_dependency_graph(hatch_dependencies, context)
has_cycles, cycles = dependency_graph.detect_cycles()
-
+
if has_cycles:
for cycle in cycles:
cycle_str = " -> ".join(cycle)
@@ -155,115 +171,131 @@ def _validate_hatch_dependencies(self, hatch_dependencies: List[Dict],
logger.error(f"Error building dependency graph: {e}")
errors.append(f"Error analyzing dependency graph: {e}")
is_valid = False
-
+
return is_valid, errors
-
- def _validate_single_hatch_dependency(self, dep: Dict,
- context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def _validate_single_hatch_dependency(
+ self, dep: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate a single Hatch dependency.
-
+
Args:
dep (Dict): Dependency definition
context (ValidationContext): Validation context
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
errors = []
is_valid = True
-
+
# Validate required fields
- dep_name = dep.get('name')
+ dep_name = dep.get("name")
if not dep_name:
errors.append("Hatch dependency missing name")
return False, errors
-
+
# Validate version constraint if present
- version_constraint = dep.get('version_constraint')
+ version_constraint = dep.get("version_constraint")
if version_constraint:
- constraint_valid, constraint_error = self.version_validator.validate_constraint(version_constraint)
+ (
+ constraint_valid,
+ constraint_error,
+ ) = self.version_validator.validate_constraint(version_constraint)
if not constraint_valid:
- errors.append(f"Invalid version constraint for '{dep_name}': {constraint_error}")
+ errors.append(
+ f"Invalid version constraint for '{dep_name}': {constraint_error}"
+ )
is_valid = False
-
+
if self.package_service.is_local_dependency(dep):
local_valid, local_errors = self._validate_local_dependency(dep, context)
if not local_valid:
errors.extend(local_errors)
is_valid = False
else:
- registry_valid, registry_errors = self._validate_registry_dependency(dep, context)
+ registry_valid, registry_errors = self._validate_registry_dependency(
+ dep, context
+ )
if not registry_valid:
errors.extend(registry_errors)
is_valid = False
-
+
return is_valid, errors
-
- def _validate_local_dependency(self, dep: Dict,
- context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def _validate_local_dependency(
+ self, dep: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate a local file dependency.
-
+
Args:
dep (Dict): Local dependency definition
context (ValidationContext): Validation context
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
errors = []
is_valid = True
-
- dep_name = dep.get('name')
- dep_type = dep.get('type', {})
- uri = dep_type.get('uri')
-
+
+ dep_name = dep.get("name")
+ dep_type = dep.get("type", {})
+ uri = dep_type.get("uri")
+
# Validate URI
if not uri:
errors.append(f"Local dependency '{dep_name}' missing URI")
return False, errors
-
- if not uri.startswith('file://'):
- errors.append(f"Local dependency '{dep_name}' URI must start with 'file://'")
+
+ if not uri.startswith("file://"):
+ errors.append(
+ f"Local dependency '{dep_name}' URI must start with 'file://'"
+ )
is_valid = False
else:
# Extract and validate path
path_str = uri[7:] # Remove "file://"
path = Path(path_str)
-
+
# Resolve relative paths
if context.package_dir and not path.is_absolute():
path = context.package_dir / path
-
+
# Check if path exists
if not path.exists() or not path.is_dir():
- errors.append(f"Local dependency '{dep_name}' path does not exist: {path}")
+ errors.append(
+ f"Local dependency '{dep_name}' path does not exist: {path}"
+ )
is_valid = False
else:
# Check for metadata file
metadata_path = path / "hatch_metadata.json"
if not metadata_path.exists():
- errors.append(f"Local dependency '{dep_name}' missing hatch_metadata.json: {metadata_path}")
+ errors.append(
+ f"Local dependency '{dep_name}' missing hatch_metadata.json: {metadata_path}"
+ )
is_valid = False
-
+
return is_valid, errors
-
- def _validate_registry_dependency(self, dep: Dict,
- context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def _validate_registry_dependency(
+ self, dep: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate a registry dependency.
-
+
Args:
dep (Dict): Registry dependency definition
context (ValidationContext): Validation context
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
errors = []
is_valid = True
-
- dep_name = dep.get('name')
- version_constraint = dep.get('version_constraint')
-
+
+ dep_name = dep.get("name")
+ version_constraint = dep.get("version_constraint")
+
# Check if package exists in registry
exists, error = self.registry_service.validate_package_exists(dep_name)
if not exists:
@@ -271,38 +303,51 @@ def _validate_registry_dependency(self, dep: Dict,
is_valid = False
elif version_constraint:
# Check if the available version satisfies the constraint
- version_compatible, version_error = self.registry_service.validate_version_compatibility(
- dep_name, version_constraint)
+ (
+ version_compatible,
+ version_error,
+ ) = self.registry_service.validate_version_compatibility(
+ dep_name, version_constraint
+ )
if not version_compatible:
- errors.append(f"No version of '{dep_name}' satisfies constraint {version_constraint}: {version_error}")
+ errors.append(
+ f"No version of '{dep_name}' satisfies constraint {version_constraint}: {version_error}"
+ )
is_valid = False
-
+
return is_valid, errors
-
- def _validate_python_dependencies(self, python_dependencies: List[Dict]) -> Tuple[bool, List[str]]:
+
+ def _validate_python_dependencies(
+ self, python_dependencies: List[Dict]
+ ) -> Tuple[bool, List[str]]:
"""Validate Python package dependencies format.
-
+
Args:
python_dependencies (List[Dict]): List of Python dependency definitions
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
errors = []
is_valid = True
-
+
for dep in python_dependencies:
- dep_name = dep.get('name')
+ dep_name = dep.get("name")
if not dep_name:
errors.append("Python dependency missing name")
is_valid = False
continue
-
- version_constraint = dep.get('version_constraint')
+
+ version_constraint = dep.get("version_constraint")
if version_constraint:
- constraint_valid, constraint_error = self.version_validator.validate_constraint(version_constraint)
+ (
+ constraint_valid,
+ constraint_error,
+ ) = self.version_validator.validate_constraint(version_constraint)
if not constraint_valid:
- errors.append(f"Invalid version constraint for Python dependency '{dep_name}': {constraint_error}")
+ errors.append(
+ f"Invalid version constraint for Python dependency '{dep_name}': {constraint_error}"
+ )
is_valid = False
-
+
return is_valid, errors
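To make the wiring concrete, a sketch of invoking this strategy directly (not part of the patch); the dependency entries and registry payload are hypothetical, and with an empty registry the lookup is expected to report errors rather than succeed.

```python
# Illustrative sketch only -- not part of the patch. Dependency entries and the
# registry payload are hypothetical; the wiring mirrors how the strategy reads
# "package_service" and registry data from the ValidationContext above.
from hatch_validator.core.validation_context import ValidationContext
from hatch_validator.package.package_service import PackageService
from hatch_validator.package.v1_1_0.dependency_validation import DependencyValidation

metadata = {
    "package_schema_version": "1.1.0",
    "hatch_dependencies": [
        # "type" value is a hypothetical non-local marker; only "local" is special-cased
        {"name": "base-pkg", "version_constraint": ">=1.0.0", "type": {"type": "registry"}},
    ],
    "python_dependencies": [],
}

context = ValidationContext(registry_data={})  # hypothetical registry payload
context.set_data("package_service", PackageService(metadata))

is_valid, errors = DependencyValidation().validate_dependencies(metadata, context)
print(is_valid, errors)  # expected: False with errors for the unknown registry package
```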
diff --git a/hatch_validator/package/v1_1_0/entry_point_validation.py b/hatch_validator/package/v1_1_0/entry_point_validation.py
index 482c3f0..76253c8 100644
--- a/hatch_validator/package/v1_1_0/entry_point_validation.py
+++ b/hatch_validator/package/v1_1_0/entry_point_validation.py
@@ -8,16 +8,19 @@
logger = logging.getLogger("hatch_validator.schemas.v1_1_0.entry_point_validation")
logger.setLevel(logging.INFO)
+
class EntryPointValidation(EntryPointValidationStrategy):
"""Strategy for validating entry point files for v1.1.0."""
-
- def validate_entry_point(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_entry_point(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate entry point according to v1.1.0 schema.
-
+
Args:
metadata (Dict): Package metadata containing entry point information
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether entry point validation was successful
@@ -31,18 +34,18 @@ def validate_entry_point(self, metadata: Dict, context: ValidationContext) -> Tu
if not entry_point:
logger.error("No entry_point specified in metadata")
return False, ["No entry_point specified in metadata"]
-
+
if not context.package_dir:
logger.error("Package directory not provided for entry point validation")
return False, ["Package directory not provided for entry point validation"]
-
+
entry_path = context.package_dir / entry_point
if not entry_path.exists():
logger.error(f"Entry point file '{entry_point}' does not exist")
return False, [f"Entry point file '{entry_point}' does not exist"]
-
+
if not entry_path.is_file():
logger.error(f"Entry point '{entry_point}' is not a file")
return False, [f"Entry point '{entry_point}' is not a file"]
-
- return True, []
\ No newline at end of file
+
+ return True, []
diff --git a/hatch_validator/package/v1_1_0/schema_validation.py b/hatch_validator/package/v1_1_0/schema_validation.py
index 8a54499..8dccb3e 100644
--- a/hatch_validator/package/v1_1_0/schema_validation.py
+++ b/hatch_validator/package/v1_1_0/schema_validation.py
@@ -10,6 +10,7 @@
logger = logging.getLogger("hatch_validator.schemas.v1_1_0.schema_validation")
logger.setLevel(logging.INFO)
+
class SchemaValidation(SchemaValidationStrategy):
"""Strategy for validating metadata against JSON schema.
@@ -17,8 +18,10 @@ class SchemaValidation(SchemaValidationStrategy):
(`package_schema_version`), this can be used for any schema version
as long as the schema is available.
"""
-
- def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_schema(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate metadata against v1.1.0 schema.
In fact, given that the function is retrieving the schema version from
@@ -28,7 +31,7 @@ def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[b
Args:
metadata (Dict): Package metadata to validate against schema
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether schema validation was successful
@@ -39,18 +42,22 @@ def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[b
if package_service is None:
package_service = PackageService(metadata)
schema_version = package_service.get_field("package_schema_version")
- schema = get_package_schema(version=schema_version, force_update=context.force_schema_update)
+ schema = get_package_schema(
+ version=schema_version, force_update=context.force_schema_update
+ )
if not schema:
logger.error(f"Failed to load package schema version {schema_version}")
- return False, [f"Failed to load package schema version {schema_version}"]
+ return False, [
+ f"Failed to load package schema version {schema_version}"
+ ]
# Validate against schema
jsonschema.validate(instance=metadata, schema=schema)
return True, []
-
+
except jsonschema.exceptions.ValidationError as e:
logger.error(f"Schema validation error: {e.message}")
return False, [f"Schema validation error: {e.message}"]
except Exception as e:
logger.error(f"Error during schema validation: {str(e)}")
- return False, [f"Error during schema validation: {str(e)}"]
\ No newline at end of file
+ return False, [f"Error during schema validation: {str(e)}"]
diff --git a/hatch_validator/package/v1_1_0/tools_validation.py b/hatch_validator/package/v1_1_0/tools_validation.py
index 42b1489..056a00f 100644
--- a/hatch_validator/package/v1_1_0/tools_validation.py
+++ b/hatch_validator/package/v1_1_0/tools_validation.py
@@ -9,16 +9,19 @@
logger = logging.getLogger("hatch_validator.schemas.v1_1_0.tools_validation")
logger.setLevel(logging.INFO)
+
class ToolsValidation(ToolsValidationStrategy):
"""Strategy for validating tool declarations for v1.1.0."""
-
- def validate_tools(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_tools(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate tools according to v1.1.0 schema.
-
+
Args:
metadata (Dict): Package metadata containing tool declarations
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether tool validation was successful
@@ -33,47 +36,52 @@ def validate_tools(self, metadata: Dict, context: ValidationContext) -> Tuple[bo
entry_point = package_service.get_entry_point()
if not entry_point:
return False, ["Entry point required for tool validation"]
-
+
if not context.package_dir:
return False, ["Package directory not provided for tool validation"]
-
+
errors = []
all_exist = True
-
+
# Parse the entry point file to get function names
try:
module_path = context.package_dir / entry_point
- with open(module_path, 'r', encoding='utf-8') as file:
+ with open(module_path, "r", encoding="utf-8") as file:
try:
tree = ast.parse(file.read(), filename=str(module_path))
-
+
# Get all function names defined in the file
- function_names = [node.name for node in ast.walk(tree)
- if isinstance(node, ast.FunctionDef)]
-
+ function_names = [
+ node.name
+ for node in ast.walk(tree)
+ if isinstance(node, ast.FunctionDef)
+ ]
+
logger.debug(f"Found functions in {entry_point}: {function_names}")
-
+
# Check for each tool
for tool in tools:
- tool_name = tool.get('name')
+ tool_name = tool.get("name")
if not tool_name:
logger.error(f"Tool metadata missing name: {tool}")
errors.append("Tool missing name in metadata")
all_exist = False
continue
-
+
# Check if the tool function is defined in the file
if tool_name not in function_names:
logger.error(f"Tool '{tool_name}' not found in entry point")
- errors.append(f"Tool '{tool_name}' not found in entry point")
+ errors.append(
+ f"Tool '{tool_name}' not found in entry point"
+ )
all_exist = False
-
+
except SyntaxError as e:
logger.error(f"Syntax error in {entry_point}: {e}")
return False, [f"Syntax error in {entry_point}: {e}"]
-
+
except Exception as e:
logger.error(f"Error validating tools: {str(e)}")
return False, [f"Error validating tools: {str(e)}"]
-
- return all_exist, errors
\ No newline at end of file
+
+ return all_exist, errors
diff --git a/hatch_validator/package/v1_1_0/validator.py b/hatch_validator/package/v1_1_0/validator.py
index da6dc60..53bb8ef 100644
--- a/hatch_validator/package/v1_1_0/validator.py
+++ b/hatch_validator/package/v1_1_0/validator.py
@@ -21,21 +21,23 @@
logger = logging.getLogger("hatch.schema.v1_1_0.validator")
logger.setLevel(logging.DEBUG)
+
class Validator(ValidatorBase):
"""Validator for packages using schema version 1.1.0
-
+
Schema version 1.1.0 includes hatch_dependencies and python_dependencies
as separate arrays.
As the end of the validator chain, this implementation provides concrete
implementations for all validation methods.
-
+
Note:
This validator is the first to be implemented since the introduction
of the chain of responsibility pattern, so it is the last in the chain.
"""
+
def __init__(self, next_validator=None):
"""Initialize the v1.1.0 validator with strategies.
-
+
Args:
next_validator (Validator, optional): Next validator in chain. Defaults to None.
"""
@@ -44,43 +46,45 @@ def __init__(self, next_validator=None):
self.dependency_strategy = DependencyValidation()
self.entry_point_strategy = EntryPointValidation()
self.tools_strategy = ToolsValidation()
-
+
def can_handle(self, schema_version: str) -> bool:
"""Determine if this validator can handle the given schema version.
-
+
Args:
schema_version (str): Schema version to check
-
+
Returns:
bool: True if this validator can handle the schema version
"""
return schema_version == "1.1.0"
-
- def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validation entry point for packages following schema v1.1.0.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources and state
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether validation was successful
- List[str]: List of validation errors
"""
schema_version = metadata.get("package_schema_version", "")
-
+
# Check if we can handle this version
if not self.can_handle(schema_version):
if self.next_validator:
return self.next_validator.validate(metadata, context)
return False, [f"Unsupported schema version: {schema_version}"]
-
- logger.info(f"Validating package metadata using v1.1.0 validator")
-
+
+ logger.info("Validating package metadata using v1.1.0 validator")
+
all_errors = []
is_valid = True
-
+
# 1. Validate against JSON schema
schema_valid, schema_errors = self.validate_schema(metadata, context)
if not schema_valid:
@@ -88,75 +92,83 @@ def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, Li
is_valid = False
# If schema validation fails, don't continue with other validations
return is_valid, all_errors
-
+
# 2. Validate dependencies
deps_valid, deps_errors = self.validate_dependencies(metadata, context)
if not deps_valid:
all_errors.extend(deps_errors)
is_valid = False
-
+
# 3. Validate entry point (if package directory is provided)
if context.package_dir:
entry_valid, entry_errors = self.validate_entry_point(metadata, context)
if not entry_valid:
all_errors.extend(entry_errors)
is_valid = False
-
+
# 4. Validate tools (if entry point validation passed)
if entry_valid:
tools_valid, tools_errors = self.validate_tools(metadata, context)
if not tools_valid:
all_errors.extend(tools_errors)
is_valid = False
-
+
return is_valid, all_errors
-
- def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_schema(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate metadata against schema for v1.1.0.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
logger.debug("Validating schema for v1.1.0")
return self.schema_strategy.validate_schema(metadata, context)
-
- def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_dependencies(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate dependencies for v1.1.0.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
logger.debug("Validating dependencies for v1.1.0")
return self.dependency_strategy.validate_dependencies(metadata, context)
-
- def validate_entry_point(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_entry_point(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate entry point for v1.1.0.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
logger.debug("Validating entry point for v1.1.0")
return self.entry_point_strategy.validate_entry_point(metadata, context)
-
- def validate_tools(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_tools(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate tools for v1.1.0.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
diff --git a/hatch_validator/package/v1_2_0/accessor.py b/hatch_validator/package/v1_2_0/accessor.py
index a74f0e8..a5553f6 100644
--- a/hatch_validator/package/v1_2_0/accessor.py
+++ b/hatch_validator/package/v1_2_0/accessor.py
@@ -1,18 +1,21 @@
-
import logging
from typing import Optional
from pathlib import Path
-from hatch_validator.core.pkg_accessor_base import HatchPkgAccessor as HatchPkgAccessorBase
+from hatch_validator.core.pkg_accessor_base import (
+ HatchPkgAccessor as HatchPkgAccessorBase,
+)
logger = logging.getLogger("hatch.package.v1_2_0.accessor")
logger.setLevel(logging.DEBUG)
+
class HatchPkgAccessor(HatchPkgAccessorBase):
"""Metadata accessor for Hatch package schema version 1.2.0.
Adapts access to metadata fields for the v1.2.0 schema structure.
"""
+
def can_handle(self, schema_version: str) -> bool:
"""Check if this accessor can handle schema version 1.2.0.
@@ -31,15 +34,15 @@ def get_dependencies(self, metadata):
Returns:
dict: Dict with 'hatch', 'python', 'system', and 'docker' keys for dependencies
"""
- deps = metadata.get('dependencies', {})
+ deps = metadata.get("dependencies", {})
return {
- 'hatch': deps.get('hatch', []),
- 'python': deps.get('python', []),
- 'system': deps.get('system', []),
- 'docker': deps.get('docker', [])
+ "hatch": deps.get("hatch", []),
+ "python": deps.get("python", []),
+ "system": deps.get("system", []),
+ "docker": deps.get("docker", []),
}
- def is_local_dependency(self, dep, root_dir : Optional[Path] = None):
+ def is_local_dependency(self, dep, root_dir: Optional[Path] = None):
"""Check if a Hatch dependency is local for v1.2.0.
Args:
@@ -50,7 +53,7 @@ def is_local_dependency(self, dep, root_dir : Optional[Path] = None):
"""
try:
# Attempt to convert the name to a Path object
- name_as_path = Path(dep.get('name', ''))
+ name_as_path = Path(dep.get("name", ""))
except ValueError:
# If conversion fails, it's not a valid path
return False
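For reference, here is a hypothetical v1.2.0 metadata fragment and the normalization that `get_dependencies` above applies to it. The field names for hatch and python entries (`name`, `version_constraint`) are taken from the validation code later in this diff; the constraint values and package names are illustrative, and the `system`/`docker` entry shapes are not shown here, so they are simply left out.

```python
# Hypothetical v1.2.0 metadata fragment (values are illustrative).
metadata = {
    "package_schema_version": "1.2.0",
    "dependencies": {
        "hatch": [{"name": "my-repo:utils-pkg", "version_constraint": ">=1.0.0"}],
        "python": [{"name": "requests", "version_constraint": ">=2.31"}],
        # "system" and "docker" omitted on purpose
    },
}

# What HatchPkgAccessor.get_dependencies(metadata) evaluates to for this input:
deps = metadata.get("dependencies", {})
normalized = {
    "hatch": deps.get("hatch", []),
    "python": deps.get("python", []),
    "system": deps.get("system", []),   # -> [] (missing keys default to empty lists)
    "docker": deps.get("docker", []),   # -> []
}
```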
diff --git a/hatch_validator/package/v1_2_0/dependency_validation.py b/hatch_validator/package/v1_2_0/dependency_validation.py
index 7877890..8d60bce 100644
--- a/hatch_validator/package/v1_2_0/dependency_validation.py
+++ b/hatch_validator/package/v1_2_0/dependency_validation.py
@@ -7,14 +7,17 @@
import json
import logging
-from typing import Dict, List, Tuple, Optional, Set
+from typing import Dict, List, Tuple, Optional
from pathlib import Path
-from hatch_validator.core.validation_strategy import DependencyValidationStrategy, ValidationError
+from hatch_validator.core.validation_strategy import (
+ DependencyValidationStrategy,
+ ValidationError,
+)
from hatch_validator.core.validation_context import ValidationContext
from hatch_validator.utils.hatch_dependency_graph import HatchDependencyGraphBuilder
from hatch_validator.utils.version_utils import VersionConstraintValidator
-from hatch_validator.registry.registry_service import RegistryService, RegistryError
+from hatch_validator.registry.registry_service import RegistryService
from hatch_validator.package.package_service import PackageService
logger = logging.getLogger("hatch.dependency_validation_v1_2_0")
@@ -23,7 +26,7 @@
class DependencyValidation(DependencyValidationStrategy):
"""Strategy for validating dependencies according to v1.2.0 schema using utility modules.
-
+
This implementation uses the same validation logic as v1.1.0 but adapted for
the unified dependencies structure in v1.2.0:
- dependencies.hatch: Array of Hatch package dependencies
@@ -31,24 +34,27 @@ class DependencyValidation(DependencyValidationStrategy):
- dependencies.system: Array of System package dependencies (format validation only)
- dependencies.docker: Array of Docker image dependencies (format validation only)
"""
+
def __init__(self):
"""Initialize the dependency validation strategy."""
self.version_validator = VersionConstraintValidator()
- self.registry_service : Optional[RegistryService] = None
+ self.registry_service: Optional[RegistryService] = None
- def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def validate_dependencies(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate dependencies according to v1.2.0 schema using utility modules.
-
+
In v1.2.0, dependencies are stored in a unified object:
- dependencies.hatch: Array of Hatch package dependencies
- dependencies.python: Array of Python package dependencies
- - dependencies.system: Array of System package dependencies
+ - dependencies.system: Array of System package dependencies
- dependencies.docker: Array of Docker image dependencies
-
+
Args:
metadata (Dict): Package metadata containing dependency information
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether dependency validation was successful
@@ -68,16 +74,18 @@ def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> T
# Get registry data from context
registry_data = context.registry_data
registry_service = context.get_data("registry_service", None)
-
+
# Check if registry data is missing
if registry_data is None:
logger.error("No registry data available for dependency validation")
- raise ValidationError("No registry data available for dependency validation")
-
+ raise ValidationError(
+ "No registry data available for dependency validation"
+ )
+
if registry_service is None:
# Create a registry service with the provided data
registry_service = RegistryService(registry_data)
-
+
# Store registry service for use in helper methods
self.registry_service = registry_service
@@ -85,7 +93,7 @@ def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> T
is_valid = True
# Get dependencies from v1.2.0 unified format
dependencies = package_service.get_dependencies()
- hatch_dependencies = dependencies.get('hatch', [])
+ hatch_dependencies = dependencies.get("hatch", [])
# Validate Hatch dependencies
if hatch_dependencies:
@@ -100,43 +108,48 @@ def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> T
logger.error(f"Error during dependency validation: {e}")
errors.append(f"Error during dependency validation: {e}")
is_valid = False
-
+
logger.debug(f"Dependency validation result: {is_valid}, errors: {errors}")
return is_valid, errors
- def _validate_hatch_dependencies(self, hatch_dependencies: List[Dict],
- context: ValidationContext) -> Tuple[bool, List[str]]:
+ def _validate_hatch_dependencies(
+ self, hatch_dependencies: List[Dict], context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate Hatch package dependencies.
-
+
Args:
hatch_dependencies (List[Dict]): List of Hatch dependency definitions
context (ValidationContext): Validation context
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
errors = []
is_valid = True
-
+
# Step 1: Validate individual dependencies
for dep in hatch_dependencies:
dep_valid, dep_errors = self._validate_single_hatch_dependency(dep, context)
if not dep_valid:
errors.extend(dep_errors)
is_valid = False
-
+
# Step 2: Build dependency graph and check for cycles
try:
hatch_dep_graph_builder = HatchDependencyGraphBuilder(
package_service=self.package_service,
- registry_service=self.registry_service
+ registry_service=self.registry_service,
+ )
+ dependency_graph = hatch_dep_graph_builder.build_dependency_graph(
+ hatch_dependencies, context
+ )
+ logger.debug(
+ f"Dependency graph: {json.dumps(dependency_graph.to_dict(), indent=2)}"
)
- dependency_graph = hatch_dep_graph_builder.build_dependency_graph(hatch_dependencies, context)
- logger.debug(f"Dependency graph: {json.dumps(dependency_graph.to_dict(), indent=2)}")
has_cycles, cycles = dependency_graph.detect_cycles()
-
+
if has_cycles:
for cycle in cycles:
cycle_str = " -> ".join(cycle)
@@ -148,7 +161,7 @@ def _validate_hatch_dependencies(self, hatch_dependencies: List[Dict],
logger.error(f"Error building dependency graph: {e}")
errors.append(f"Error analyzing dependency graph: {e}")
is_valid = False
-
+
return is_valid, errors
def _parse_hatch_dep_name(self, dep_name: str) -> Tuple[Optional[str], str]:
@@ -162,12 +175,14 @@ def _parse_hatch_dep_name(self, dep_name: str) -> Tuple[Optional[str], str]:
Returns:
Tuple[Optional[str], str]: (repo_name, package_name). repo_name is None if not present.
"""
- if ':' in dep_name:
- repo, pkg = dep_name.split(':', 1)
+ if ":" in dep_name:
+ repo, pkg = dep_name.split(":", 1)
return repo, pkg
return None, dep_name
-
- def _validate_single_hatch_dependency(self, dep: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def _validate_single_hatch_dependency(
+ self, dep: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate a single Hatch dependency.
Args:
@@ -178,24 +193,31 @@ def _validate_single_hatch_dependency(self, dep: Dict, context: ValidationContex
"""
errors = []
is_valid = True
- dep_name = dep.get('name')
+ dep_name = dep.get("name")
if not dep_name:
errors.append("Hatch dependency missing name")
return False, errors
-
+
# Validate version constraint if present
- version_constraint = dep.get('version_constraint')
+ version_constraint = dep.get("version_constraint")
if version_constraint:
- constraint_valid, constraint_error = self.version_validator.validate_constraint(version_constraint)
+ (
+ constraint_valid,
+ constraint_error,
+ ) = self.version_validator.validate_constraint(version_constraint)
if not constraint_valid:
- errors.append(f"Invalid version constraint for '{dep_name}': {constraint_error}")
+ errors.append(
+ f"Invalid version constraint for '{dep_name}': {constraint_error}"
+ )
is_valid = False
-
+
# Check if this looks like a local path, otherwise treat as remote
if self.package_service.is_local_dependency(dep, context.package_dir):
# Local dependency - check if allowed
if not context.allow_local_dependencies:
- errors.append(f"Local dependency '{dep_name}' not allowed in this context")
+ errors.append(
+ f"Local dependency '{dep_name}' not allowed in this context"
+ )
return False, errors
local_valid, local_errors = self._validate_local_dependency(dep, context)
if not local_valid:
@@ -203,14 +225,18 @@ def _validate_single_hatch_dependency(self, dep: Dict, context: ValidationContex
is_valid = False
else:
# Remote dependency - validate through registry
- registry_valid, registry_errors = self._validate_registry_dependency(dep, context)
+ registry_valid, registry_errors = self._validate_registry_dependency(
+ dep, context
+ )
if not registry_valid:
errors.extend(registry_errors)
is_valid = False
-
+
return is_valid, errors
-
- def _validate_local_dependency(self, dep: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def _validate_local_dependency(
+ self, dep: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate a local file dependency.
Args:
@@ -220,31 +246,39 @@ def _validate_local_dependency(self, dep: Dict, context: ValidationContext) -> T
Tuple[bool, List[str]]: Validation result and errors
"""
errors = []
- dep_name = dep.get('name')
-
+ dep_name = dep.get("name")
+
# Resolve path
path = Path(dep_name)
if context.package_dir and not path.is_absolute():
path = context.package_dir / path
-
+
        # Check that the path exists and is a directory (a local Hatch dependency must be a directory)
if path.exists():
if not path.is_dir():
- errors.append(f"Local dependency '{dep_name}' path is not a directory: {path}")
+ errors.append(
+ f"Local dependency '{dep_name}' path is not a directory: {path}"
+ )
return False, errors
else:
- errors.append(f"Local dependency '{dep_name}' path is not a directory: {path}")
+ errors.append(
+ f"Local dependency '{dep_name}' path is not a directory: {path}"
+ )
return False, errors
-
+
# Check for metadata file
metadata_path = path / "hatch_metadata.json"
if not metadata_path.exists():
- errors.append(f"Local dependency '{dep_name}' missing hatch_metadata.json: {metadata_path}")
+ errors.append(
+ f"Local dependency '{dep_name}' missing hatch_metadata.json: {metadata_path}"
+ )
return False, errors
-
+
return True, []
-
- def _validate_registry_dependency(self, dep: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def _validate_registry_dependency(
+ self, dep: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate a registry dependency.
Args:
@@ -254,33 +288,45 @@ def _validate_registry_dependency(self, dep: Dict, context: ValidationContext) -
Tuple[bool, List[str]]: Validation result and errors
"""
errors = []
- dep_name = dep.get('name')
- version_constraint = dep.get('version_constraint')
-
+ dep_name = dep.get("name")
+ version_constraint = dep.get("version_constraint")
+
# Parse repo and package name
repo, pkg = self._parse_hatch_dep_name(dep_name)
-
+
if repo:
# Check repo existence
if not self.registry_service.repository_exists(repo):
- errors.append(f"Repository '{repo}' not found in registry for dependency '{dep_name}'")
+ errors.append(
+ f"Repository '{repo}' not found in registry for dependency '{dep_name}'"
+ )
return False, errors
# Check package existence in repo
if not self.registry_service.package_exists(pkg, repo_name=repo):
- errors.append(f"Package '{pkg}' not found in repository '{repo}' for dependency '{dep_name}'")
+ errors.append(
+ f"Package '{pkg}' not found in repository '{repo}' for dependency '{dep_name}'"
+ )
return False, errors
else:
# No repo prefix, check package in any repo
if not self.registry_service.package_exists(pkg):
- errors.append(f"Registry dependency '{pkg}' not found in registry for dependency '{dep_name}'")
+ errors.append(
+ f"Registry dependency '{pkg}' not found in registry for dependency '{dep_name}'"
+ )
return False, errors
-
+
# Check version compatibility if constraint is specified
if version_constraint:
- version_compatible, version_error = self.registry_service.validate_version_compatibility(
- dep_name, version_constraint)
+ (
+ version_compatible,
+ version_error,
+ ) = self.registry_service.validate_version_compatibility(
+ dep_name, version_constraint
+ )
if not version_compatible:
- errors.append(f"No version of '{dep_name}' satisfies constraint {version_constraint}: {version_error}")
+ errors.append(
+ f"No version of '{dep_name}' satisfies constraint {version_constraint}: {version_error}"
+ )
return False, errors
-
+
return True, []
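Two details of the Hatch dependency checks above are easy to illustrate in isolation: dependency names may carry an optional `repo:` prefix that `_parse_hatch_dep_name` splits on the first colon, and cycles found in the dependency graph are reported as `a -> b -> c -> a` chains. The sketch below reproduces the name parsing and pairs it with a generic depth-first cycle search; the real graph work is done by `HatchDependencyGraphBuilder`, whose implementation is not part of this diff.

```python
from typing import Dict, List, Optional, Tuple


def parse_hatch_dep_name(dep_name: str) -> Tuple[Optional[str], str]:
    """Split an optional 'repo:' prefix off a dependency name."""
    if ":" in dep_name:
        repo, pkg = dep_name.split(":", 1)
        return repo, pkg
    return None, dep_name


def find_cycle(graph: Dict[str, List[str]]) -> Optional[List[str]]:
    """Return one dependency cycle as a list of names, or None (generic stand-in)."""
    visiting, done = set(), set()

    def dfs(node: str, path: List[str]) -> Optional[List[str]]:
        visiting.add(node)
        path.append(node)
        for dep in graph.get(node, []):
            if dep in visiting:  # back edge -> cycle
                return path[path.index(dep):] + [dep]
            if dep not in done:
                found = dfs(dep, path)
                if found:
                    return found
        visiting.discard(node)
        done.add(node)
        path.pop()
        return None

    for start in graph:
        if start not in done:
            cycle = dfs(start, [])
            if cycle:
                return cycle
    return None


print(parse_hatch_dep_name("my-repo:utils-pkg"))  # ('my-repo', 'utils-pkg')
cycle = find_cycle({"a": ["b"], "b": ["c"], "c": ["a"]})
print(" -> ".join(cycle))  # a -> b -> c -> a, the same shape as the cycle errors above
```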
diff --git a/hatch_validator/package/v1_2_0/schema_validation.py b/hatch_validator/package/v1_2_0/schema_validation.py
index 94bffa3..cc17759 100644
--- a/hatch_validator/package/v1_2_0/schema_validation.py
+++ b/hatch_validator/package/v1_2_0/schema_validation.py
@@ -18,14 +18,16 @@
class SchemaValidation(SchemaValidationStrategy):
"""Strategy for validating metadata against JSON schema for v1.2.0."""
-
- def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_schema(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate metadata against v1.2.0 schema.
-
+
Args:
metadata (Dict): Package metadata to validate against schema
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether schema validation was successful
@@ -36,18 +38,22 @@ def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[b
if package_service is None:
package_service = PackageService(metadata)
schema_version = package_service.get_field("package_schema_version")
- schema = get_package_schema(version=schema_version, force_update=context.force_schema_update)
+ schema = get_package_schema(
+ version=schema_version, force_update=context.force_schema_update
+ )
if not schema:
logger.error(f"Failed to load package schema version {schema_version}")
- return False, [f"Failed to load package schema version {schema_version}"]
+ return False, [
+ f"Failed to load package schema version {schema_version}"
+ ]
# Validate against schema
jsonschema.validate(instance=metadata, schema=schema)
return True, []
-
+
except jsonschema.exceptions.ValidationError as e:
logger.error(f"Schema validation error: {e.message}")
return False, [f"Schema validation error: {e.message}"]
except Exception as e:
logger.error(f"Error during schema validation: {str(e)}")
- return False, [f"Error during schema validation: {str(e)}"]
\ No newline at end of file
+ return False, [f"Error during schema validation: {str(e)}"]
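The schema strategy above is a thin wrapper around the `jsonschema` library: a raised `ValidationError` is converted into the `(False, [message])` result. A minimal self-contained illustration, using a made-up stand-in schema rather than the real one loaded by `get_package_schema`:

```python
import jsonschema

# Made-up stand-in schema; the real one is fetched by get_package_schema().
toy_schema = {
    "type": "object",
    "required": ["package_schema_version", "name"],
    "properties": {
        "package_schema_version": {"type": "string"},
        "name": {"type": "string"},
    },
}

metadata = {"package_schema_version": "1.2.0"}  # "name" is missing

try:
    jsonschema.validate(instance=metadata, schema=toy_schema)
    result = (True, [])
except jsonschema.exceptions.ValidationError as e:
    # Mirrors the strategy above: the exception message becomes the error entry.
    result = (False, [f"Schema validation error: {e.message}"])

print(result)  # (False, ["Schema validation error: 'name' is a required property"])
```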
diff --git a/hatch_validator/package/v1_2_0/validator.py b/hatch_validator/package/v1_2_0/validator.py
index 81a2ce8..4d7b4a4 100644
--- a/hatch_validator/package/v1_2_0/validator.py
+++ b/hatch_validator/package/v1_2_0/validator.py
@@ -22,59 +22,61 @@
class Validator(ValidatorBase):
"""Validator for packages using schema version 1.2.0.
-
+
Schema version 1.2.0 introduces a unified dependencies structure
with support for hatch, python, system, and docker dependencies.
This validator handles the new dependency structure and delegates
unchanged validation logic (entry point, tools) to the previous validator in the chain.
"""
-
+
def __init__(self, next_validator=None):
"""Initialize the v1.2.0 validator with strategies.
-
+
Args:
next_validator (Validator, optional): Next validator in chain. Defaults to None.
"""
super().__init__(next_validator)
self.schema_strategy = SchemaValidation()
self.dependency_strategy = DependencyValidation()
-
+
def can_handle(self, schema_version: str) -> bool:
"""Determine if this validator can handle the given schema version.
-
+
Args:
schema_version (str): Schema version to check
-
+
Returns:
bool: True if this validator can handle the schema version
"""
return schema_version == "1.2.0"
-
- def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validation entry point for packages following schema v1.2.0.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources and state
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether validation was successful
- List[str]: List of validation errors
"""
schema_version = metadata.get("package_schema_version", "")
-
+
# Check if we can handle this version
if not self.can_handle(schema_version):
if self.next_validator:
return self.next_validator.validate(metadata, context)
return False, [f"Unsupported schema version: {schema_version}"]
-
- logger.info(f"Validating package metadata using v1.2.0 validator")
-
+
+ logger.info("Validating package metadata using v1.2.0 validator")
+
all_errors = []
is_valid = True
-
+
# 1. Validate against JSON schema
schema_valid, schema_errors = self.validate_schema(metadata, context)
if not schema_valid:
@@ -82,52 +84,56 @@ def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, Li
is_valid = False
# If schema validation fails, don't continue with other validations
return is_valid, all_errors
-
+
# 2. Validate dependencies (major change in v1.2.0)
deps_valid, deps_errors = self.validate_dependencies(metadata, context)
if not deps_valid:
all_errors.extend(deps_errors)
is_valid = False
-
+
# 3. Validate entry point (if package directory is provided)
if context.package_dir:
entry_valid, entry_errors = self.validate_entry_point(metadata, context)
if not entry_valid:
all_errors.extend(entry_errors)
is_valid = False
-
+
# 4. Validate tools (if entry point validation passed)
if entry_valid:
tools_valid, tools_errors = self.validate_tools(metadata, context)
if not tools_valid:
all_errors.extend(tools_errors)
is_valid = False
-
+
return is_valid, all_errors
-
- def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_schema(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate metadata against schema for v1.2.0.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
logger.debug("Validating package metadata against v1.2.0 schema")
return self.schema_strategy.validate_schema(metadata, context)
-
- def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_dependencies(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate dependencies for v1.2.0.
-
+
This method implements the new unified dependencies structure
introduced in v1.2.0.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
diff --git a/hatch_validator/package/v1_2_1/accessor.py b/hatch_validator/package/v1_2_1/accessor.py
index e4f99df..17203a4 100644
--- a/hatch_validator/package/v1_2_1/accessor.py
+++ b/hatch_validator/package/v1_2_1/accessor.py
@@ -6,29 +6,32 @@
"""
import logging
-from hatch_validator.core.pkg_accessor_base import HatchPkgAccessor as HatchPkgAccessorBase
+from hatch_validator.core.pkg_accessor_base import (
+ HatchPkgAccessor as HatchPkgAccessorBase,
+)
logger = logging.getLogger("hatch.package.v1_2_1.accessor")
+
class HatchPkgAccessor(HatchPkgAccessorBase):
"""Metadata accessor for Hatch package schema version 1.2.1.
-
+
Adapts access to metadata fields for the v1.2.1 schema structure,
specifically handling dual entry point configuration while delegating
unchanged concerns to the v1.2.0 accessor.
"""
-
+
def can_handle(self, schema_version: str) -> bool:
"""Check if this accessor can handle schema version 1.2.1.
-
+
Args:
schema_version (str): Schema version to check
-
+
Returns:
bool: True if schema_version is '1.2.1'
"""
return schema_version == "1.2.1"
-
+
def get_entry_point(self, metadata):
"""Get the full entry point dict for v1.2.1.
@@ -42,7 +45,7 @@ def get_entry_point(self, metadata):
Returns:
dict: Dual entry point dict with mcp_server and hatch_mcp_server keys
"""
- return metadata.get('entry_point', {})
+ return metadata.get("entry_point", {})
def get_mcp_entry_point(self, metadata):
"""Get MCP entry point from metadata.
@@ -53,8 +56,8 @@ def get_mcp_entry_point(self, metadata):
Returns:
str: MCP entry point value (e.g., "mcp_server.py")
"""
- entry_point = metadata.get('entry_point', {})
- return entry_point.get('mcp_server') if isinstance(entry_point, dict) else None
+ entry_point = metadata.get("entry_point", {})
+ return entry_point.get("mcp_server") if isinstance(entry_point, dict) else None
def get_hatch_mcp_entry_point(self, metadata):
"""Get Hatch MCP entry point from metadata.
@@ -65,4 +68,4 @@ def get_hatch_mcp_entry_point(self, metadata):
Returns:
Any: Hatch MCP entry point value
"""
- return metadata.get('entry_point').get('hatch_mcp_server')
+ return metadata.get("entry_point").get("hatch_mcp_server")
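A hypothetical metadata fragment for the dual entry point accessed above; the `mcp_server` filename matches the example in the docstring, while the wrapper filename is illustrative. Note that `get_hatch_mcp_entry_point` assumes `entry_point` is present, which schema validation guarantees for v1.2.1 packages.

```python
metadata = {
    "package_schema_version": "1.2.1",
    "entry_point": {
        "mcp_server": "mcp_server.py",               # FastMCP server
        "hatch_mcp_server": "hatch_mcp_server.py",   # HatchMCP wrapper (illustrative name)
    },
}

entry_point = metadata.get("entry_point", {})
mcp_file = entry_point.get("mcp_server") if isinstance(entry_point, dict) else None
wrapper_file = metadata.get("entry_point").get("hatch_mcp_server")  # assumes the key exists
print(mcp_file, wrapper_file)  # mcp_server.py hatch_mcp_server.py
```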
diff --git a/hatch_validator/package/v1_2_1/entry_point_validation.py b/hatch_validator/package/v1_2_1/entry_point_validation.py
index 620f5b1..1279eb7 100644
--- a/hatch_validator/package/v1_2_1/entry_point_validation.py
+++ b/hatch_validator/package/v1_2_1/entry_point_validation.py
@@ -6,8 +6,7 @@
import ast
import logging
-from pathlib import Path
-from typing import Dict, List, Tuple, Set
+from typing import Dict, List, Tuple
from hatch_validator.core.validation_strategy import EntryPointValidationStrategy
from hatch_validator.core.validation_context import ValidationContext
@@ -19,53 +18,59 @@
class EntryPointValidation(EntryPointValidationStrategy):
"""Strategy for validating dual entry point files for v1.2.1.
-
- This strategy validates that both mcp_server (FastMCP server) and
+
+ This strategy validates that both mcp_server (FastMCP server) and
hatch_mcp_server (HatchMCP wrapper) files exist and that the wrapper
properly imports from the FastMCP server.
"""
-
- def validate_entry_point(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_entry_point(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate dual entry point according to v1.2.1 schema.
-
+
Args:
metadata (Dict): Package metadata containing entry point information
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether entry point validation was successful
- List[str]: List of entry point validation errors
"""
- entry_point = metadata.get('entry_point')
+ entry_point = metadata.get("entry_point")
if not entry_point:
logger.error("No entry_point specified in metadata")
return False, ["No entry_point specified in metadata"]
-
+
# Schema validation ensures this is a dict, but double-check
if not isinstance(entry_point, dict):
logger.error("entry_point must be an object for schema v1.2.1")
return False, ["entry_point must be an object for schema v1.2.1"]
-
+
if not context.package_dir:
logger.error("Package directory not provided for entry point validation")
return False, ["Package directory not provided for entry point validation"]
-
+
errors = []
-
+
# Get both entry point files
- mcp_server = entry_point.get('mcp_server')
- hatch_mcp_server = entry_point.get('hatch_mcp_server')
-
+ mcp_server = entry_point.get("mcp_server")
+ hatch_mcp_server = entry_point.get("hatch_mcp_server")
+
# Validate both files exist
- mcp_server_valid, mcp_server_errors = self._validate_file_exists(mcp_server, context, "FastMCP server")
+ mcp_server_valid, mcp_server_errors = self._validate_file_exists(
+ mcp_server, context, "FastMCP server"
+ )
if not mcp_server_valid:
errors.extend(mcp_server_errors)
-
- hatch_wrapper_valid, hatch_wrapper_errors = self._validate_file_exists(hatch_mcp_server, context, "HatchMCP wrapper")
+
+ hatch_wrapper_valid, hatch_wrapper_errors = self._validate_file_exists(
+ hatch_mcp_server, context, "HatchMCP wrapper"
+ )
if not hatch_wrapper_valid:
errors.extend(hatch_wrapper_errors)
-
+
# Only validate import relationship if both files exist
if mcp_server_valid and hatch_wrapper_valid:
import_valid, import_errors = self._validate_import_relationship(
@@ -73,22 +78,24 @@ def validate_entry_point(self, metadata: Dict, context: ValidationContext) -> Tu
)
if not import_valid:
errors.extend(import_errors)
-
+
if errors:
logger.error(f"Entry point validation failed with {len(errors)} errors")
return False, errors
-
+
logger.debug("Dual entry point validation successful")
return True, []
-
- def _validate_file_exists(self, filename: str, context: ValidationContext, file_type: str) -> Tuple[bool, List[str]]:
+
+ def _validate_file_exists(
+ self, filename: str, context: ValidationContext, file_type: str
+ ) -> Tuple[bool, List[str]]:
"""Validate that a file exists and is accessible.
-
+
Args:
filename (str): Name of the file to validate
context (ValidationContext): Validation context with package directory
file_type (str): Type of file for error messages
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
@@ -96,66 +103,70 @@ def _validate_file_exists(self, filename: str, context: ValidationContext, file_
error_msg = f"{file_type} filename not specified"
logger.error(error_msg)
return False, [error_msg]
-
+
file_path = context.package_dir / filename
-
+
if not file_path.exists():
error_msg = f"{file_type} file '{filename}' does not exist"
logger.error(error_msg)
return False, [error_msg]
-
+
if not file_path.is_file():
error_msg = f"{file_type} '{filename}' is not a file"
logger.error(error_msg)
return False, [error_msg]
-
+
# Validate it's a Python file
- if not filename.endswith('.py'):
+ if not filename.endswith(".py"):
error_msg = f"{file_type} '{filename}' must be a Python file (.py)"
logger.error(error_msg)
return False, [error_msg]
-
+
logger.debug(f"{file_type} file '{filename}' exists and is valid")
return True, []
-
- def _validate_import_relationship(self, mcp_server: str, hatch_wrapper: str, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def _validate_import_relationship(
+ self, mcp_server: str, hatch_wrapper: str, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate that HatchMCP wrapper imports from FastMCP server.
-
+
Args:
mcp_server (str): FastMCP server filename
hatch_wrapper (str): HatchMCP wrapper filename
context (ValidationContext): Validation context with package directory
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
try:
wrapper_path = context.package_dir / hatch_wrapper
- with open(wrapper_path, 'r', encoding='utf-8') as f:
+ with open(wrapper_path, "r", encoding="utf-8") as f:
source_code = f.read()
-
+
# Parse the wrapper file
tree = ast.parse(source_code)
-
+
# Expected import: from mcp_server import mcp (without .py extension)
- expected_module = mcp_server.replace('.py', '')
-
+ expected_module = mcp_server.replace(".py", "")
+
# Look for the import statement
for node in ast.walk(tree):
if isinstance(node, ast.ImportFrom):
if node.module == expected_module:
# Check if 'mcp' is imported
for alias in node.names:
- if alias.name == 'mcp':
- logger.debug(f"Found valid import: from {expected_module} import mcp")
+ if alias.name == "mcp":
+ logger.debug(
+ f"Found valid import: from {expected_module} import mcp"
+ )
return True, []
-
+
# If we get here, the import wasn't found
error_msg = f"HatchMCP wrapper must import 'mcp' from '{expected_module}'"
suggestion = f"Expected: from {expected_module} import mcp"
logger.error(error_msg)
return False, [error_msg, suggestion]
-
+
except SyntaxError as e:
error_msg = f"Syntax error in HatchMCP wrapper '{hatch_wrapper}' at line {e.lineno}: {e.msg}"
logger.error(error_msg)
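The import-relationship check above boils down to one AST query: the wrapper must contain `from <mcp_server module> import mcp`, where the module name is the `mcp_server` filename without its `.py` suffix. A small self-contained version of that check:

```python
import ast

# If entry_point.mcp_server is "mcp_server.py", the wrapper is expected to
# contain this kind of import (module name = filename without ".py"):
wrapper_source = "from mcp_server import mcp\n"

tree = ast.parse(wrapper_source)
found = any(
    isinstance(node, ast.ImportFrom)
    and node.module == "mcp_server"
    and any(alias.name == "mcp" for alias in node.names)
    for node in ast.walk(tree)
)
print(found)  # True -> the import-relationship check above would pass
```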
diff --git a/hatch_validator/package/v1_2_1/schema_validation.py b/hatch_validator/package/v1_2_1/schema_validation.py
index 39794e6..4c069c5 100644
--- a/hatch_validator/package/v1_2_1/schema_validation.py
+++ b/hatch_validator/package/v1_2_1/schema_validation.py
@@ -20,18 +20,20 @@
class SchemaValidation(SchemaValidationStrategy):
"""Strategy for validating metadata against v1.2.1 schema.
-
+
This strategy validates packages against the v1.2.1 schema which requires
dual entry point configuration with mcp_server and hatch_mcp_server fields.
"""
-
- def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_schema(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate metadata against v1.2.1 schema.
-
+
Args:
metadata (Dict): Package metadata to validate against schema
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether schema validation was successful
@@ -39,17 +41,21 @@ def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[b
"""
try:
# Load schema for v1.2.1
- schema = get_package_schema(version="1.2.1", force_update=context.force_schema_update)
+ schema = get_package_schema(
+ version="1.2.1", force_update=context.force_schema_update
+ )
if not schema:
error_msg = "Failed to load package schema version 1.2.1"
logger.error(error_msg)
return False, [error_msg]
-
+
# Validate against schema
jsonschema.validate(instance=metadata, schema=schema)
- logger.debug("Package metadata successfully validated against v1.2.1 schema")
+ logger.debug(
+ "Package metadata successfully validated against v1.2.1 schema"
+ )
return True, []
-
+
except jsonschema.ValidationError as e:
error_msg = f"Schema validation failed: {e.message}"
if e.absolute_path:
diff --git a/hatch_validator/package/v1_2_1/tools_validation.py b/hatch_validator/package/v1_2_1/tools_validation.py
index 0916b45..e23a1fd 100644
--- a/hatch_validator/package/v1_2_1/tools_validation.py
+++ b/hatch_validator/package/v1_2_1/tools_validation.py
@@ -19,78 +19,92 @@
class ToolsValidation(ToolsValidationStrategy):
"""Strategy for validating tools with FastMCP server enforcement for v1.2.1.
-
+
This strategy enforces that ALL tools declared in metadata must exist in the
FastMCP server file with proper @mcp.tool() decorators. This ensures tools
are available when the FastMCP server is imported independently.
"""
-
- def validate_tools(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_tools(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate tools according to v1.2.1 schema with FastMCP server enforcement.
-
+
Args:
metadata (Dict): Package metadata containing tool declarations
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether tool validation was successful
- List[str]: List of tool validation errors
"""
- tools = metadata.get('tools', [])
+ tools = metadata.get("tools", [])
if not tools:
logger.debug("No tools declared in metadata")
return True, []
-
- entry_point = metadata.get('entry_point')
+
+ entry_point = metadata.get("entry_point")
if not entry_point or not isinstance(entry_point, dict):
logger.error("Dual entry point configuration required for tool validation")
- return False, ["Dual entry point configuration required for tool validation"]
-
- mcp_server_file = entry_point.get('mcp_server')
+ return False, [
+ "Dual entry point configuration required for tool validation"
+ ]
+
+ mcp_server_file = entry_point.get("mcp_server")
if not mcp_server_file:
logger.error("FastMCP server file not specified in entry point")
return False, ["FastMCP server file not specified in entry point"]
-
+
if not context.package_dir:
logger.error("Package directory not provided for tool validation")
return False, ["Package directory not provided for tool validation"]
-
+
# Extract tools from FastMCP server file
- server_tools, extraction_errors = self._extract_fastmcp_tools(mcp_server_file, context)
-
+ server_tools, extraction_errors = self._extract_fastmcp_tools(
+ mcp_server_file, context
+ )
+
if extraction_errors:
- logger.error(f"Failed to extract tools from FastMCP server: {extraction_errors}")
+ logger.error(
+ f"Failed to extract tools from FastMCP server: {extraction_errors}"
+ )
return False, extraction_errors
-
+
# Validate all declared tools exist in FastMCP server
missing_tools = []
for tool in tools:
- tool_name = tool.get('name')
+ tool_name = tool.get("name")
if not tool_name:
logger.error(f"Tool metadata missing name: {tool}")
missing_tools.append("Tool missing name in metadata")
continue
-
+
if tool_name not in server_tools:
- logger.error(f"Tool '{tool_name}' not found in FastMCP server '{mcp_server_file}'")
- missing_tools.append(f"Tool '{tool_name}' not found in FastMCP server '{mcp_server_file}'")
-
+ logger.error(
+ f"Tool '{tool_name}' not found in FastMCP server '{mcp_server_file}'"
+ )
+ missing_tools.append(
+ f"Tool '{tool_name}' not found in FastMCP server '{mcp_server_file}'"
+ )
+
if missing_tools:
error_msg = "Tools must be defined in FastMCP server to ensure availability when imported independently"
missing_tools.append(error_msg)
return False, missing_tools
-
+
logger.debug(f"All {len(tools)} declared tools found in FastMCP server")
return True, []
-
- def _extract_fastmcp_tools(self, server_file: str, context: ValidationContext) -> Tuple[Set[str], List[str]]:
+
+ def _extract_fastmcp_tools(
+ self, server_file: str, context: ValidationContext
+ ) -> Tuple[Set[str], List[str]]:
"""Extract tool names from @mcp.tool() decorators in FastMCP server file.
-
+
Args:
server_file (str): FastMCP server filename
context (ValidationContext): Validation context with package directory
-
+
Returns:
Tuple[Set[str], List[str]]: Set of tool names and list of errors
"""
@@ -100,13 +114,13 @@ def _extract_fastmcp_tools(self, server_file: str, context: ValidationContext) -
error_msg = f"FastMCP server file '{server_file}' not found"
logger.error(error_msg)
return set(), [error_msg]
-
- with open(file_path, 'r', encoding='utf-8') as f:
+
+ with open(file_path, "r", encoding="utf-8") as f:
source_code = f.read()
-
+
tree = ast.parse(source_code)
tool_names = set()
-
+
for node in ast.walk(tree):
if isinstance(node, ast.FunctionDef):
# Check for @mcp.tool() decorator
@@ -115,10 +129,12 @@ def _extract_fastmcp_tools(self, server_file: str, context: ValidationContext) -
tool_names.add(node.name)
logger.debug(f"Found tool '{node.name}' in FastMCP server")
break
-
- logger.debug(f"Extracted {len(tool_names)} tools from FastMCP server: {tool_names}")
+
+ logger.debug(
+ f"Extracted {len(tool_names)} tools from FastMCP server: {tool_names}"
+ )
return tool_names, []
-
+
except SyntaxError as e:
error_msg = f"Syntax error in FastMCP server '{server_file}' at line {e.lineno}: {e.msg}"
logger.error(error_msg)
@@ -131,27 +147,31 @@ def _extract_fastmcp_tools(self, server_file: str, context: ValidationContext) -
error_msg = f"Error parsing FastMCP server '{server_file}': {str(e)}"
logger.error(error_msg)
return set(), [error_msg]
-
+
def _is_mcp_tool_decorator(self, decorator) -> bool:
"""Check if decorator is @mcp.tool() or @mcp.tool.
-
+
Args:
decorator: AST decorator node
-
+
Returns:
bool: True if decorator is an MCP tool decorator
"""
# Handle @mcp.tool()
if isinstance(decorator, ast.Call):
if isinstance(decorator.func, ast.Attribute):
- return (decorator.func.attr == 'tool' and
- isinstance(decorator.func.value, ast.Name) and
- decorator.func.value.id == 'mcp')
-
+ return (
+ decorator.func.attr == "tool"
+ and isinstance(decorator.func.value, ast.Name)
+ and decorator.func.value.id == "mcp"
+ )
+
# Handle @mcp.tool
if isinstance(decorator, ast.Attribute):
- return (decorator.attr == 'tool' and
- isinstance(decorator.value, ast.Name) and
- decorator.value.id == 'mcp')
-
+ return (
+ decorator.attr == "tool"
+ and isinstance(decorator.value, ast.Name)
+ and decorator.value.id == "mcp"
+ )
+
return False
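For context, here is a sketch of a FastMCP server file that would satisfy the decorator scan above. It assumes the `FastMCP` class from the MCP Python SDK (`mcp.server.fastmcp`); the validator itself only inspects the source with `ast`, so all it requires is a module-level `mcp` object and `@mcp.tool()` (or `@mcp.tool`) decorated functions whose names match the tools declared in the metadata.

```python
# mcp_server.py: sketch of a FastMCP server that passes the tool scan above,
# assuming the FastMCP class from the MCP Python SDK.
from mcp.server.fastmcp import FastMCP

mcp = FastMCP("example-server")  # module-level name "mcp", as the wrapper import expects


@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers."""
    return a + b


@mcp.tool()
def greet(name: str) -> str:
    """Return a greeting."""
    return f"Hello, {name}!"
```

The matching metadata would then declare, among whatever other fields the schema requires, `"tools": [{"name": "add"}, {"name": "greet"}]`, since the scan compares decorated function names against each declared tool's `name`.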
diff --git a/hatch_validator/package/v1_2_1/validator.py b/hatch_validator/package/v1_2_1/validator.py
index 3c5ed82..7c14498 100644
--- a/hatch_validator/package/v1_2_1/validator.py
+++ b/hatch_validator/package/v1_2_1/validator.py
@@ -26,16 +26,16 @@
class Validator(ValidatorBase):
"""Validator for packages using schema version 1.2.1.
-
+
Schema version 1.2.1 introduces dual entry point support requiring both
mcp_server (FastMCP server) and hatch_mcp_server (HatchMCP wrapper) files.
This validator implements enhanced entry point and tools validation while
delegating unchanged validation logic (dependencies) to the previous validator in the chain.
"""
-
+
def __init__(self, next_validator=None):
"""Initialize the v1.2.1 validator with strategies.
-
+
Args:
next_validator (Validator, optional): Next validator in chain. Defaults to None.
"""
@@ -43,43 +43,45 @@ def __init__(self, next_validator=None):
self.schema_strategy = SchemaValidation()
self.entry_point_strategy = EntryPointValidation()
self.tools_strategy = ToolsValidation()
-
+
def can_handle(self, schema_version: str) -> bool:
"""Check if this validator can handle the given schema version.
-
+
Args:
schema_version (str): Schema version to check
-
+
Returns:
bool: True if this validator can handle the version, False otherwise
"""
return schema_version == "1.2.1"
-
- def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validation entry point for packages following schema v1.2.1.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources and state
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether validation was successful
- List[str]: List of validation errors
"""
schema_version = metadata.get("package_schema_version", "")
-
+
# Check if we can handle this version
if not self.can_handle(schema_version):
if self.next_validator:
return self.next_validator.validate(metadata, context)
return False, [f"Unsupported schema version: {schema_version}"]
-
- logger.info(f"Validating package metadata using v1.2.1 validator")
-
+
+ logger.info("Validating package metadata using v1.2.1 validator")
+
all_errors = []
is_valid = True
-
+
# 1. Validate against JSON schema
schema_valid, schema_errors = self.validate_schema(metadata, context)
if not schema_valid:
@@ -87,54 +89,62 @@ def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, Li
is_valid = False
# If schema validation fails, don't continue with other validations
return is_valid, all_errors
-
+
# 2. Validate dependencies (delegate to v1.2.0 - unchanged)
deps_valid, deps_errors = self.validate_dependencies(metadata, context)
if not deps_valid:
all_errors.extend(deps_errors)
is_valid = False
-
+
# 3. Validate entry point (dual entry point validation)
- entry_point_valid, entry_point_errors = self.validate_entry_point(metadata, context)
+ entry_point_valid, entry_point_errors = self.validate_entry_point(
+ metadata, context
+ )
if not entry_point_valid:
all_errors.extend(entry_point_errors)
is_valid = False
-
+
# 4. Validate tools (enhanced tools validation with FastMCP server enforcement)
tools_valid, tools_errors = self.validate_tools(metadata, context)
if not tools_valid:
all_errors.extend(tools_errors)
is_valid = False
-
+
if is_valid:
logger.info("Package metadata validation successful for v1.2.1")
else:
- logger.warning(f"Package metadata validation failed for v1.2.1: {len(all_errors)} errors")
-
+ logger.warning(
+ f"Package metadata validation failed for v1.2.1: {len(all_errors)} errors"
+ )
+
return is_valid, all_errors
-
- def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_schema(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate metadata against schema for v1.2.1.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
logger.debug("Validating package metadata against v1.2.1 schema")
return self.schema_strategy.validate_schema(metadata, context)
-
- def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_dependencies(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate dependencies for v1.2.1.
-
+
Dependencies structure is unchanged from v1.2.0, so delegate to the next validator.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
@@ -143,7 +153,9 @@ def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> T
return self.next_validator.validate_dependencies(metadata, context)
return False, ["No validator available for dependency validation"]
- def validate_entry_point(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def validate_entry_point(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate dual entry point for v1.2.1.
Args:
@@ -156,7 +168,9 @@ def validate_entry_point(self, metadata: Dict, context: ValidationContext) -> Tu
logger.debug("Validating dual entry point for v1.2.1")
return self.entry_point_strategy.validate_entry_point(metadata, context)
- def validate_tools(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def validate_tools(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate tools with FastMCP server enforcement for v1.2.1.
Args:
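As a rough wiring sketch (under the assumption that the chain is built newest to oldest, and ignoring whatever factory normally assembles it), dependency validation for a v1.2.1 package ends up in the v1.2.0 strategy because the v1.2.1 validator keeps no dependency strategy of its own:

```python
from hatch_validator.package.v1_2_0.validator import Validator as V120Validator
from hatch_validator.package.v1_2_1.validator import Validator as V121Validator

# v1.2.1 forwards validate_dependencies() to the next validator in the chain;
# in the full chain the v1.2.0 validator would in turn point at v1.1.0.
v121 = V121Validator(next_validator=V120Validator())
```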
diff --git a/hatch_validator/package/v1_2_2/__init__.py b/hatch_validator/package/v1_2_2/__init__.py
index b155a4e..f137b5d 100644
--- a/hatch_validator/package/v1_2_2/__init__.py
+++ b/hatch_validator/package/v1_2_2/__init__.py
@@ -3,4 +3,3 @@
This package contains the validator and strategies for schema version 1.2.2,
which introduces conda package manager support for Python dependencies.
"""
-
diff --git a/hatch_validator/package/v1_2_2/accessor.py b/hatch_validator/package/v1_2_2/accessor.py
index 9776dc7..206901e 100644
--- a/hatch_validator/package/v1_2_2/accessor.py
+++ b/hatch_validator/package/v1_2_2/accessor.py
@@ -8,10 +8,13 @@
import logging
from typing import Dict, Any
-from hatch_validator.core.pkg_accessor_base import HatchPkgAccessor as HatchPkgAccessorBase
+from hatch_validator.core.pkg_accessor_base import (
+ HatchPkgAccessor as HatchPkgAccessorBase,
+)
logger = logging.getLogger("hatch.package.v1_2_2.accessor")
+
class HatchPkgAccessor(HatchPkgAccessorBase):
"""Metadata accessor for Hatch package schema version 1.2.2.
@@ -43,5 +46,4 @@ def get_python_dependency_channel(self, dependency: Dict[str, Any]) -> Any:
Returns:
Any: Channel value (e.g., "conda-forge", "bioconda"), or None if not specified
"""
- return dependency.get('channel')
-
+ return dependency.get("channel")
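Hypothetical v1.2.2 Python dependency entries showing the two new optional fields; the package names and constraints are illustrative, and `package_manager` defaults to `pip` when omitted, as the validation code below confirms.

```python
python_deps = [
    {"name": "requests", "version_constraint": ">=2.31"},                      # pip by default
    {"name": "numpy", "package_manager": "conda", "channel": "conda-forge"},   # conda with channel
]

# get_python_dependency_channel() on the accessor above just reads the optional field:
for dep in python_deps:
    print(dep["name"], dep.get("channel"))  # requests None / numpy conda-forge
```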
diff --git a/hatch_validator/package/v1_2_2/dependency_validation.py b/hatch_validator/package/v1_2_2/dependency_validation.py
index 5138499..216216a 100644
--- a/hatch_validator/package/v1_2_2/dependency_validation.py
+++ b/hatch_validator/package/v1_2_2/dependency_validation.py
@@ -7,14 +7,17 @@
import json
import logging
-from typing import Dict, List, Tuple, Optional, Set
+from typing import Dict, List, Tuple, Optional
from pathlib import Path
-from hatch_validator.core.validation_strategy import DependencyValidationStrategy, ValidationError
+from hatch_validator.core.validation_strategy import (
+ DependencyValidationStrategy,
+ ValidationError,
+)
from hatch_validator.core.validation_context import ValidationContext
from hatch_validator.utils.hatch_dependency_graph import HatchDependencyGraphBuilder
from hatch_validator.utils.version_utils import VersionConstraintValidator
-from hatch_validator.registry.registry_service import RegistryService, RegistryError
+from hatch_validator.registry.registry_service import RegistryService
from hatch_validator.package.package_service import PackageService
logger = logging.getLogger("hatch.dependency_validation_v1_2_2")
@@ -23,34 +26,36 @@
class DependencyValidation(DependencyValidationStrategy):
"""Strategy for validating dependencies according to v1.2.2 schema.
-
+
This implementation extends v1.2.0 dependency validation with conda
package manager support for Python dependencies:
- dependencies.hatch: Array of Hatch package dependencies (unchanged)
- dependencies.python: Array of Python package dependencies (enhanced with conda support)
- dependencies.system: Array of System package dependencies (unchanged)
- dependencies.docker: Array of Docker image dependencies (unchanged)
-
+
New in v1.2.2:
- Python dependencies can specify package_manager: "pip" or "conda"
- Conda dependencies can specify a channel (e.g., "conda-forge", "bioconda")
"""
-
+
def __init__(self):
"""Initialize the dependency validation strategy."""
self.version_validator = VersionConstraintValidator()
self.registry_service: Optional[RegistryService] = None
-
- def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_dependencies(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate dependencies according to v1.2.2 schema.
-
+
        In v1.2.2, the dependencies structure is the same as in v1.2.0, but Python
        dependencies now support the conda package manager and channel specification.
-
+
Args:
metadata (Dict): Package metadata containing dependency information
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether dependency validation was successful
@@ -62,35 +67,37 @@ def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> T
if package_service is None:
# Create a package service with the provided metadata
package_service = PackageService(metadata)
-
+
# Store package service for use in helper methods
self.package_service = package_service
-
+
# Initialize registry service from the context if available
# Get registry data from context
registry_data = context.registry_data
registry_service = context.get_data("registry_service", None)
-
+
# Check if registry data is missing
if registry_data is None:
logger.error("No registry data available for dependency validation")
- raise ValidationError("No registry data available for dependency validation")
-
+ raise ValidationError(
+ "No registry data available for dependency validation"
+ )
+
if registry_service is None:
# Create a registry service with the provided data
registry_service = RegistryService(registry_data)
-
+
# Store registry service for use in helper methods
self.registry_service = registry_service
-
+
errors = []
is_valid = True
-
+
# Get dependencies from v1.2.2 unified format (same as v1.2.0)
dependencies = package_service.get_dependencies()
- hatch_dependencies = dependencies.get('hatch', [])
- python_dependencies = dependencies.get('python', [])
-
+ hatch_dependencies = dependencies.get("hatch", [])
+ python_dependencies = dependencies.get("python", [])
+
# Validate Hatch dependencies (unchanged from v1.2.0)
if hatch_dependencies:
hatch_valid, hatch_errors = self._validate_hatch_dependencies(
@@ -99,7 +106,7 @@ def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> T
if not hatch_valid:
errors.extend(hatch_errors)
is_valid = False
-
+
# Validate Python dependencies (enhanced with conda support)
if python_dependencies:
python_valid, python_errors = self._validate_python_dependencies(
@@ -108,18 +115,19 @@ def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> T
if not python_valid:
errors.extend(python_errors)
is_valid = False
-
+
except Exception as e:
logger.error(f"Error during dependency validation: {e}")
errors.append(f"Error during dependency validation: {e}")
is_valid = False
-
+
logger.debug(f"Dependency validation result: {is_valid}, errors: {errors}")
-
+
return is_valid, errors
- def _validate_python_dependencies(self, python_dependencies: List[Dict],
- context: ValidationContext) -> Tuple[bool, List[str]]:
+ def _validate_python_dependencies(
+ self, python_dependencies: List[Dict], context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate Python package dependencies with conda support.
Args:
@@ -133,15 +141,18 @@ def _validate_python_dependencies(self, python_dependencies: List[Dict],
is_valid = True
for dep in python_dependencies:
- dep_valid, dep_errors = self._validate_single_python_dependency(dep, context)
+ dep_valid, dep_errors = self._validate_single_python_dependency(
+ dep, context
+ )
if not dep_valid:
errors.extend(dep_errors)
is_valid = False
return is_valid, errors
- def _validate_single_python_dependency(self, dep: Dict,
- context: ValidationContext) -> Tuple[bool, List[str]]:
+ def _validate_single_python_dependency(
+ self, dep: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate a single Python dependency with conda support.
Args:
@@ -154,44 +165,57 @@ def _validate_single_python_dependency(self, dep: Dict,
errors = []
is_valid = True
- dep_name = dep.get('name')
+ dep_name = dep.get("name")
if not dep_name:
errors.append("Python dependency missing name")
return False, errors
# Validate version constraint if present
- version_constraint = dep.get('version_constraint')
+ version_constraint = dep.get("version_constraint")
if version_constraint:
- constraint_valid, constraint_error = self.version_validator.validate_constraint(version_constraint)
+ (
+ constraint_valid,
+ constraint_error,
+ ) = self.version_validator.validate_constraint(version_constraint)
if not constraint_valid:
- errors.append(f"Invalid version constraint for Python package '{dep_name}': {constraint_error}")
+ errors.append(
+ f"Invalid version constraint for Python package '{dep_name}': {constraint_error}"
+ )
is_valid = False
# Validate package_manager field (new in v1.2.2)
- package_manager = dep.get('package_manager', 'pip') # Default to pip
- if package_manager not in ['pip', 'conda']:
- errors.append(f"Invalid package_manager '{package_manager}' for Python package '{dep_name}'. Must be 'pip' or 'conda'")
+ package_manager = dep.get("package_manager", "pip") # Default to pip
+ if package_manager not in ["pip", "conda"]:
+ errors.append(
+ f"Invalid package_manager '{package_manager}' for Python package '{dep_name}'. Must be 'pip' or 'conda'"
+ )
is_valid = False
# Validate channel field (new in v1.2.2)
- channel = dep.get('channel')
+ channel = dep.get("channel")
if channel is not None:
# Channel should only be specified for conda packages
- if package_manager != 'conda':
- errors.append(f"Channel '{channel}' specified for Python package '{dep_name}' with package_manager '{package_manager}'. Channel is only valid for conda packages")
+ if package_manager != "conda":
+ errors.append(
+ f"Channel '{channel}' specified for Python package '{dep_name}' with package_manager '{package_manager}'. Channel is only valid for conda packages"
+ )
is_valid = False
else:
# Validate channel format: ^[a-zA-Z0-9_\-]+$
import re
- channel_pattern = r'^[a-zA-Z0-9_\-]+$'
+
+ channel_pattern = r"^[a-zA-Z0-9_\-]+$"
if not re.match(channel_pattern, channel):
- errors.append(f"Invalid channel format '{channel}' for Python package '{dep_name}'. Must match pattern: {channel_pattern}")
+ errors.append(
+ f"Invalid channel format '{channel}' for Python package '{dep_name}'. Must match pattern: {channel_pattern}"
+ )
is_valid = False
return is_valid, errors
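The three v1.2.2-specific rules applied in `_validate_single_python_dependency` are easy to check in isolation: `package_manager` must be `pip` or `conda`, a `channel` is only accepted together with `conda`, and channel names must match `^[a-zA-Z0-9_\-]+$`. A few illustrative cases:

```python
import re

CHANNEL_PATTERN = r"^[a-zA-Z0-9_\-]+$"

print(bool(re.match(CHANNEL_PATTERN, "conda-forge")))  # True  -> valid channel name
print(bool(re.match(CHANNEL_PATTERN, "conda forge")))  # False -> rejected (space not allowed)

# package_manager must be "pip" or "conda"; anything else is an error:
print("mamba" in ["pip", "conda"])                     # False -> rejected

# A channel combined with package_manager "pip" is also rejected, since
# channels are only meaningful for conda packages.
```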
- def _validate_hatch_dependencies(self, hatch_dependencies: List[Dict],
- context: ValidationContext) -> Tuple[bool, List[str]]:
+ def _validate_hatch_dependencies(
+ self, hatch_dependencies: List[Dict], context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate Hatch package dependencies.
This method is unchanged from v1.2.0 implementation.
@@ -217,10 +241,14 @@ def _validate_hatch_dependencies(self, hatch_dependencies: List[Dict],
try:
hatch_dep_graph_builder = HatchDependencyGraphBuilder(
package_service=self.package_service,
- registry_service=self.registry_service
+ registry_service=self.registry_service,
+ )
+ dependency_graph = hatch_dep_graph_builder.build_dependency_graph(
+ hatch_dependencies, context
+ )
+ logger.debug(
+ f"Dependency graph: {json.dumps(dependency_graph.to_dict(), indent=2)}"
)
- dependency_graph = hatch_dep_graph_builder.build_dependency_graph(hatch_dependencies, context)
- logger.debug(f"Dependency graph: {json.dumps(dependency_graph.to_dict(), indent=2)}")
has_cycles, cycles = dependency_graph.detect_cycles()
@@ -249,12 +277,14 @@ def _parse_hatch_dep_name(self, dep_name: str) -> Tuple[Optional[str], str]:
Returns:
Tuple[Optional[str], str]: (repo_name, package_name). repo_name is None if not present.
"""
- if ':' in dep_name:
- repo, pkg = dep_name.split(':', 1)
+ if ":" in dep_name:
+ repo, pkg = dep_name.split(":", 1)
return repo, pkg
return None, dep_name
- def _validate_single_hatch_dependency(self, dep: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def _validate_single_hatch_dependency(
+ self, dep: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate a single Hatch dependency.
This method is unchanged from v1.2.0 implementation.
@@ -267,24 +297,31 @@ def _validate_single_hatch_dependency(self, dep: Dict, context: ValidationContex
"""
errors = []
is_valid = True
- dep_name = dep.get('name')
+ dep_name = dep.get("name")
if not dep_name:
errors.append("Hatch dependency missing name")
return False, errors
# Validate version constraint if present
- version_constraint = dep.get('version_constraint')
+ version_constraint = dep.get("version_constraint")
if version_constraint:
- constraint_valid, constraint_error = self.version_validator.validate_constraint(version_constraint)
+ (
+ constraint_valid,
+ constraint_error,
+ ) = self.version_validator.validate_constraint(version_constraint)
if not constraint_valid:
- errors.append(f"Invalid version constraint for '{dep_name}': {constraint_error}")
+ errors.append(
+ f"Invalid version constraint for '{dep_name}': {constraint_error}"
+ )
is_valid = False
# Check if this looks like a local path, otherwise treat as remote
if self.package_service.is_local_dependency(dep, context.package_dir):
# Local dependency - check if allowed
if not context.allow_local_dependencies:
- errors.append(f"Local dependency '{dep_name}' not allowed in this context")
+ errors.append(
+ f"Local dependency '{dep_name}' not allowed in this context"
+ )
return False, errors
local_valid, local_errors = self._validate_local_dependency(dep, context)
if not local_valid:
@@ -292,14 +329,18 @@ def _validate_single_hatch_dependency(self, dep: Dict, context: ValidationContex
is_valid = False
else:
# Remote dependency - validate through registry
- registry_valid, registry_errors = self._validate_registry_dependency(dep, context)
+ registry_valid, registry_errors = self._validate_registry_dependency(
+ dep, context
+ )
if not registry_valid:
errors.extend(registry_errors)
is_valid = False
return is_valid, errors
- def _validate_local_dependency(self, dep: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def _validate_local_dependency(
+ self, dep: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate a local file dependency.
This method is unchanged from v1.2.0 implementation.
@@ -311,7 +352,7 @@ def _validate_local_dependency(self, dep: Dict, context: ValidationContext) -> T
Tuple[bool, List[str]]: Validation result and errors
"""
errors = []
- dep_name = dep.get('name')
+ dep_name = dep.get("name")
# Resolve path
path = Path(dep_name)
@@ -321,21 +362,29 @@ def _validate_local_dependency(self, dep: Dict, context: ValidationContext) -> T
# Check if path exists as a file (not a directory)
if path.exists():
if not path.is_dir():
- errors.append(f"Local dependency '{dep_name}' path is not a directory: {path}")
+ errors.append(
+ f"Local dependency '{dep_name}' path is not a directory: {path}"
+ )
return False, errors
else:
- errors.append(f"Local dependency '{dep_name}' path is not a directory: {path}")
+ errors.append(
+ f"Local dependency '{dep_name}' path is not a directory: {path}"
+ )
return False, errors
# Check for metadata file
metadata_path = path / "hatch_metadata.json"
if not metadata_path.exists():
- errors.append(f"Local dependency '{dep_name}' missing hatch_metadata.json: {metadata_path}")
+ errors.append(
+ f"Local dependency '{dep_name}' missing hatch_metadata.json: {metadata_path}"
+ )
return False, errors
return True, []
- def _validate_registry_dependency(self, dep: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def _validate_registry_dependency(
+ self, dep: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate a registry dependency.
This method is unchanged from v1.2.0 implementation.
@@ -347,8 +396,8 @@ def _validate_registry_dependency(self, dep: Dict, context: ValidationContext) -
Tuple[bool, List[str]]: Validation result and errors
"""
errors = []
- dep_name = dep.get('name')
- version_constraint = dep.get('version_constraint')
+ dep_name = dep.get("name")
+ version_constraint = dep.get("version_constraint")
# Parse repo and package name
repo, pkg = self._parse_hatch_dep_name(dep_name)
@@ -356,25 +405,36 @@ def _validate_registry_dependency(self, dep: Dict, context: ValidationContext) -
if repo:
# Check repo existence
if not self.registry_service.repository_exists(repo):
- errors.append(f"Repository '{repo}' not found in registry for dependency '{dep_name}'")
+ errors.append(
+ f"Repository '{repo}' not found in registry for dependency '{dep_name}'"
+ )
return False, errors
# Check package existence in repo
if not self.registry_service.package_exists(pkg, repo_name=repo):
- errors.append(f"Package '{pkg}' not found in repository '{repo}' for dependency '{dep_name}'")
+ errors.append(
+ f"Package '{pkg}' not found in repository '{repo}' for dependency '{dep_name}'"
+ )
return False, errors
else:
# No repo prefix, check package in any repo
if not self.registry_service.package_exists(pkg):
- errors.append(f"Registry dependency '{pkg}' not found in registry for dependency '{dep_name}'")
+ errors.append(
+ f"Registry dependency '{pkg}' not found in registry for dependency '{dep_name}'"
+ )
return False, errors
# Check version compatibility if constraint is specified
if version_constraint:
- version_compatible, version_error = self.registry_service.validate_version_compatibility(
- dep_name, version_constraint)
+ (
+ version_compatible,
+ version_error,
+ ) = self.registry_service.validate_version_compatibility(
+ dep_name, version_constraint
+ )
if not version_compatible:
- errors.append(f"No version of '{dep_name}' satisfies constraint {version_constraint}: {version_error}")
+ errors.append(
+ f"No version of '{dep_name}' satisfies constraint {version_constraint}: {version_error}"
+ )
return False, errors
return True, []
-
diff --git a/hatch_validator/package/v1_2_2/schema_validation.py b/hatch_validator/package/v1_2_2/schema_validation.py
index 8df0521..5a9f2a8 100644
--- a/hatch_validator/package/v1_2_2/schema_validation.py
+++ b/hatch_validator/package/v1_2_2/schema_validation.py
@@ -25,7 +25,9 @@ class SchemaValidation(SchemaValidationStrategy):
conda package manager support for Python dependencies.
"""
- def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def validate_schema(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate metadata against v1.2.2 schema.
Args:
@@ -39,7 +41,9 @@ def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[b
"""
try:
# Load schema for v1.2.2
- schema = get_package_schema(version="1.2.2", force_update=context.force_schema_update)
+ schema = get_package_schema(
+ version="1.2.2", force_update=context.force_schema_update
+ )
if not schema:
error_msg = "Failed to load package schema version 1.2.2"
logger.error(error_msg)
@@ -47,7 +51,9 @@ def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[b
# Validate against schema
jsonschema.validate(instance=metadata, schema=schema)
- logger.debug("Package metadata successfully validated against v1.2.2 schema")
+ logger.debug(
+ "Package metadata successfully validated against v1.2.2 schema"
+ )
return True, []
except jsonschema.ValidationError as e:
@@ -60,4 +66,3 @@ def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[b
error_msg = f"Unexpected error during schema validation: {str(e)}"
logger.error(error_msg)
return False, [error_msg]
-
diff --git a/hatch_validator/package/v1_2_2/validator.py b/hatch_validator/package/v1_2_2/validator.py
index 3760be7..08bcd48 100644
--- a/hatch_validator/package/v1_2_2/validator.py
+++ b/hatch_validator/package/v1_2_2/validator.py
@@ -25,58 +25,60 @@
class Validator(ValidatorBase):
"""Validator for packages using schema version 1.2.2.
-
+
Schema version 1.2.2 introduces conda package manager support for Python
dependencies. This validator implements enhanced dependency validation while
delegating unchanged validation logic (entry points, tools) to the v1.2.1 validator.
"""
-
+
def __init__(self, next_validator=None):
"""Initialize the v1.2.2 validator with strategies.
-
+
Args:
next_validator (Validator, optional): Next validator in chain. Defaults to None.
"""
super().__init__(next_validator)
self.schema_strategy = SchemaValidation()
self.dependency_strategy = DependencyValidation()
-
+
def can_handle(self, schema_version: str) -> bool:
"""Check if this validator can handle the given schema version.
-
+
Args:
schema_version (str): Schema version to check
-
+
Returns:
bool: True if this validator can handle the version, False otherwise
"""
return schema_version == "1.2.2"
-
- def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validation entry point for packages following schema v1.2.2.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources and state
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether validation was successful
- List[str]: List of validation errors
"""
schema_version = metadata.get("package_schema_version", "")
-
+
# Check if we can handle this version
if not self.can_handle(schema_version):
if self.next_validator:
return self.next_validator.validate(metadata, context)
return False, [f"Unsupported schema version: {schema_version}"]
-
- logger.info(f"Validating package metadata using v1.2.2 validator")
-
+
+ logger.info("Validating package metadata using v1.2.2 validator")
+
all_errors = []
is_valid = True
-
+
# 1. Validate against JSON schema
schema_valid, schema_errors = self.validate_schema(metadata, context)
if not schema_valid:
@@ -84,69 +86,79 @@ def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, Li
is_valid = False
# If schema validation fails, don't continue with other validations
return is_valid, all_errors
-
+
# 2. Validate dependencies (enhanced with conda support)
deps_valid, deps_errors = self.validate_dependencies(metadata, context)
if not deps_valid:
all_errors.extend(deps_errors)
is_valid = False
-
+
# 3. Validate entry point (delegate to v1.2.1 - unchanged)
- entry_point_valid, entry_point_errors = self.validate_entry_point(metadata, context)
+ entry_point_valid, entry_point_errors = self.validate_entry_point(
+ metadata, context
+ )
if not entry_point_valid:
all_errors.extend(entry_point_errors)
is_valid = False
-
+
# 4. Validate tools (delegate to v1.2.1 - unchanged)
tools_valid, tools_errors = self.validate_tools(metadata, context)
if not tools_valid:
all_errors.extend(tools_errors)
is_valid = False
-
+
if is_valid:
logger.info("Package metadata validation successful for v1.2.2")
else:
- logger.warning(f"Package metadata validation failed for v1.2.2: {len(all_errors)} errors")
-
+ logger.warning(
+ f"Package metadata validation failed for v1.2.2: {len(all_errors)} errors"
+ )
+
return is_valid, all_errors
-
- def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_schema(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate metadata against schema for v1.2.2.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
logger.debug("Validating package metadata against v1.2.2 schema")
return self.schema_strategy.validate_schema(metadata, context)
-
- def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_dependencies(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate dependencies for v1.2.2.
-
+
Dependencies structure includes conda support for Python packages.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
logger.debug("Validating dependencies with conda support for v1.2.2")
return self.dependency_strategy.validate_dependencies(metadata, context)
-
- def validate_entry_point(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_entry_point(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate dual entry point for v1.2.2.
-
+
Entry point validation is unchanged from v1.2.1, so delegate to the next validator.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
@@ -154,16 +166,18 @@ def validate_entry_point(self, metadata: Dict, context: ValidationContext) -> Tu
if self.next_validator:
return self.next_validator.validate_entry_point(metadata, context)
return False, ["No validator available for entry point validation"]
-
- def validate_tools(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_tools(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate tools with FastMCP server enforcement for v1.2.2.
-
+
Tools validation is unchanged from v1.2.1, so delegate to the next validator.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
@@ -171,4 +185,3 @@ def validate_tools(self, metadata: Dict, context: ValidationContext) -> Tuple[bo
if self.next_validator:
return self.next_validator.validate_tools(metadata, context)
return False, ["No validator available for tools validation"]
-
diff --git a/hatch_validator/package/v2_0_0/__init__.py b/hatch_validator/package/v2_0_0/__init__.py
index e7a0890..41d1ad3 100644
--- a/hatch_validator/package/v2_0_0/__init__.py
+++ b/hatch_validator/package/v2_0_0/__init__.py
@@ -3,4 +3,3 @@
This package contains the validator and strategies for schema version 2.0.0,
which integrates the package metadata format with the Official MCP Registry.
"""
-
diff --git a/hatch_validator/package/v2_0_0/accessor.py b/hatch_validator/package/v2_0_0/accessor.py
index ca6dd5f..e78938e 100644
--- a/hatch_validator/package/v2_0_0/accessor.py
+++ b/hatch_validator/package/v2_0_0/accessor.py
@@ -9,10 +9,13 @@
import logging
from typing import Any, Dict
-from hatch_validator.core.pkg_accessor_base import HatchPkgAccessor as HatchPkgAccessorBase
+from hatch_validator.core.pkg_accessor_base import (
+ HatchPkgAccessor as HatchPkgAccessorBase,
+)
logger = logging.getLogger("hatch.package.v2_0_0.accessor")
+
class HatchPkgAccessor(HatchPkgAccessorBase):
"""Metadata accessor for Hatch package schema version 2.0.0."""
@@ -36,7 +39,9 @@ def get_package_schema_version(self, metadata: Dict[str, Any]) -> Any:
Returns:
Any: Schema version value from either hatch_schema_version or package_schema_version
"""
- return metadata.get("hatch_schema_version") or metadata.get("package_schema_version")
+ return metadata.get("hatch_schema_version") or metadata.get(
+ "package_schema_version"
+ )
def get_author(self, metadata: Dict[str, Any]) -> Any:
"""Get authors from metadata.
diff --git a/hatch_validator/package/v2_0_0/dependency_validation.py b/hatch_validator/package/v2_0_0/dependency_validation.py
index 2ce2545..c7ecbc3 100644
--- a/hatch_validator/package/v2_0_0/dependency_validation.py
+++ b/hatch_validator/package/v2_0_0/dependency_validation.py
@@ -19,7 +19,9 @@
class DependencyValidation(DependencyValidationStrategy):
"""Strategy for validating Docker dependencies according to v2.0.0 schema."""
- def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def validate_dependencies(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate Docker dependencies according to v2.0.0 schema.
Hatch, Python, and System dependency validation is handled by the
@@ -41,13 +43,15 @@ def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> T
package_service = PackageService(metadata)
dependencies = package_service.get_dependencies()
- docker_dependencies = dependencies.get('docker', [])
+ docker_dependencies = dependencies.get("docker", [])
errors = []
is_valid = True
if docker_dependencies:
- docker_valid, docker_errors = self._validate_docker_dependencies(docker_dependencies, context)
+ docker_valid, docker_errors = self._validate_docker_dependencies(
+ docker_dependencies, context
+ )
if not docker_valid:
errors.extend(docker_errors)
is_valid = False
@@ -56,25 +60,31 @@ def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> T
logger.error(f"Error during Docker dependency validation: {e}")
return False, [f"Error during Docker dependency validation: {e}"]
- logger.debug(f"Docker dependency validation result: {is_valid}, errors: {errors}")
+ logger.debug(
+ f"Docker dependency validation result: {is_valid}, errors: {errors}"
+ )
return is_valid, errors
- def _validate_docker_dependencies(self, docker_dependencies: List[Dict],
- context: ValidationContext) -> Tuple[bool, List[str]]:
+ def _validate_docker_dependencies(
+ self, docker_dependencies: List[Dict], context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate Docker image dependencies."""
errors = []
is_valid = True
for dep in docker_dependencies:
- dep_valid, dep_errors = self._validate_single_docker_dependency(dep, context)
+ dep_valid, dep_errors = self._validate_single_docker_dependency(
+ dep, context
+ )
if not dep_valid:
errors.extend(dep_errors)
is_valid = False
return is_valid, errors
- def _validate_single_docker_dependency(self, dep: Dict,
- context: ValidationContext) -> Tuple[bool, List[str]]:
+ def _validate_single_docker_dependency(
+ self, dep: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate a single Docker dependency.
Structural checks (digest presence, digest pattern, version_constraint rejection)
@@ -83,19 +93,21 @@ def _validate_single_docker_dependency(self, dep: Dict,
errors = []
is_valid = True
- dep_name = dep.get('name')
+ dep_name = dep.get("name")
if not dep_name:
errors.append("Docker dependency missing name")
return False, errors
- tag = dep.get('tag')
+ tag = dep.get("tag")
if tag is not None and not isinstance(tag, str):
errors.append(f"Invalid Docker tag for '{dep_name}'. Must be a string")
is_valid = False
- registry = dep.get('registry')
+ registry = dep.get("registry")
if registry is not None and not isinstance(registry, str):
- errors.append(f"Invalid registry value for Docker dependency '{dep_name}'. Must be a string")
+ errors.append(
+ f"Invalid registry value for Docker dependency '{dep_name}'. Must be a string"
+ )
is_valid = False
return is_valid, errors
diff --git a/hatch_validator/package/v2_0_0/schema_validation.py b/hatch_validator/package/v2_0_0/schema_validation.py
index 8c54a1a..894eb4d 100644
--- a/hatch_validator/package/v2_0_0/schema_validation.py
+++ b/hatch_validator/package/v2_0_0/schema_validation.py
@@ -17,7 +17,9 @@
class SchemaValidation(SchemaValidationStrategy):
"""Strategy for validating metadata against v2.0.0 schema."""
- def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def validate_schema(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate metadata against v2.0.0 schema.
Args:
@@ -32,19 +34,25 @@ def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[b
try:
jsonschema = __import__("jsonschema")
except ImportError:
- error_msg = "jsonschema is required for schema validation but is not installed"
+ error_msg = (
+ "jsonschema is required for schema validation but is not installed"
+ )
logger.error(error_msg)
return False, [error_msg]
try:
- schema = get_package_schema(version="2.0.0", force_update=context.force_schema_update)
+ schema = get_package_schema(
+ version="2.0.0", force_update=context.force_schema_update
+ )
if not schema:
error_msg = "Failed to load package schema version 2.0.0"
logger.error(error_msg)
return False, [error_msg]
jsonschema.validate(instance=metadata, schema=schema)
- logger.debug("Package metadata successfully validated against v2.0.0 schema")
+ logger.debug(
+ "Package metadata successfully validated against v2.0.0 schema"
+ )
return True, []
except jsonschema.ValidationError as e:
diff --git a/hatch_validator/package/v2_0_0/validator.py b/hatch_validator/package/v2_0_0/validator.py
index e5d9bce..b56961a 100644
--- a/hatch_validator/package/v2_0_0/validator.py
+++ b/hatch_validator/package/v2_0_0/validator.py
@@ -22,61 +22,65 @@
class Validator(ValidatorBase):
"""Validator for packages using schema version 2.0.0.
-
+
Schema version 2.0.0 renames some fields (package_schema_version → hatch_schema_version,
author → authors, tools[].description → tools[].desc), now requires Docker dependencies to
specify tag and digest instead of version_constraint, and makes version_constraint optional for
all dependency types. This validator handles the new dependency structure and delegates
unchanged validation logic to the previous validator in the chain.
"""
-
+
def __init__(self, next_validator=None):
"""Initialize the v2.0.0 validator with strategies.
-
+
Args:
next_validator (Validator, optional): Next validator in chain. Defaults to None.
"""
super().__init__(next_validator)
self.schema_strategy = SchemaValidation()
self.dependency_strategy = DependencyValidation()
-
+
def can_handle(self, schema_version: str) -> bool:
"""Determine if this validator can handle the given schema version.
-
+
Args:
schema_version (str): Schema version to check
-
+
Returns:
bool: True if this validator can handle the schema version
"""
return schema_version == "2.0.0"
-
- def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validation entry point for packages following schema v2.0.0.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources and state
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether validation was successful
- List[str]: List of validation errors
"""
# Support new field name for v2.0.0 with fallback for legacy packages.
- schema_version = metadata.get("hatch_schema_version") or metadata.get("package_schema_version", "")
-
+ schema_version = metadata.get("hatch_schema_version") or metadata.get(
+ "package_schema_version", ""
+ )
+
# Check if we can handle this version
if not self.can_handle(schema_version):
if self.next_validator:
return self.next_validator.validate(metadata, context)
return False, [f"Unsupported schema version: {schema_version}"]
-
- logger.info(f"Validating package metadata using v2.0.0 validator")
-
+
+ logger.info("Validating package metadata using v2.0.0 validator")
+
all_errors = []
is_valid = True
-
+
# 1. Validate against JSON schema
schema_valid, schema_errors = self.validate_schema(metadata, context)
if not schema_valid:
@@ -84,7 +88,7 @@ def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, Li
is_valid = False
# If schema validation fails, don't continue with other validations
return is_valid, all_errors
-
+
+ # 2. Validate Hatch/Python/System dependencies → unchanged from v1.2.2, delegated via chain
deps_valid, deps_errors = self.validate_dependencies(metadata, context)
if not deps_valid:
@@ -92,7 +96,9 @@ def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, Li
is_valid = False
# 3. Validate Docker dependencies → new concern owned by v2.0.0
- docker_valid, docker_errors = self.validate_docker_dependencies(metadata, context)
+ docker_valid, docker_errors = self.validate_docker_dependencies(
+ metadata, context
+ )
if not docker_valid:
all_errors.extend(docker_errors)
is_valid = False
@@ -109,23 +115,27 @@ def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, Li
if not tools_valid:
all_errors.extend(tools_errors)
is_valid = False
-
+
return is_valid, all_errors
-
- def validate_schema(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_schema(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate metadata against schema for v2.0.0.
-
+
Args:
metadata (Dict): Package metadata to validate
context (ValidationContext): Validation context with resources
-
+
Returns:
Tuple[bool, List[str]]: Validation result and errors
"""
logger.debug("Validating package metadata against v2.0.0 schema")
return self.schema_strategy.validate_schema(metadata, context)
-
- def validate_docker_dependencies(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_docker_dependencies(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate Docker dependencies for v2.0.0.
Docker dependencies are new in v2.0.0 (digest-based, no version_constraint).
@@ -141,7 +151,9 @@ def validate_docker_dependencies(self, metadata: Dict, context: ValidationContex
logger.debug("Validating Docker dependencies for v2.0.0")
return self.dependency_strategy.validate_dependencies(metadata, context)
- def validate_tools(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def validate_tools(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate tools for v2.0.0.
Tools validation (declared tool names must match @mcp.tool()-decorated functions)
@@ -159,7 +171,9 @@ def validate_tools(self, metadata: Dict, context: ValidationContext) -> Tuple[bo
return self.next_validator.validate_tools(metadata, context)
return False, ["No validator available for tools validation"]
- def validate_entry_point(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+ def validate_entry_point(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Validate entry point for v2.0.0.
Entry point validation (dual mcp_server + hatch_mcp_server file checks)
diff --git a/hatch_validator/package_validator.py b/hatch_validator/package_validator.py
index a8363cd..a5f57c3 100644
--- a/hatch_validator/package_validator.py
+++ b/hatch_validator/package_validator.py
@@ -17,20 +17,26 @@
class PackageValidationError(Exception):
"""Exception raised for package validation errors."""
+
pass
class HatchPackageValidator:
"""Hatch package validator using Chain of Responsibility pattern.
-
+
This validator uses the new extensible validation architecture that can
handle multiple schema versions through a chain of specialized validators.
"""
-
- def __init__(self, version: str = "latest", allow_local_dependencies: bool = True,
- force_schema_update: bool = False, registry_data: Optional[Dict] = None):
+
+ def __init__(
+ self,
+ version: str = "latest",
+ allow_local_dependencies: bool = True,
+ force_schema_update: bool = False,
+ registry_data: Optional[Dict] = None,
+ ):
"""Initialize the Hatch package validator.
-
+
Args:
version (str, optional): Version of the schema to use, or "latest". Defaults to "latest".
allow_local_dependencies (bool, optional): Whether to allow local dependencies. Defaults to True.
@@ -43,16 +49,16 @@ def __init__(self, version: str = "latest", allow_local_dependencies: bool = Tru
self.allow_local_dependencies = allow_local_dependencies
self.force_schema_update = force_schema_update
self.registry_data = registry_data
-
+
def validate_pkg_metadata(self, metadata: Dict) -> Tuple[bool, List[str]]:
"""Validate the package's metadata against the package JSON schema.
-
+
Uses the new Chain of Responsibility validator system to validate
metadata against the appropriate schema version.
-
+
Args:
metadata (Dict): The metadata to validate
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether validation was successful
@@ -61,58 +67,68 @@ def validate_pkg_metadata(self, metadata: Dict) -> Tuple[bool, List[str]]:
try:
# Determine the target schema version
schema_version = self._determine_schema_version(metadata)
-
+
# Create validator chain for the target version
validator = ValidatorFactory.create_validator_chain(schema_version)
-
+
# Create validation context (metadata-only validation)
context = ValidationContext(
registry_data=self.registry_data,
allow_local_dependencies=self.allow_local_dependencies,
- force_schema_update=self.force_schema_update
+ force_schema_update=self.force_schema_update,
)
-
+
# Run validation through the chain
return validator.validate(metadata, context)
-
+
except ValueError as e:
if "Unsupported schema version" in str(e):
# Provide meaningful error with either field name
- version_value = metadata.get('hatch_schema_version') or metadata.get('package_schema_version', 'unknown')
+ version_value = metadata.get("hatch_schema_version") or metadata.get(
+ "package_schema_version", "unknown"
+ )
return False, [f"Unsupported schema version: {version_value}"]
raise
except Exception as e:
return False, [f"Validation error: {str(e)}"]
-
+
def validate_registry_metadata(self, metadata: Dict) -> Tuple[bool, List[str]]:
"""Validate the registry's metadata against the registry JSON schema.
-
+
Args:
metadata (Dict): The metadata to validate
-
+
Returns:
Tuple[bool, List[str]]: Tuple containing:
- bool: Whether validation was successful
- List[str]: List of validation errors
"""
# Load schema using the schema retriever
- schema = get_registry_schema(version=self.version, force_update=self.force_schema_update)
+ schema = get_registry_schema(
+ version=self.version, force_update=self.force_schema_update
+ )
if not schema:
error_msg = f"Failed to load registry schema version {self.version}"
self.logger.error(error_msg)
return False, [error_msg]
-
+
# Validate against schema
try:
import jsonschema
+
jsonschema.validate(instance=metadata, schema=schema)
return True, []
except jsonschema.exceptions.ValidationError as e:
return False, [f"Registry validation error: {e.message}"]
except Exception as e:
return False, [f"Error during registry validation: {str(e)}"]
-
- def validate_package(self, package_dir: Path, metadata: Optional[Dict[str, Any]] = None, pending_update: Optional[Tuple[str, Dict]] = None) -> Tuple[bool, Dict[str, Any]]:
+
+ def validate_package(
+ self,
+ package_dir: Path,
+ metadata: Optional[Dict[str, Any]] = None,
+ pending_update: Optional[Tuple[str, Dict]] = None,
+ ) -> Tuple[bool, Dict[str, Any]]:
"""Validate a Hatch package in the specified directory.
Uses the new Chain of Responsibility validator system for comprehensive
@@ -130,94 +146,106 @@ def validate_package(self, package_dir: Path, metadata: Optional[Dict[str, Any]]
- Dict[str, Any]: Detailed validation results
"""
results = {
- 'valid': True,
- 'metadata_schema': {'valid': False, 'errors': []},
- 'entry_point': {'valid': False, 'errors': []},
- 'tools': {'valid': False, 'errors': []},
- 'dependencies': {'valid': True, 'errors': []},
- 'metadata': None
+ "valid": True,
+ "metadata_schema": {"valid": False, "errors": []},
+ "entry_point": {"valid": False, "errors": []},
+ "tools": {"valid": False, "errors": []},
+ "dependencies": {"valid": True, "errors": []},
+ "metadata": None,
}
-
+
# Check if package directory exists
if not package_dir.exists() or not package_dir.is_dir():
- results['valid'] = False
- results['metadata_schema']['errors'].append(f"Package directory does not exist: {package_dir}")
+ results["valid"] = False
+ results["metadata_schema"]["errors"].append(
+ f"Package directory does not exist: {package_dir}"
+ )
return False, results
-
+
# Load metadata from file if not supplied by caller
if metadata is None:
metadata_path = package_dir / "hatch_metadata.json"
if not metadata_path.exists():
- results['valid'] = False
- results['metadata_schema']['errors'].append("hatch_metadata.json not found")
+ results["valid"] = False
+ results["metadata_schema"]["errors"].append(
+ "hatch_metadata.json not found"
+ )
return False, results
try:
- with open(metadata_path, 'r') as f:
+ with open(metadata_path, "r") as f:
metadata = json.load(f)
except (json.JSONDecodeError, UnicodeDecodeError) as e:
- results['valid'] = False
- results['metadata_schema']['errors'].append(f"Failed to parse metadata: {e}")
+ results["valid"] = False
+ results["metadata_schema"]["errors"].append(
+ f"Failed to parse metadata: {e}"
+ )
return False, results
- results['metadata'] = metadata
-
+ results["metadata"] = metadata
+
# Use new validation system for comprehensive validation
try:
# Determine the target schema version
schema_version = self._determine_schema_version(metadata)
-
+
# Create validator chain for the target version
validator = ValidatorFactory.create_validator_chain(schema_version)
-
+
# Create validation context with package directory
context = ValidationContext(
package_dir=package_dir,
registry_data=self.registry_data,
allow_local_dependencies=self.allow_local_dependencies,
- force_schema_update=self.force_schema_update
+ force_schema_update=self.force_schema_update,
)
-
+
# Add pending update information for circular dependency detection
if pending_update:
context.set_data("pending_update", pending_update)
-
+
# Run comprehensive validation through the chain
is_valid, errors = validator.validate(metadata, context)
-
+
if is_valid:
# All validations passed
- results['metadata_schema']['valid'] = True
- results['entry_point']['valid'] = True
- results['tools']['valid'] = True
- results['dependencies']['valid'] = True
+ results["metadata_schema"]["valid"] = True
+ results["entry_point"]["valid"] = True
+ results["tools"]["valid"] = True
+ results["dependencies"]["valid"] = True
else:
# Parse errors to categorize them for backward compatibility
- results['valid'] = False
+ results["valid"] = False
self._categorize_validation_errors(errors, results)
-
+
except ValueError as e:
if "Unsupported schema version" in str(e):
- results['valid'] = False
- results['metadata_schema']['errors'].append(f"Unsupported schema version: {schema_version}")
+ results["valid"] = False
+ results["metadata_schema"]["errors"].append(
+ f"Unsupported schema version: {schema_version}"
+ )
else:
- results['valid'] = False
- results['metadata_schema']['errors'].append(f"Validation error: {str(e)}")
+ results["valid"] = False
+ results["metadata_schema"]["errors"].append(
+ f"Validation error: {str(e)}"
+ )
except Exception as e:
- results['valid'] = False
- results['metadata_schema']['errors'].append(f"Validation system error: {str(e)}")
-
- return results['valid'], results
-
+ results["valid"] = False
+ results["metadata_schema"]["errors"].append(
+ f"Validation system error: {str(e)}"
+ )
+
+ return results["valid"], results
+
def _determine_schema_version(self, metadata: Dict) -> str:
"""Determine the schema version to use for validation.
-
+
Maintains backward compatibility by checking for both the new field name
(hatch_schema_version) used in v2.0.0+ and the legacy field name
(package_schema_version) used in v1.x packages.
-
+
Args:
metadata (Dict): Package metadata
-
+
Returns:
str: Schema version to use
"""
@@ -225,54 +253,65 @@ def _determine_schema_version(self, metadata: Dict) -> str:
schema_version = metadata.get("hatch_schema_version")
if schema_version:
return schema_version
-
+
# Fall back to legacy field name for backward compatibility (v1.x)
schema_version = metadata.get("package_schema_version")
if schema_version:
return schema_version
-
+
# Fallback to validator's configured version
if self.version == "latest":
return "1.1.0" # Current latest version
else:
return self.version
-
- def _categorize_validation_errors(self, errors: List[str], results: Dict[str, Any]) -> None:
+
+ def _categorize_validation_errors(
+ self, errors: List[str], results: Dict[str, Any]
+ ) -> None:
"""Categorize validation errors into appropriate result categories.
-
+
This method maintains backward compatibility with the original result structure
by parsing error messages and categorizing them appropriately.
-
+
Args:
errors (List[str]): List of validation errors
results (Dict[str, Any]): Results dictionary to update
"""
# Reset all validation statuses since we'll set them based on errors
- results['metadata_schema']['valid'] = True
- results['entry_point']['valid'] = True
- results['tools']['valid'] = True
- results['dependencies']['valid'] = True
-
+ results["metadata_schema"]["valid"] = True
+ results["entry_point"]["valid"] = True
+ results["tools"]["valid"] = True
+ results["dependencies"]["valid"] = True
+
# Categorize each error
for error in errors:
error_lower = error.lower()
-
+
# Logging to debug error categorization
self.logger.debug(f"Categorizing error: {error}")
-
+
if "schema validation" in error_lower or "failed to load" in error_lower:
- results['metadata_schema']['errors'].append(error)
- results['metadata_schema']['valid'] = False
+ results["metadata_schema"]["errors"].append(error)
+ results["metadata_schema"]["valid"] = False
elif "entry point" in error_lower:
- results['entry_point']['errors'].append(error)
- results['entry_point']['valid'] = False
+ results["entry_point"]["errors"].append(error)
+ results["entry_point"]["valid"] = False
elif "tool" in error_lower or "function" in error_lower:
- results['tools']['errors'].append(error)
- results['tools']['valid'] = False
- elif any(keyword in error_lower for keyword in ['dependency', 'circular', 'constraint', 'not found', 'version']):
- results['dependencies']['errors'].append(error)
- results['dependencies']['valid'] = False
+ results["tools"]["errors"].append(error)
+ results["tools"]["valid"] = False
+ elif any(
+ keyword in error_lower
+ for keyword in [
+ "dependency",
+ "circular",
+ "constraint",
+ "not found",
+ "version",
+ ]
+ ):
+ results["dependencies"]["errors"].append(error)
+ results["dependencies"]["valid"] = False
else:
# Default: assign to metadata schema
- results['metadata_schema']['errors'].append(error)
- results['metadata_schema']['valid'] = False
\ No newline at end of file
+ results["metadata_schema"]["errors"].append(error)
+ results["metadata_schema"]["valid"] = False
diff --git a/hatch_validator/registry/registry_accessor_base.py b/hatch_validator/registry/registry_accessor_base.py
index 0d9cda3..7d18e71 100644
--- a/hatch_validator/registry/registry_accessor_base.py
+++ b/hatch_validator/registry/registry_accessor_base.py
@@ -1,52 +1,56 @@
-from typing import Dict, List, Set, Tuple, Optional, Any
+from typing import Dict, List, Optional, Any
from abc import ABC, abstractmethod
+
class RegistryError(Exception):
"""Exception raised for registry-related errors."""
+
pass
class RegistryAccessorBase(ABC):
"""Abstract base class for version-specific registry data accessors.
-
+
Implements the chain of responsibility pattern for handling different
registry schema versions.
"""
-
- def __init__(self, successor: Optional['RegistryAccessorBase'] = None):
+
+ def __init__(self, successor: Optional["RegistryAccessorBase"] = None):
"""Initialize the registry accessor.
-
+
Args:
successor (Optional[RegistryAccessor]): Next accessor in the chain.
"""
self._successor = successor
-
+
@abstractmethod
def can_handle(self, registry_data: Dict[str, Any]) -> bool:
"""Check if this accessor can handle the given registry data.
-
+
Args:
registry_data (Dict[str, Any]): Registry data to check.
-
+
Returns:
bool: True if this accessor can handle the data.
"""
pass
-
+
@abstractmethod
def get_schema_version(self, registry_data: Dict[str, Any]) -> str:
"""Get the schema version from registry data.
-
+
Args:
registry_data (Dict[str, Any]): Registry data.
-
+
Returns:
str: Schema version string.
"""
pass
-
+
@abstractmethod
- def get_all_package_names(self, registry_data: Dict[str, Any], repo_name: Optional[str] = None) -> List[str]:
+ def get_all_package_names(
+ self, registry_data: Dict[str, Any], repo_name: Optional[str] = None
+ ) -> List[str]:
"""Get all package names from registry data.
Args:
@@ -57,9 +61,14 @@ def get_all_package_names(self, registry_data: Dict[str, Any], repo_name: Option
List[str]: List of package names.
"""
pass
-
+
@abstractmethod
- def package_exists(self, registry_data: Dict[str, Any], package_name: str, repo_name: Optional[str] = None) -> bool:
+ def package_exists(
+ self,
+ registry_data: Dict[str, Any],
+ package_name: str,
+ repo_name: Optional[str] = None,
+ ) -> bool:
"""Check if a package exists in the registry.
Args:
@@ -71,9 +80,14 @@ def package_exists(self, registry_data: Dict[str, Any], package_name: str, repo_
bool: True if package exists.
"""
pass
-
+
@abstractmethod
- def get_package_versions(self, registry_data: Dict[str, Any], package_name: str, repo_name: Optional[str] = None) -> List[str]:
+ def get_package_versions(
+ self,
+ registry_data: Dict[str, Any],
+ package_name: str,
+ repo_name: Optional[str] = None,
+ ) -> List[str]:
"""Get all versions for a package.
Args:
@@ -85,9 +99,14 @@ def get_package_versions(self, registry_data: Dict[str, Any], package_name: str,
List[str]: List of version strings.
"""
pass
-
+
@abstractmethod
- def get_package_metadata(self, registry_data: Dict[str, Any], package_name: str, repo_name: Optional[str] = None) -> Dict[str, Any]:
+ def get_package_metadata(
+ self,
+ registry_data: Dict[str, Any],
+ package_name: str,
+ repo_name: Optional[str] = None,
+ ) -> Dict[str, Any]:
"""Get metadata for a package.
Args:
@@ -99,9 +118,11 @@ def get_package_metadata(self, registry_data: Dict[str, Any], package_name: str,
Dict[str, Any]: Package metadata.
"""
pass
-
+
@abstractmethod
- def get_package_by_repo(self, registry_data: Dict[str, Any], repo_name: str, package_name: str) -> Optional[Dict[str, Any]]:
+ def get_package_by_repo(
+ self, registry_data: Dict[str, Any], repo_name: str, package_name: str
+ ) -> Optional[Dict[str, Any]]:
"""Get a package by repository and package name.
Args:
@@ -114,12 +135,18 @@ def get_package_by_repo(self, registry_data: Dict[str, Any], repo_name: str, pac
pass
@abstractmethod
- def get_package_dependencies(self, registry_data: Dict[str, Any], package_name: str, version: str = None, repo_name: Optional[str] = None) -> Dict[str, Any]:
+ def get_package_dependencies(
+ self,
+ registry_data: Dict[str, Any],
+ package_name: str,
+ version: str = None,
+ repo_name: Optional[str] = None,
+ ) -> Dict[str, Any]:
"""Get reconstructed HATCH dependencies for a specific package version.
-
+
This method reconstructs the complete dependency information from the differential
storage format used in the registry.
-
+
Args:
registry_data (Dict[str, Any]): Registry data.
package_name (str): Package name.
@@ -133,30 +160,42 @@ def get_package_dependencies(self, registry_data: Dict[str, Any], package_name:
pass
@abstractmethod
- def get_package_version_info(self, registry_data: Dict[str, Any], package_name: str, version: str, repo_name: Optional[str] = None) -> Dict[str, Any]:
+ def get_package_version_info(
+ self,
+ registry_data: Dict[str, Any],
+ package_name: str,
+ version: str,
+ repo_name: Optional[str] = None,
+ ) -> Dict[str, Any]:
"""Get metadata for a specific package version.
-
+
Args:
registry_data (Dict[str, Any]): Registry data.
package_name (str): Package name.
version (str): Package version.
repo_name (str, optional): Repository name. If None, uses default repository.
-
+
Returns:
Dict[str, Any]: Metadata for the package version.
"""
pass
@abstractmethod
- def get_package_uri(self, registry_data: Dict[str, Any], package_name: str, version: str, repo_name: Optional[str] = None) -> str:
+ def get_package_uri(
+ self,
+ registry_data: Dict[str, Any],
+ package_name: str,
+ version: str,
+ repo_name: Optional[str] = None,
+ ) -> str:
"""Get the URI for a specific package version.
-
+
Args:
registry_data (Dict[str, Any]): Registry data.
package_name (str): Package name.
version (str): Package version.
repo_name (str, optional): Repository name. If None, uses default repository.
-
+
Returns:
str: URI for the package version.
"""
@@ -168,7 +207,7 @@ def list_repositories(self, registry_data: Dict[str, Any]) -> List[str]:
Args:
registry_data (Dict[str, Any]): Registry data.
-
+
Returns:
List[str]: List of repository names.
"""
@@ -181,7 +220,7 @@ def repository_exists(self, registry_data: Dict[str, Any], repo_name: str) -> bo
Args:
registry_data (Dict[str, Any]): Registry data.
repo_name (str): Repository name.
-
+
Returns:
bool: True if repository exists.
"""
@@ -194,18 +233,20 @@ def list_packages(self, registry_data: Dict[str, Any], repo_name: str) -> List[s
Args:
registry_data (Dict[str, Any]): Registry data.
repo_name (str): Repository name.
-
+
Returns:
List[str]: List of package names in the repository.
"""
pass
- def handle_request(self, registry_data: Dict[str, Any]) -> Optional['RegistryAccessorBase']:
+ def handle_request(
+ self, registry_data: Dict[str, Any]
+ ) -> Optional["RegistryAccessorBase"]:
"""Handle the request using chain of responsibility pattern.
-
+
Args:
registry_data (Dict[str, Any]): Registry data to handle.
-
+
Returns:
Optional[RegistryAccessor]: Accessor that can handle the data, or None.
"""
@@ -214,4 +255,4 @@ def handle_request(self, registry_data: Dict[str, Any]) -> Optional['RegistryAcc
elif self._successor:
return self._successor.handle_request(registry_data)
else:
- return None
\ No newline at end of file
+ return None
diff --git a/hatch_validator/registry/registry_accessor_factory.py b/hatch_validator/registry/registry_accessor_factory.py
index 1976dd2..d72d934 100644
--- a/hatch_validator/registry/registry_accessor_factory.py
+++ b/hatch_validator/registry/registry_accessor_factory.py
@@ -14,43 +14,46 @@
class RegistryAccessorFactory:
"""Factory class for creating registry accessor chains.
-
+
This factory creates the appropriate registry accessor chain based on the target
schema version, setting up the Chain of Responsibility pattern correctly.
The factory maintains a registry of available accessors and constructs
chains that enable proper delegation between versions.
"""
-
+
# Registry of available accessor versions (newest to oldest)
_accessor_registry: Dict[str, Type[RegistryAccessorBase]] = {}
_version_order: List[str] = []
-
+
@classmethod
- def register_accessor(cls, version: str, accessor_class: Type[RegistryAccessorBase]) -> None:
+ def register_accessor(
+ cls, version: str, accessor_class: Type[RegistryAccessorBase]
+ ) -> None:
"""Register a registry accessor for a specific schema version.
-
+
Args:
version (str): Schema version string (e.g., '1.1.0').
accessor_class (Type[RegistryAccessorBase]): Accessor class to register.
"""
cls._accessor_registry[version] = accessor_class
-
+
# Maintain version order (newest first)
if version not in cls._version_order:
cls._version_order.append(version)
cls._version_order.sort(reverse=True) # Newest first
-
+
logger.debug(f"Registered registry accessor for version {version}")
-
+
@classmethod
def get_supported_versions(cls) -> List[str]:
"""Get list of supported schema versions.
-
+
Returns:
List[str]: List of supported version strings, ordered newest to oldest.
"""
cls._ensure_accessors_loaded()
return cls._version_order.copy()
+
@classmethod
def _ensure_accessors_loaded(cls) -> None:
"""Ensure all available accessors are loaded and registered."""
@@ -58,32 +61,37 @@ def _ensure_accessors_loaded(cls) -> None:
# Import and register v1.2.0 accessor (newest first)
# from hatch_validator.registry.v1_2_0.registry_accessor import RegistryAccessor as V120RegistryAccessor
# cls.register_accessor('1.2.0', V120RegistryAccessor)
-
+
# Import and register v1.1.0 accessor
- from hatch_validator.registry.v1_1_0.registry_accessor import RegistryAccessor as V110RegistryAccessor
- cls.register_accessor('1.1.0', V110RegistryAccessor)
-
+ from hatch_validator.registry.v1_1_0.registry_accessor import (
+ RegistryAccessor as V110RegistryAccessor,
+ )
+
+ cls.register_accessor("1.1.0", V110RegistryAccessor)
+
@classmethod
- def create_accessor_chain(cls, target_version: Optional[str] = None) -> RegistryAccessorBase:
+ def create_accessor_chain(
+ cls, target_version: Optional[str] = None
+ ) -> RegistryAccessorBase:
"""Create a registry accessor chain for handling schema versions.
-
+
The chain is built starting from the target version (or newest available)
and includes all older versions as fallbacks in the chain.
-
+
Args:
target_version (str, optional): Target schema version. If None, uses newest.
-
+
Returns:
RegistryAccessorBase: Root accessor of the chain.
-
+
Raises:
ValueError: If target_version is specified but not supported.
"""
cls._ensure_accessors_loaded()
-
+
if not cls._accessor_registry:
raise ValueError("No registry accessors available")
-
+
# Determine starting version
if target_version:
if target_version not in cls._accessor_registry:
@@ -91,36 +99,40 @@ def create_accessor_chain(cls, target_version: Optional[str] = None) -> Registry
start_index = cls._version_order.index(target_version)
else:
start_index = 0 # Start with newest
-
+
# Build chain from target version to oldest
chain_versions = cls._version_order[start_index:]
-
+
if not chain_versions:
raise ValueError("No versions available for chain creation")
-
+
# Create accessors in reverse order (oldest first)
chain_accessors = []
for version in reversed(chain_versions):
accessor_class = cls._accessor_registry[version]
chain_accessors.append(accessor_class())
-
+
# Link the chain (each accessor points to the next older one)
for i in range(len(chain_accessors) - 1):
chain_accessors[i]._successor = chain_accessors[i + 1]
-
+
# Return the newest accessor (first in the chain)
root_accessor = chain_accessors[0]
-
- logger.debug(f"Created registry accessor chain with {len(chain_accessors)} versions")
+
+ logger.debug(
+ f"Created registry accessor chain with {len(chain_accessors)} versions"
+ )
return root_accessor
-
+
@classmethod
- def create_accessor_for_data(cls, registry_data: Dict) -> Optional[RegistryAccessorBase]:
+ def create_accessor_for_data(
+ cls, registry_data: Dict
+ ) -> Optional[RegistryAccessorBase]:
"""Create an accessor that can handle the given registry data.
-
+
Args:
registry_data (Dict): Registry data to find an accessor for.
-
+
Returns:
Optional[RegistryAccessorBase]: Accessor that can handle the data, or None.
"""
diff --git a/hatch_validator/registry/registry_service.py b/hatch_validator/registry/registry_service.py
index d9bc63d..20fa470 100644
--- a/hatch_validator/registry/registry_service.py
+++ b/hatch_validator/registry/registry_service.py
@@ -5,12 +5,14 @@
"""
import logging
-from packaging import specifiers
from typing import Optional, Dict, List, Any, Tuple
from .registry_accessor_factory import RegistryAccessorFactory
from .registry_accessor_base import RegistryAccessorBase, RegistryError
-from hatch_validator.utils.version_utils import VersionConstraintValidator, VersionConstraintError
+from hatch_validator.utils.version_utils import (
+ VersionConstraintValidator,
+ VersionConstraintError,
+)
logger = logging.getLogger("hatch.registry_service")
@@ -23,7 +25,7 @@ class RegistryService:
This service uses the accessor chain pattern to handle different
registry schema versions automatically.
"""
-
+
def __init__(self, registry_data: Optional[Dict[str, Any]] = None):
"""Initialize the registry service.
@@ -33,8 +35,10 @@ def __init__(self, registry_data: Optional[Dict[str, Any]] = None):
self._registry_data: Optional[Dict[str, Any]] = registry_data
self._accessor: Optional[RegistryAccessorBase] = None
if registry_data:
- self._accessor = RegistryAccessorFactory.create_accessor_for_data(registry_data)
-
+ self._accessor = RegistryAccessorFactory.create_accessor_for_data(
+ registry_data
+ )
+
def load_registry_data(self, registry_data: Dict[str, Any]) -> None:
"""Load registry data and initialize appropriate accessor.
@@ -46,12 +50,16 @@ def load_registry_data(self, registry_data: Dict[str, Any]) -> None:
"""
self._registry_data = registry_data
self._accessor = RegistryAccessorFactory.create_accessor_for_data(registry_data)
-
+
if not self._accessor:
- raise RegistryError("No accessor available for the provided registry data format")
-
- logger.debug(f"Loaded registry data with schema version: {self._accessor.get_schema_version(registry_data)}")
-
+ raise RegistryError(
+ "No accessor available for the provided registry data format"
+ )
+
+ logger.debug(
+ f"Loaded registry data with schema version: {self._accessor.get_schema_version(registry_data)}"
+ )
+
def load_registry_from_file(self, file_path: str) -> None:
"""Load registry data from a JSON file.
@@ -63,12 +71,13 @@ def load_registry_from_file(self, file_path: str) -> None:
"""
try:
import json
- with open(file_path, 'r', encoding='utf-8') as f:
+
+ with open(file_path, "r", encoding="utf-8") as f:
registry_data = json.load(f)
self.load_registry_data(registry_data)
except (IOError, json.JSONDecodeError) as e:
raise RegistryError(f"Failed to load registry from file {file_path}: {e}")
-
+
def is_loaded(self) -> bool:
"""Check if registry data is loaded.
@@ -76,7 +85,7 @@ def is_loaded(self) -> bool:
bool: True if registry data is loaded and accessible.
"""
return self._registry_data is not None and self._accessor is not None
-
+
def get_package_info(self, package_name: str) -> Optional[Dict[str, Any]]:
"""Get information about a package.
@@ -92,21 +101,23 @@ def get_package_info(self, package_name: str) -> Optional[Dict[str, Any]]:
"""
if not self.is_loaded():
raise RegistryError("Registry data not loaded")
-
+
if not self._accessor.package_exists(self._registry_data, package_name):
logger.warning(f"Package '{package_name}' does not exist in the registry.")
return None
-
- versions = self._accessor.get_package_versions(self._registry_data, package_name)
- metadata = self._accessor.get_package_metadata(self._registry_data, package_name)
-
- return {
- 'name': package_name,
- 'versions': versions,
- 'metadata': metadata
- }
-
- def package_exists(self, package_name: str, repo_name: Optional[str] = None) -> bool:
+
+ versions = self._accessor.get_package_versions(
+ self._registry_data, package_name
+ )
+ metadata = self._accessor.get_package_metadata(
+ self._registry_data, package_name
+ )
+
+ return {"name": package_name, "versions": versions, "metadata": metadata}
+
+ def package_exists(
+ self, package_name: str, repo_name: Optional[str] = None
+ ) -> bool:
"""Check if a package exists in the registry.
Args:
@@ -127,8 +138,10 @@ def package_exists(self, package_name: str, repo_name: Optional[str] = None) ->
if repo is None and self.has_repository_name(package_name):
repo, pkg = package_name.split(":", 1)
return self._accessor.package_exists(self._registry_data, pkg, repo)
-
- def get_package_versions(self, package_name: str, repo_name: Optional[str] = None) -> List[str]:
+
+ def get_package_versions(
+ self, package_name: str, repo_name: Optional[str] = None
+ ) -> List[str]:
"""Get all versions for a package.
Args:
@@ -151,7 +164,7 @@ def get_package_versions(self, package_name: str, repo_name: Optional[str] = Non
if not self.package_exists(pkg, repo):
raise RegistryError(f"Package '{pkg}' does not exist in the registry")
return self._accessor.get_package_versions(self._registry_data, pkg, repo)
-
+
def get_all_package_names(self, repo_name: Optional[str] = None) -> List[str]:
"""Get all package names from registry, optionally for a specific repository.
@@ -167,8 +180,13 @@ def get_all_package_names(self, repo_name: Optional[str] = None) -> List[str]:
if not self.is_loaded():
raise RegistryError("Registry data not loaded")
return self._accessor.get_all_package_names(self._registry_data, repo_name)
-
- def get_package_dependencies(self, package_name: str, version: Optional[str] = None, repo_name: Optional[str] = None) -> Dict[str, Any]:
+
+ def get_package_dependencies(
+ self,
+ package_name: str,
+ version: Optional[str] = None,
+ repo_name: Optional[str] = None,
+ ) -> Dict[str, Any]:
"""Get reconstructed dependencies for a specific package version.
Args:
@@ -188,9 +206,13 @@ def get_package_dependencies(self, package_name: str, version: Optional[str] = N
repo = repo_name
if repo is None and self.has_repository_name(package_name):
repo, pkg = package_name.split(":", 1)
- return self._accessor.get_package_dependencies(self._registry_data, pkg, version, repo)
+ return self._accessor.get_package_dependencies(
+ self._registry_data, pkg, version, repo
+ )
- def get_package_version_info(self, package_name: str, version: str, repo_name: Optional[str] = None) -> Optional[Dict[str, Any]]:
+ def get_package_version_info(
+ self, package_name: str, version: str, repo_name: Optional[str] = None
+ ) -> Optional[Dict[str, Any]]:
"""Get metadata for a specific version of a package.
Args:
@@ -213,9 +235,13 @@ def get_package_version_info(self, package_name: str, version: str, repo_name: O
repo, pkg = package_name.split(":", 1)
if not self.package_exists(pkg, repo):
raise RegistryError(f"Package '{pkg}' does not exist in the registry")
- return self._accessor.get_package_version_info(self._registry_data, pkg, version, repo)
-
- def get_package_uri(self, package_name: str, version: str, repo_name: Optional[str] = None) -> Optional[str]:
+ return self._accessor.get_package_version_info(
+ self._registry_data, pkg, version, repo
+ )
+
+ def get_package_uri(
+ self, package_name: str, version: str, repo_name: Optional[str] = None
+ ) -> Optional[str]:
"""Get the URI for a specific version of a package.
Args:
@@ -240,7 +266,12 @@ def get_package_uri(self, package_name: str, version: str, repo_name: Optional[s
raise RegistryError(f"Package '{pkg}' does not exist in the registry")
return self._accessor.get_package_uri(self._registry_data, pkg, version, repo)
- def find_compatible_version(self, package_name: str, version_constraint: Optional[str] = None, repo_name: Optional[str] = None) -> Optional[str]:
+ def find_compatible_version(
+ self,
+ package_name: str,
+ version_constraint: Optional[str] = None,
+ repo_name: Optional[str] = None,
+ ) -> Optional[str]:
"""Find a compatible version for a package given a version constraint.
Args:
@@ -260,8 +291,10 @@ def find_compatible_version(self, package_name: str, version_constraint: Optiona
repo = repo_name
if repo is None and self.has_repository_name(package_name):
repo, pkg = package_name.split(":", 1)
- if hasattr(self._accessor, 'find_compatible_version'):
- return self._accessor.find_compatible_version(self._registry_data, pkg, version_constraint, repo)
+ if hasattr(self._accessor, "find_compatible_version"):
+ return self._accessor.find_compatible_version(
+ self._registry_data, pkg, version_constraint, repo
+ )
else:
# Fallback for accessors without this method
versions = self.get_package_versions(pkg, repo)
@@ -270,13 +303,24 @@ def find_compatible_version(self, package_name: str, version_constraint: Optiona
if not version_constraint:
return versions[-1]
compatible_versions = [
- v for v in sorted(versions, key=lambda x: tuple(int(p) if p.isdigit() else p for p in x.split('.')), reverse=True)
- if VersionConstraintValidator.is_version_compatible(v, version_constraint)[0]
+ v
+ for v in sorted(
+ versions,
+ key=lambda x: tuple(
+ int(p) if p.isdigit() else p for p in x.split(".")
+ ),
+ reverse=True,
+ )
+ if VersionConstraintValidator.is_version_compatible(
+ v, version_constraint
+ )[0]
]
if not compatible_versions:
- raise VersionConstraintError(f"No compatible version found for '{pkg}' with constraint '{version_constraint}'")
+ raise VersionConstraintError(
+ f"No compatible version found for '{pkg}' with constraint '{version_constraint}'"
+ )
return compatible_versions[0]
-
+
def validate_package_exists(self, package_name: str) -> Tuple[bool, Optional[str]]:
"""Validate that a package exists in the registry.
@@ -291,18 +335,20 @@ def validate_package_exists(self, package_name: str) -> Tuple[bool, Optional[str
try:
if not self.is_loaded():
raise RegistryError("Registry data not loaded")
-
+
if self.package_exists(package_name):
return True, None
else:
return False, f"Package '{package_name}' not found in registry"
-
+
except RegistryError as e:
return False, f"Registry error: {e}"
except Exception as e:
return False, f"Unexpected error checking package existence: {e}"
-
- def validate_package_version(self, package_name: str, version: str) -> Tuple[bool, Optional[str]]:
+
+ def validate_package_version(
+ self, package_name: str, version: str
+ ) -> Tuple[bool, Optional[str]]:
"""Validate that a specific version of a package exists.
Args:
@@ -317,23 +363,28 @@ def validate_package_version(self, package_name: str, version: str) -> Tuple[boo
try:
if not self.is_loaded():
raise RegistryError("Registry data not loaded")
-
+
versions = self.get_package_versions(package_name)
if not versions:
return False, f"Package '{package_name}' not found in registry"
-
+
if version in versions:
return True, None
else:
- available_versions = ', '.join(versions)
- return False, f"Version '{version}' of package '{package_name}' not found. Available versions: {available_versions}"
-
+ available_versions = ", ".join(versions)
+ return (
+ False,
+ f"Version '{version}' of package '{package_name}' not found. Available versions: {available_versions}",
+ )
+
except RegistryError as e:
return False, f"Registry error: {e}"
except Exception as e:
return False, f"Unexpected error checking package version: {e}"
-
- def validate_version_compatibility(self, package_name: str, version_constraint: str) -> Tuple[bool, Optional[str]]:
+
+ def validate_version_compatibility(
+ self, package_name: str, version_constraint: str
+ ) -> Tuple[bool, Optional[str]]:
"""Validate that a version constraint can be satisfied by available package versions.
Args:
@@ -348,22 +399,27 @@ def validate_version_compatibility(self, package_name: str, version_constraint:
try:
if not self.is_loaded():
raise RegistryError("Registry data not loaded")
-
+
versions = self.get_package_versions(package_name)
if not versions:
return False, f"Package '{package_name}' not found in registry"
-
+
# Use VersionConstraintValidator from utils
for v in versions:
- is_compatible, error = VersionConstraintValidator.is_version_compatible(v, version_constraint)
+ is_compatible, error = VersionConstraintValidator.is_version_compatible(
+ v, version_constraint
+ )
if is_compatible:
return True, None
- available_versions = ', '.join(versions)
- return False, f"No version of '{package_name}' satisfies constraint {version_constraint}. Available versions: {available_versions}"
-
+ available_versions = ", ".join(versions)
+ return (
+ False,
+ f"No version of '{package_name}' satisfies constraint {version_constraint}. Available versions: {available_versions}",
+ )
+
except Exception as e:
return False, f"Error checking version compatibility: {e}"
-
+
def get_missing_packages(self, package_names: List[str]) -> List[str]:
"""Get list of packages that don't exist in the registry.
@@ -378,14 +434,16 @@ def get_missing_packages(self, package_names: List[str]) -> List[str]:
"""
if not self.is_loaded():
raise RegistryError("Registry data not loaded")
-
+
missing = []
for package_name in package_names:
if not self.package_exists(package_name):
missing.append(package_name)
return missing
-
- def validate_dependency_list(self, dependencies: List[str]) -> Tuple[bool, List[str]]:
+
+ def validate_dependency_list(
+ self, dependencies: List[str]
+ ) -> Tuple[bool, List[str]]:
"""Validate a list of package dependencies against the registry.
Args:
@@ -401,16 +459,16 @@ def validate_dependency_list(self, dependencies: List[str]) -> Tuple[bool, List[
"""
if not self.is_loaded():
raise RegistryError("Registry data not loaded")
-
+
errors = []
-
+
for package_name in dependencies:
valid, error = self.validate_package_exists(package_name)
if not valid:
errors.append(error)
-
+
return len(errors) == 0, errors
-
+
def get_registry_statistics(self) -> Dict[str, int]:
"""Get statistics about the registry.
@@ -423,33 +481,35 @@ def get_registry_statistics(self) -> Dict[str, int]:
try:
if not self.is_loaded():
raise RegistryError("Registry data not loaded")
-
+
all_packages = self.get_all_package_names()
total_packages = len(all_packages)
-
+
total_versions = 0
for package_name in all_packages:
versions = self.get_package_versions(package_name)
total_versions += len(versions)
-
+
return {
- 'total_packages': total_packages,
- 'total_versions': total_versions,
- 'average_versions_per_package': total_versions / total_packages if total_packages > 0 else 0
+ "total_packages": total_packages,
+ "total_versions": total_versions,
+ "average_versions_per_package": total_versions / total_packages
+ if total_packages > 0
+ else 0,
}
except RegistryError:
return {
- 'total_packages': 0,
- 'total_versions': 0,
- 'average_versions_per_package': 0
+ "total_packages": 0,
+ "total_versions": 0,
+ "average_versions_per_package": 0,
}
except Exception:
return {
- 'total_packages': 0,
- 'total_versions': 0,
- 'average_versions_per_package': 0
+ "total_packages": 0,
+ "total_versions": 0,
+ "average_versions_per_package": 0,
}
-
+
def get_registry_data(self) -> Optional[Dict[str, Any]]:
"""Get the raw registry data.
@@ -462,7 +522,7 @@ def get_registry_data(self) -> Optional[Dict[str, Any]]:
if not self.is_loaded():
raise RegistryError("Registry data not loaded")
return self._registry_data
-
+
def get_schema_version(self) -> Optional[str]:
"""Get the schema version of the loaded registry data.
@@ -475,7 +535,7 @@ def get_schema_version(self) -> Optional[str]:
if not self.is_loaded():
raise RegistryError("Registry data not loaded")
return self._accessor.get_schema_version(self._registry_data)
-
+
def list_repositories(self) -> List[str]:
"""List all repository names in the loaded registry.
@@ -529,11 +589,13 @@ def has_repository_name(self, pkg_name: str) -> bool:
"""
if not self.is_loaded():
raise RegistryError("Registry data not loaded")
-
+
repo_name_candidate = pkg_name.split(":")[0]
return self.repository_exists(repo_name_candidate)
- def get_package_by_repo(self, repo_name: str, package_name: str) -> Optional[Dict[str, Any]]:
+ def get_package_by_repo(
+ self, repo_name: str, package_name: str
+ ) -> Optional[Dict[str, Any]]:
"""Get a package by repository and package name.
Args:
@@ -546,5 +608,7 @@ def get_package_by_repo(self, repo_name: str, package_name: str) -> Optional[Dic
"""
if not self.is_loaded():
raise RegistryError("Registry data not loaded")
-
- return self._accessor.get_package_by_repo(self._registry_data, repo_name, package_name)
+
+ return self._accessor.get_package_by_repo(
+ self._registry_data, repo_name, package_name
+ )
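For readers skimming the reformatting above, the sketch below shows how the touched `RegistryService` query methods fit together. It is a minimal sketch, not part of the diff: it assumes a service instance whose registry data is already loaded (loading is handled elsewhere in `registry_service.py`), and the dependency names and the `>=1.0.0` constraint are placeholders.

```python
from typing import List

from hatch_validator.registry.registry_service import RegistryService


def summarize_registry(service: RegistryService, dependencies: List[str]) -> None:
    """Probe an already-loaded service with the query methods reformatted above."""
    # Validate the whole dependency list first; errors are human-readable strings.
    ok, errors = service.validate_dependency_list(dependencies)
    for err in errors:
        print(f"dependency problem: {err}")

    for name in dependencies:
        if not service.package_exists(name):
            continue
        versions = service.get_package_versions(name)
        # find_compatible_version can raise VersionConstraintError when nothing
        # satisfies the constraint; '>=1.0.0' is only an example constraint.
        best = service.find_compatible_version(name, ">=1.0.0")
        print(f"{name}: {len(versions)} versions, '>=1.0.0' resolves to {best}")

    stats = service.get_registry_statistics()
    print(f"{stats['total_packages']} packages, {stats['total_versions']} versions")
```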
diff --git a/hatch_validator/registry/v1_1_0/registry_accessor.py b/hatch_validator/registry/v1_1_0/registry_accessor.py
index 223020c..d2cff0e 100644
--- a/hatch_validator/registry/v1_1_0/registry_accessor.py
+++ b/hatch_validator/registry/v1_1_0/registry_accessor.py
@@ -2,37 +2,40 @@
from hatch_validator.registry.registry_accessor_base import RegistryAccessorBase
from hatch_validator.utils.version_utils import VersionConstraintValidator
+
class RegistryAccessor(RegistryAccessorBase):
"""Registry accessor for schema version 1.1.0.
-
+
Handles the CrackingShells Package Registry format with repositories
containing packages with versions.
"""
-
+
def can_handle(self, registry_data: Dict[str, Any]) -> bool:
"""Check if this accessor can handle the given registry data.
-
+
Args:
registry_data (Dict[str, Any]): Registry data to check.
-
+
Returns:
bool: True if this accessor can handle the data.
"""
- schema_version = registry_data.get('registry_schema_version', '')
- return schema_version.startswith('1.1.')
-
+ schema_version = registry_data.get("registry_schema_version", "")
+ return schema_version.startswith("1.1.")
+
def get_schema_version(self, registry_data: Dict[str, Any]) -> str:
"""Get the schema version from registry data.
-
+
Args:
registry_data (Dict[str, Any]): Registry data.
-
+
Returns:
str: Schema version string.
"""
- return registry_data.get('registry_schema_version', 'unknown')
-
- def get_all_package_names(self, registry_data: Dict[str, Any], repo_name: Optional[str] = None) -> List[str]:
+ return registry_data.get("registry_schema_version", "unknown")
+
+ def get_all_package_names(
+ self, registry_data: Dict[str, Any], repo_name: Optional[str] = None
+ ) -> List[str]:
"""Get all package names from all repositories or a specific repository in the registry data.
Args:
@@ -42,17 +45,22 @@ def get_all_package_names(self, registry_data: Dict[str, Any], repo_name: Option
List[str]: List of package names.
"""
package_names = []
- repos = registry_data.get('repositories', [])
+ repos = registry_data.get("repositories", [])
for repo in repos:
- if repo_name and repo.get('name') != repo_name:
+ if repo_name and repo.get("name") != repo_name:
continue
- for package in repo.get('packages', []):
- name = package.get('name')
+ for package in repo.get("packages", []):
+ name = package.get("name")
if name:
package_names.append(name)
return package_names
- def package_exists(self, registry_data: Dict[str, Any], package_name: str, repo_name: Optional[str] = None) -> bool:
+ def package_exists(
+ self,
+ registry_data: Dict[str, Any],
+ package_name: str,
+ repo_name: Optional[str] = None,
+ ) -> bool:
"""Check if a package exists in the registry, optionally in a specific repo.
Args:
@@ -66,7 +74,12 @@ def package_exists(self, registry_data: Dict[str, Any], package_name: str, repo_
return package_name in self.list_packages(registry_data, repo_name)
return package_name in self.get_all_package_names(registry_data, repo_name=None)
- def get_package_versions(self, registry_data: Dict[str, Any], package_name: str, repo_name: Optional[str] = None) -> List[str]:
+ def get_package_versions(
+ self,
+ registry_data: Dict[str, Any],
+ package_name: str,
+ repo_name: Optional[str] = None,
+ ) -> List[str]:
"""Get all versions for a package, optionally in a specific repo.
Args:
@@ -76,16 +89,25 @@ def get_package_versions(self, registry_data: Dict[str, Any], package_name: str,
Returns:
List[str]: List of version strings.
"""
- repos = registry_data.get('repositories', [])
+ repos = registry_data.get("repositories", [])
for repo in repos:
- if repo_name and repo.get('name') != repo_name:
+ if repo_name and repo.get("name") != repo_name:
continue
- for pkg in repo.get('packages', []):
- if pkg.get('name') == package_name:
- return [ver.get('version') for ver in pkg.get('versions', []) if ver.get('version')]
+ for pkg in repo.get("packages", []):
+ if pkg.get("name") == package_name:
+ return [
+ ver.get("version")
+ for ver in pkg.get("versions", [])
+ if ver.get("version")
+ ]
return []
- def get_package_metadata(self, registry_data: Dict[str, Any], package_name: str, repo_name: Optional[str] = None) -> Dict[str, Any]:
+ def get_package_metadata(
+ self,
+ registry_data: Dict[str, Any],
+ package_name: str,
+ repo_name: Optional[str] = None,
+ ) -> Dict[str, Any]:
"""Get metadata for a package, optionally in a specific repo.
Args:
@@ -95,18 +117,24 @@ def get_package_metadata(self, registry_data: Dict[str, Any], package_name: str,
Returns:
Dict[str, Any]: Package metadata.
"""
- repos = registry_data.get('repositories', [])
+ repos = registry_data.get("repositories", [])
for repo in repos:
- if repo_name and repo.get('name') != repo_name:
+ if repo_name and repo.get("name") != repo_name:
continue
- for pkg in repo.get('packages', []):
- if pkg.get('name') == package_name:
+ for pkg in repo.get("packages", []):
+ if pkg.get("name") == package_name:
return pkg
return {}
- def get_package_version_info(self, registry_data: Dict[str, Any], package_name: str, version: str, repo_name: Optional[str] = None) -> Dict[str, Any]:
+ def get_package_version_info(
+ self,
+ registry_data: Dict[str, Any],
+ package_name: str,
+ version: str,
+ repo_name: Optional[str] = None,
+ ) -> Dict[str, Any]:
"""Get metadata for a specific package version.
-
+
Args:
registry_data (Dict[str, Any]): Registry data.
package_name (str): Package name.
@@ -119,20 +147,26 @@ def get_package_version_info(self, registry_data: Dict[str, Any], package_name:
package_data = self.get_package_metadata(registry_data, package_name, repo_name)
if not package_data:
return {}
-
- versions = package_data.get('versions', [])
+
+ versions = package_data.get("versions", [])
for v in versions:
- if v.get('version') == version:
+ if v.get("version") == version:
return v
-
+
return {}
- def get_package_dependencies(self, registry_data: Dict[str, Any], package_name: str, version: str = None, repo_name: Optional[str] = None) -> Dict[str, Any]:
+ def get_package_dependencies(
+ self,
+ registry_data: Dict[str, Any],
+ package_name: str,
+ version: str = None,
+ repo_name: Optional[str] = None,
+ ) -> Dict[str, Any]:
"""Get reconstructed HATCH dependencies for a specific package version.
-
+
This method reconstructs the complete dependency information from the differential
storage format used in the registry.
-
+
Args:
registry_data (Dict[str, Any]): Registry data.
package_name (str): Package name.
@@ -145,51 +179,52 @@ def get_package_dependencies(self, registry_data: Dict[str, Any], package_name:
package_data = self.get_package_metadata(registry_data, package_name, repo_name)
if not package_data:
return {}
-
- versions = package_data.get('versions', [])
+
+ versions = package_data.get("versions", [])
if not versions:
return {}
-
+
# Find the specific version or use latest
version_info = None
if version:
for v in versions:
- if v.get('version') == version:
+ if v.get("version") == version:
version_info = v
break
else:
# Use latest version (last in list)
version_info = versions[-1]
-
+
if not version_info:
return {}
-
+
return self._reconstruct_package_version(package_data, version_info)
-
- def _reconstruct_package_version(self, package: Dict[str, Any], version_info: Dict[str, Any]) -> Dict[str, Any]:
+
+ def _reconstruct_package_version(
+ self, package: Dict[str, Any], version_info: Dict[str, Any]
+ ) -> Dict[str, Any]:
"""Reconstruct complete package metadata for a specific version by walking the diff tree.
-
+
This method follows the differential storage approach where each version contains
only the changes from its base version.
-
+
Args:
package (Dict[str, Any]): Package object from the registry.
version_info (Dict[str, Any]): Specific version information.
-
+
Returns:
Dict[str, Any]: Reconstructed package metadata including dependencies and compatibility.
- Contains keys: name, version, dependencies (hatch)
"""
- version_chain = []
package_versions = package.get("versions", [])
-
+
# Initialize with empty metadata
reconstructed = {
"name": package["name"],
"version": version_info["version"],
- "dependencies": []
+ "dependencies": [],
}
-
+
# Apply changes from oldest to newest (reverse the chain)
# Given that new versions are always appended to the end of the list during package updates,
# we can iterate from the start.
@@ -198,44 +233,59 @@ def _reconstruct_package_version(self, package: Dict[str, Any], version_info: Di
# Add new dependencies
for dep in ver.get("hatch_dependencies_added", []):
reconstructed["dependencies"].append(dep)
-
+
# Remove dependencies
for dep_name in ver.get("hatch_dependencies_removed", []):
reconstructed["dependencies"] = [
- d for d in reconstructed["dependencies"]
+ d
+ for d in reconstructed["dependencies"]
if d.get("name") != dep_name
]
-
+
# Modify dependencies
for mod_dep in ver.get("hatch_dependencies_modified", []):
for i, dep in enumerate(reconstructed["dependencies"]):
if dep.get("name") == mod_dep.get("name"):
reconstructed["dependencies"][i] = mod_dep
break
-
+
return reconstructed
- def get_package_uri(self, registry_data: Dict[str, Any], package_name: str, version: str = None, repo_name: Optional[str] = None) -> Optional[str]:
+ def get_package_uri(
+ self,
+ registry_data: Dict[str, Any],
+ package_name: str,
+ version: str = None,
+ repo_name: Optional[str] = None,
+ ) -> Optional[str]:
"""Get the URI for a specific package version.
-
+
Args:
registry_data (Dict[str, Any]): Registry data.
package_name (str): Package name.
version (str, optional): Package version. If None, uses latest version.
repo_name (str, optional): Repository name. If None, uses default repository.
-
+
Returns:
Optional[str]: URI for the package version, or None if not found.
"""
- package_version_data = self.get_package_version_info(registry_data, package_name, version, repo_name)
+ package_version_data = self.get_package_version_info(
+ registry_data, package_name, version, repo_name
+ )
if not package_version_data:
return None
- return package_version_data.get('release_uri')
+ return package_version_data.get("release_uri")
- def find_compatible_version(self, registry_data: Dict[str, Any], package_name: str, version_constraint: str = None, repo_name: Optional[str] = None) -> Optional[str]:
+ def find_compatible_version(
+ self,
+ registry_data: Dict[str, Any],
+ package_name: str,
+ version_constraint: str = None,
+ repo_name: Optional[str] = None,
+ ) -> Optional[str]:
"""Find a compatible version for a package given a version constraint.
-
+
Args:
registry_data (Dict[str, Any]): Registry data.
package_name (str): Package name.
@@ -254,12 +304,21 @@ def find_compatible_version(self, registry_data: Dict[str, Any], package_name: s
# Use VersionConstraintValidator to filter compatible versions (prefer highest)
compatible_versions = [
- v for v in sorted(versions, key=lambda x: tuple(int(p) if p.isdigit() else p for p in x.split('.')), reverse=True)
- if VersionConstraintValidator.is_version_compatible(v, version_constraint)[0]
+ v
+ for v in sorted(
+ versions,
+ key=lambda x: tuple(int(p) if p.isdigit() else p for p in x.split(".")),
+ reverse=True,
+ )
+ if VersionConstraintValidator.is_version_compatible(v, version_constraint)[
+ 0
+ ]
]
return compatible_versions[0] if compatible_versions else None
- def get_package_by_repo(self, registry_data: Dict[str, Any], repo_name: str, package_name: str) -> Optional[Dict[str, Any]]:
+ def get_package_by_repo(
+ self, registry_data: Dict[str, Any], repo_name: str, package_name: str
+ ) -> Optional[Dict[str, Any]]:
"""Get a package by repository and package name.
Args:
@@ -269,10 +328,10 @@ def get_package_by_repo(self, registry_data: Dict[str, Any], repo_name: str, pac
Returns:
Optional[Dict[str, Any]]: Package metadata or None if not found.
"""
- for repo in registry_data.get('repositories', []):
- if repo.get('name') == repo_name:
- for pkg in repo.get('packages', []):
- if pkg.get('name') == package_name:
+ for repo in registry_data.get("repositories", []):
+ if repo.get("name") == repo_name:
+ for pkg in repo.get("packages", []):
+ if pkg.get("name") == package_name:
return pkg
return None
@@ -281,11 +340,11 @@ def list_repositories(self, registry_data: Dict[str, Any]) -> List[str]:
Args:
registry_data (Dict[str, Any]): Registry data.
-
+
Returns:
List[str]: List of repository names.
"""
- return [repo.get('name') for repo in registry_data.get('repositories', [])]
+ return [repo.get("name") for repo in registry_data.get("repositories", [])]
def repository_exists(self, registry_data: Dict[str, Any], repo_name: str) -> bool:
"""Check if a repository exists in the registry.
@@ -293,11 +352,14 @@ def repository_exists(self, registry_data: Dict[str, Any], repo_name: str) -> bo
Args:
registry_data (Dict[str, Any]): Registry data.
repo_name (str): Repository name.
-
+
Returns:
bool: True if repository exists.
"""
- return any(repo.get('name') == repo_name for repo in registry_data.get('repositories', []))
+ return any(
+ repo.get("name") == repo_name
+ for repo in registry_data.get("repositories", [])
+ )
def list_packages(self, registry_data: Dict[str, Any], repo_name: str) -> List[str]:
"""List all package names in a given repository.
@@ -305,11 +367,11 @@ def list_packages(self, registry_data: Dict[str, Any], repo_name: str) -> List[s
Args:
registry_data (Dict[str, Any]): Registry data.
repo_name (str): Repository name.
-
+
Returns:
List[str]: List of package names in the repository.
"""
- for repo in registry_data.get('repositories', []):
- if repo.get('name') == repo_name:
- return [pkg.get('name') for pkg in repo.get('packages', [])]
- return []
\ No newline at end of file
+ for repo in registry_data.get("repositories", []):
+ if repo.get("name") == repo_name:
+ return [pkg.get("name") for pkg in repo.get("packages", [])]
+ return []
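The v1.1.0 accessor operates on plain dictionaries, so its behaviour is easy to probe in isolation. The sketch below is illustrative only: the toy registry data follows the shape this file reads (repositories → packages → versions, with differential `hatch_dependencies_added`/`removed`/`modified` entries); the package names and URLs are made up, and no fields beyond the ones read in this diff are implied.

```python
from hatch_validator.registry.v1_1_0.registry_accessor import RegistryAccessor

# Toy registry data in the 1.1.x shape; only fields the accessor touches are set.
registry_data = {
    "registry_schema_version": "1.1.0",
    "repositories": [
        {
            "name": "main",
            "packages": [
                {
                    "name": "demo-pkg",
                    "versions": [
                        {
                            "version": "1.0.0",
                            "release_uri": "https://example.org/demo-pkg-1.0.0.zip",
                            "hatch_dependencies_added": [
                                {"name": "base-lib", "version_constraint": ">=0.1.0"}
                            ],
                        },
                        {
                            "version": "1.1.0",
                            "release_uri": "https://example.org/demo-pkg-1.1.0.zip",
                            "hatch_dependencies_removed": ["base-lib"],
                        },
                    ],
                }
            ],
        }
    ],
}

accessor = RegistryAccessor()
assert accessor.can_handle(registry_data)
print(accessor.get_all_package_names(registry_data))             # ['demo-pkg']
print(accessor.get_package_versions(registry_data, "demo-pkg"))  # ['1.0.0', '1.1.0']
# Dependencies are reconstructed by replaying the per-version diffs in order,
# so version 1.1.0 ends up with an empty dependency list here.
print(accessor.get_package_dependencies(registry_data, "demo-pkg", "1.1.0"))
```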
diff --git a/hatch_validator/schemas/schema_cache.py b/hatch_validator/schemas/schema_cache.py
index aa8d8a4..78ce3d5 100644
--- a/hatch_validator/schemas/schema_cache.py
+++ b/hatch_validator/schemas/schema_cache.py
@@ -21,44 +21,44 @@
DEFAULT_VERSION = "v1.2.0" # Fallback if no version can be determined
# Import schema types from schema_fetcher
-from .schema_fetcher import SCHEMA_TYPES
+from .schema_fetcher import SCHEMA_TYPES # noqa: E402
class SchemaCache:
"""Manages local schema file storage and retrieval."""
-
+
def __init__(self, cache_dir: Path = CACHE_DIR):
"""Initialize the schema cache.
-
+
Args:
cache_dir (Path, optional): Directory to store cached schemas. Defaults to CACHE_DIR.
"""
self.cache_dir = cache_dir
self.info_file = cache_dir / "schema_info.json"
self.cache_dir.mkdir(parents=True, exist_ok=True)
-
+
def get_info(self) -> Dict[str, Any]:
"""Get cached schema information.
-
+
Returns:
Dict[str, Any]: Dictionary with schema info or empty dict if not available
"""
if not self.info_file.exists():
return {}
-
+
try:
with open(self.info_file, "r") as f:
return json.load(f)
except (json.JSONDecodeError, IOError) as e:
logger.error(f"Error reading cache info: {e}")
return {}
-
+
def update_info(self, info: Dict[str, Any]) -> bool:
"""Update the cached schema information.
-
+
Args:
info (Dict[str, Any]): Schema information to cache
-
+
Returns:
bool: True if update succeeded, False otherwise
"""
@@ -69,43 +69,43 @@ def update_info(self, info: Dict[str, Any]) -> bool:
except IOError as e:
logger.error(f"Error writing cache info: {e}")
return False
-
+
def is_fresh(self, max_age: int = DEFAULT_CACHE_TTL) -> bool:
"""Check if the cache is still fresh.
-
+
Args:
max_age (int, optional): Maximum age in seconds for the cache to be considered fresh. Defaults to DEFAULT_CACHE_TTL.
-
+
Returns:
bool: True if cache is fresh, False otherwise
"""
info = self.get_info()
if not info or "updated_at" not in info:
return False
-
+
try:
updated_str = info["updated_at"].replace("Z", "+00:00")
updated = datetime.fromisoformat(updated_str)
if updated.tzinfo is None:
updated = updated.replace(tzinfo=timezone.utc)
-
+
now = datetime.now(timezone.utc)
age = (now - updated).total_seconds()
-
+
return age < max_age
except (ValueError, TypeError):
return False
-
+
def get_schema_path(self, schema_type: str, version: str = None) -> Path:
"""Get the path where a schema should be stored.
-
+
Args:
schema_type (str): Type of schema ("package" or "registry")
version (str, optional): Schema version. If provided, schema will be stored in a version-specific folder. Defaults to None.
-
+
Returns:
Path: Path object for the schema file
-
+
Raises:
ValueError: If the schema type is unknown
"""
@@ -114,28 +114,28 @@ def get_schema_path(self, schema_type: str, version: str = None) -> Path:
# Base directory for this schema type
base_dir = self.cache_dir / schema_type
-
+
if version:
# Normalize version format (ensure v prefix)
- if not version.startswith('v'):
+ if not version.startswith("v"):
version = f"v{version}"
-
+
# Store in version-specific subfolder
schema_dir = base_dir / version
else:
# No version specified, use the main schema directory
schema_dir = base_dir
-
+
schema_dir.mkdir(parents=True, exist_ok=True)
return schema_dir / SCHEMA_TYPES[schema_type]["filename"]
-
+
def has_schema(self, schema_type: str, version: str = None) -> bool:
"""Check if a schema exists in the cache.
-
+
Args:
schema_type (str): Type of schema ("package" or "registry")
version (str, optional): Schema version to check. If None, checks for the default schema. Defaults to None.
-
+
Returns:
bool: True if schema exists in cache, False otherwise
"""
@@ -144,14 +144,16 @@ def has_schema(self, schema_type: str, version: str = None) -> bool:
return path.exists() and path.stat().st_size > 0
except ValueError:
return False
-
- def load_schema(self, schema_type: str, version: str = None) -> Optional[Dict[str, Any]]:
+
+ def load_schema(
+ self, schema_type: str, version: str = None
+ ) -> Optional[Dict[str, Any]]:
"""Load a schema from the cache.
-
+
Args:
schema_type (str): Type of schema ("package" or "registry")
version (str, optional): Schema version to load. If None, loads the default schema. Defaults to None.
-
+
Returns:
Optional[Dict[str, Any]]: Schema as a dictionary or None if not available
"""
@@ -159,22 +161,26 @@ def load_schema(self, schema_type: str, version: str = None) -> Optional[Dict[st
path = self.get_schema_path(schema_type, version)
if not path.exists():
return None
-
+
with open(path, "r") as f:
- logger.info(f"Loading cached schema {schema_type} version {version} from {path}")
+ logger.info(
+ f"Loading cached schema {schema_type} version {version} from {path}"
+ )
return json.load(f)
except (ValueError, json.JSONDecodeError, IOError) as e:
logger.error(f"Error loading cached schema: {e}")
return None
-
- def save_schema(self, schema_type: str, schema: Dict[str, Any], version: str = None) -> bool:
+
+ def save_schema(
+ self, schema_type: str, schema: Dict[str, Any], version: str = None
+ ) -> bool:
"""Save a schema to the cache.
-
+
Args:
schema_type (str): Type of schema ("package" or "registry")
schema (Dict[str, Any]): Schema data to save
version (str, optional): Schema version. If provided, schema will be stored in a version-specific folder. Defaults to None.
-
+
Returns:
bool: True if save succeeded, False otherwise
"""
@@ -186,21 +192,21 @@ def save_schema(self, schema_type: str, schema: Dict[str, Any], version: str = N
except (ValueError, IOError) as e:
logger.error(f"Error saving schema to cache: {e}")
return False
-
+
def get_latest_version(self, schema_type: str) -> str:
"""Get the latest known version of a schema type.
-
+
Args:
schema_type (str): Type of schema ("package" or "registry")
-
+
Returns:
str: Latest version string with 'v' prefix or default version if not found
"""
info = self.get_info()
version = info.get(f"latest_{schema_type}_version")
-
+
# Ensure version has 'v' prefix
- if version and not version.startswith('v'):
+ if version and not version.startswith("v"):
version = f"v{version}"
-
+
return version if version else DEFAULT_VERSION
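`SchemaCache` is a thin wrapper around a directory of JSON files, so the reformatted methods can be exercised against a temporary directory. A minimal sketch using only the methods visible in this hunk; the schema payload is a stand-in, not a real Hatch schema.

```python
import tempfile
from pathlib import Path

from hatch_validator.schemas.schema_cache import SchemaCache

# Point the cache at a throwaway directory so nothing touches the real cache.
cache = SchemaCache(cache_dir=Path(tempfile.mkdtemp()))

# Stand-in schema payload; a real payload would come from SchemaFetcher.
dummy_schema = {"title": "Dummy package schema", "type": "object"}

# Versioned saves land in a v-prefixed subfolder; the prefix is normalized.
cache.save_schema("package", dummy_schema, version="1.2.0")
print(cache.has_schema("package", "v1.2.0"))            # True
print(cache.load_schema("package", "v1.2.0")["title"])  # Dummy package schema

# With no cached info file, the latest version falls back to DEFAULT_VERSION.
print(cache.get_latest_version("package"))
```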
diff --git a/hatch_validator/schemas/schema_fetcher.py b/hatch_validator/schemas/schema_fetcher.py
index 6c9227d..d865d2a 100644
--- a/hatch_validator/schemas/schema_fetcher.py
+++ b/hatch_validator/schemas/schema_fetcher.py
@@ -16,7 +16,9 @@
# Configuration
GITHUB_API_BASE = "https://api.github.com/repos/CrackingShells/Hatch-Schemas"
-GITHUB_RELEASES_BASE = "https://github.com/CrackingShells/Hatch-Schemas/releases/download"
+GITHUB_RELEASES_BASE = (
+ "https://github.com/CrackingShells/Hatch-Schemas/releases/download"
+)
# Schema type definitions
SCHEMA_TYPES = {
@@ -27,26 +29,28 @@
"registry": {
"filename": "hatch_all_pkg_metadata_schema.json",
"tag_prefix": "schemas-registry-",
- }
+ },
}
class SchemaFetcher:
"""Handles network operations to retrieve schemas from GitHub."""
-
- def __init__(self, api_base: str = GITHUB_API_BASE, releases_base: str = GITHUB_RELEASES_BASE):
+
+ def __init__(
+ self, api_base: str = GITHUB_API_BASE, releases_base: str = GITHUB_RELEASES_BASE
+ ):
"""Initialize the schema fetcher.
-
+
Args:
api_base (str, optional): Base URL for GitHub API requests. Defaults to GITHUB_API_BASE.
releases_base (str, optional): Base URL for GitHub release downloads. Defaults to GITHUB_RELEASES_BASE.
"""
self.api_base = api_base
self.releases_base = releases_base
-
+
def get_releases(self) -> list:
"""Fetch GitHub releases information.
-
+
Returns:
list: List containing release data or empty list if fetch fails
"""
@@ -58,47 +62,45 @@ def get_releases(self) -> list:
except requests.RequestException as e:
logger.error(f"Error fetching releases: {e}")
return []
-
+
def extract_schema_info(self, releases: list) -> Dict[str, Any]:
"""Process GitHub releases data to extract schema information.
-
+
Args:
releases (list): List of release data from GitHub API
-
+
Returns:
Dict[str, Any]: Dictionary with extracted schema information
"""
from datetime import datetime, timezone
-
- info = {
- "updated_at": datetime.now(timezone.utc).isoformat()
- }
-
+
+ info = {"updated_at": datetime.now(timezone.utc).isoformat()}
+
for release in releases:
- tag = release.get('tag_name', '')
-
+ tag = release.get("tag_name", "")
+
for schema_type, config in SCHEMA_TYPES.items():
- prefix = config['tag_prefix']
+ prefix = config["tag_prefix"]
version_key = f"latest_{schema_type}_version"
-
+
# Only process the first (latest) release for each type
if tag.startswith(prefix) and version_key not in info:
- version = tag.replace(prefix, '')
+ version = tag.replace(prefix, "")
info[version_key] = version
info[schema_type] = {
- 'version': version,
- 'url': f"{self.releases_base}/{tag}/{config['filename']}",
- 'release_url': release.get('html_url', '')
+ "version": version,
+ "url": f"{self.releases_base}/{tag}/{config['filename']}",
+ "release_url": release.get("html_url", ""),
}
-
+
return info
-
+
def download_schema(self, url: str) -> Optional[Dict[str, Any]]:
"""Download a schema JSON file from URL.
-
+
Args:
url (str): URL to download the schema from
-
+
Returns:
Optional[Dict[str, Any]]: Schema as a dictionary or None if download fails
"""
@@ -110,28 +112,30 @@ def download_schema(self, url: str) -> Optional[Dict[str, Any]]:
except (requests.RequestException, json.JSONDecodeError) as e:
logger.error(f"Error downloading schema: {e}")
return None
-
- def download_specific_version(self, schema_type: str, version: str) -> Optional[Dict[str, Any]]:
+
+ def download_specific_version(
+ self, schema_type: str, version: str
+ ) -> Optional[Dict[str, Any]]:
"""Download a specific schema version directly.
-
+
Args:
schema_type (str): Type of schema ("package" or "registry")
version (str): Version to download, should include 'v' prefix
-
+
Returns:
Optional[Dict[str, Any]]: Schema as a dictionary or None if download fails
"""
if schema_type not in SCHEMA_TYPES:
logger.error(f"Unknown schema type: {schema_type}")
return None
-
+
# Ensure version has 'v' prefix
- if not version.startswith('v'):
+ if not version.startswith("v"):
version = f"v{version}"
-
+
config = SCHEMA_TYPES[schema_type]
tag = f"{config['tag_prefix']}{version}"
url = f"{self.releases_base}/{tag}/{config['filename']}"
-
+
logger.info(f"Downloading {schema_type} schema version {version} from {url}")
return self.download_schema(url)
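The fetcher only talks to GitHub, so the short sketch below needs network access. It is illustrative, uses only the calls visible in this hunk, and the `1.2.0` version string is an example rather than a guaranteed release.

```python
from hatch_validator.schemas.schema_fetcher import SchemaFetcher

fetcher = SchemaFetcher()

# Ask the GitHub API for the release list, then distill it into the
# latest-version/URL map that SchemaRetriever consumes.
releases = fetcher.get_releases()
info = fetcher.extract_schema_info(releases)
print(info.get("latest_package_version"), info.get("package", {}).get("url"))

# A specific version can also be pulled directly from the release assets;
# the 'v' prefix is optional because the fetcher normalizes it.
schema = fetcher.download_specific_version("package", "1.2.0")
print("downloaded" if schema else "not available")
```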
diff --git a/hatch_validator/schemas/schemas_retriever.py b/hatch_validator/schemas/schemas_retriever.py
index 5c6fecc..c535235 100644
--- a/hatch_validator/schemas/schemas_retriever.py
+++ b/hatch_validator/schemas/schemas_retriever.py
@@ -18,29 +18,32 @@
# Configure logging
logger = logging.getLogger("hatch.schema_retriever")
+
class SchemaRetriever:
"""Main class for retrieving and managing schemas."""
-
+
def __init__(self, cache_dir: Path = None):
"""Initialize the schema retriever.
-
+
Args:
cache_dir (Path, optional): Custom path to store cached schemas. If None, use default. Defaults to None.
"""
self.cache = SchemaCache(cache_dir or CACHE_DIR)
self.fetcher = SchemaFetcher()
-
- def get_schema(self, schema_type: str, version: str = "latest", force_update: bool = False) -> Optional[Dict[str, Any]]:
+
+ def get_schema(
+ self, schema_type: str, version: str = "latest", force_update: bool = False
+ ) -> Optional[Dict[str, Any]]:
"""Get a schema, either from cache or by downloading.
-
+
This is the main method for obtaining schema data. It first tries to get the schema from the cache,
and if not available or if updates are forced, it attempts to download it.
-
+
Args:
schema_type (str): Type of schema ("package" or "registry")
version (str, optional): Version of schema or "latest". Defaults to "latest".
force_update (bool, optional): If True, force check for updates regardless of cache status. Defaults to False.
-
+
Returns:
Optional[Dict[str, Any]]: Schema as a dictionary or None if not available
"""
@@ -48,41 +51,45 @@ def get_schema(self, schema_type: str, version: str = "latest", force_update: bo
if schema_type not in SCHEMA_TYPES:
logger.error(f"Unknown schema type: {schema_type}")
return None
- # For "latest", try to update cache if needed and return the cached version
+ # For "latest", try to update cache if needed and return the cached version
if version == "latest":
- if force_update or not self.cache.is_fresh() or not self.cache.has_schema(schema_type):
+ if (
+ force_update
+ or not self.cache.is_fresh()
+ or not self.cache.has_schema(schema_type)
+ ):
self.update_schemas(force=force_update)
-
+
# First try to get the latest version number
latest_version = self.cache.get_latest_version(schema_type)
-
+
# Try to load the schema from the version-specific folder first,
# fallback to the main folder if not found
schema = self.cache.load_schema(schema_type, latest_version)
if schema:
return schema
return self.cache.load_schema(schema_type)
- # For specific version, first check if it's already in the cache
- normalized_version = version if version.startswith('v') else f"v{version}"
+ # For specific version, first check if it's already in the cache
+ normalized_version = version if version.startswith("v") else f"v{version}"
if not force_update and self.cache.has_schema(schema_type, normalized_version):
return self.cache.load_schema(schema_type, normalized_version)
-
+
# If not in cache or force update, download it directly
schema_data = self.fetcher.download_specific_version(schema_type, version)
if schema_data:
# Cache the specific version in its own folder
self.cache.save_schema(schema_type, schema_data, normalized_version)
return schema_data
-
+
logger.error(f"Could not retrieve {schema_type} schema version {version}")
return None
-
+
def update_schemas(self, force: bool = False) -> bool:
"""Check for schema updates and download if needed.
-
+
Args:
force (bool, optional): If True, force update regardless of cache freshness. Defaults to False.
-
+
Returns:
bool: True if any schema was updated, False otherwise
"""
@@ -90,53 +97,53 @@ def update_schemas(self, force: bool = False) -> bool:
if not force and self.cache.is_fresh():
logger.debug("Cache is fresh, skipping update")
return False
-
+
# Get latest releases from GitHub
releases = self.fetcher.get_releases()
if not releases:
logger.warning("Could not retrieve GitHub releases")
return False
-
+
# Extract schema information from releases
schema_info = self.fetcher.extract_schema_info(releases)
if not schema_info:
logger.warning("No schema information found in releases")
return False
-
+
updated = False
-
+
# Process each schema type
for schema_type in SCHEMA_TYPES:
if schema_type not in schema_info:
continue
-
+
# Get schema URL
schema_url = schema_info.get(schema_type, {}).get("url")
if not schema_url:
continue
-
+
# Download schema
schema_data = self.fetcher.download_schema(schema_url)
if not schema_data:
continue
-
+
# Get the version
version = schema_info.get(f"latest_{schema_type}_version")
-
+
# Save to cache - both in the version-specific folder and main folder
if version:
# Save to version-specific folder
self.cache.save_schema(schema_type, schema_data, version)
-
+
# Also save to main folder (no version) for backward compatibility
if self.cache.save_schema(schema_type, schema_data):
updated = True
logger.info(f"Updated {schema_type} schema to version {version}")
-
+
# Update cache info if any schema was updated
if updated:
self.cache.update_info(schema_info)
-
+
return updated
@@ -144,26 +151,30 @@ def update_schemas(self, force: bool = False) -> bool:
schema_retriever = SchemaRetriever()
-def get_package_schema(version: str = "latest", force_update: bool = False) -> Optional[Dict[str, Any]]:
+def get_package_schema(
+ version: str = "latest", force_update: bool = False
+) -> Optional[Dict[str, Any]]:
"""Helper function to get the package schema.
-
+
Args:
version (str, optional): Version of the schema, or "latest". Defaults to "latest".
force_update (bool, optional): If True, force a check for updates. Defaults to False.
-
+
Returns:
Optional[Dict[str, Any]]: The package schema as a dictionary, or None if not available
"""
return schema_retriever.get_schema("package", version, force_update)
-def get_registry_schema(version: str = "latest", force_update: bool = False) -> Optional[Dict[str, Any]]:
+def get_registry_schema(
+ version: str = "latest", force_update: bool = False
+) -> Optional[Dict[str, Any]]:
"""Helper function to get the registry schema.
-
+
Args:
version (str, optional): Version of the schema, or "latest". Defaults to "latest".
force_update (bool, optional): If True, force a check for updates. Defaults to False.
-
+
Returns:
Optional[Dict[str, Any]]: The registry schema as a dictionary, or None if not available
"""
@@ -173,22 +184,22 @@ def get_registry_schema(version: str = "latest", force_update: bool = False) ->
# If run as script, perform a test
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
-
+
# Test functionality
print("Testing schema retriever...")
-
+
# Force update of schemas
updated = schema_retriever.update_schemas(force=True)
print(f"Schema update forced: {'Updated' if updated else 'No update needed'}")
-
+
# Load schemas
pkg_schema = get_package_schema()
reg_schema = get_registry_schema()
-
+
print(f"Package schema loaded: {'Yes' if pkg_schema else 'No'}")
if pkg_schema:
print(f"Package schema title: {pkg_schema.get('title')}")
-
+
print(f"Registry schema loaded: {'Yes' if reg_schema else 'No'}")
if reg_schema:
print(f"Registry schema title: {reg_schema.get('title')}")
diff --git a/hatch_validator/utils/dependency_graph.py b/hatch_validator/utils/dependency_graph.py
index d257e46..4b62003 100644
--- a/hatch_validator/utils/dependency_graph.py
+++ b/hatch_validator/utils/dependency_graph.py
@@ -11,19 +11,20 @@
class DependencyGraphError(Exception):
"""Exception raised for dependency graph related errors."""
+
pass
class DependencyGraph:
"""Utility class for working with dependency graphs.
-
+
Provides methods for building graphs, detecting cycles, and other
graph operations that are independent of schema version.
"""
-
+
def __init__(self, adjacency_list: Optional[Dict[str, List[Dict]]] = None):
"""Initialize the dependency graph.
-
+
Args:
adjacency_list (Dict[str, List[Dict]], optional): Initial adjacency list.
Maps package names to their direct dependencies. Each dependency is a dict
@@ -33,7 +34,7 @@ def __init__(self, adjacency_list: Optional[Dict[str, List[Dict]]] = None):
def to_dict(self) -> Dict[str, List[Dict]]:
"""Convert the graph to a dictionary representation.
-
+
Returns:
Dict[str, List[Dict]]: Adjacency list representation of the graph.
"""
@@ -41,61 +42,62 @@ def to_dict(self) -> Dict[str, List[Dict]]:
def __str__(self) -> str:
"""String representation of the dependency graph.
-
+
Returns:
str: String representation of the adjacency list.
"""
return str(self.to_dict())
-
+
def __repr__(self) -> str:
"""Official string representation for debugging."""
return f"{self.__class__.__name__}({self.to_dict()})"
-
+
def add_dependency(self, package: str, dependency: Dict) -> None:
"""Add a dependency relationship to the graph.
-
+
Args:
package (str): The package that depends on another package.
dependency (Dict): Dependency object with keys: name, version_constraint, resolved_version.
"""
if package not in self.adjacency_list:
self.adjacency_list[package] = []
-
+
dep_name = dependency.get("name")
if not dep_name:
raise ValueError("Dependency dict must contain 'name' key")
-
+
# Avoid duplicates by name and resolved_version
existing = any(
- d.get("name") == dep_name and d.get("resolved_version") == dependency.get("resolved_version")
+ d.get("name") == dep_name
+ and d.get("resolved_version") == dependency.get("resolved_version")
for d in self.adjacency_list[package]
)
if not existing:
self.adjacency_list[package].append(dependency)
-
+
def add_package(self, package: str) -> None:
"""Add a package to the graph without dependencies.
-
+
Args:
package (str): The package name to add.
"""
if package not in self.adjacency_list:
self.adjacency_list[package] = []
-
+
def _get_dependency_name(self, dependency: Dict) -> str:
"""Extract dependency name from dict format.
-
+
Args:
dependency (Dict): Dependency in dict format.
-
+
Returns:
str: The dependency name.
"""
return dependency.get("name", "")
-
+
def get_all_packages(self) -> Set[str]:
"""Get all packages in the graph.
-
+
Returns:
Set[str]: Set of all package names in the graph.
"""
@@ -104,14 +106,14 @@ def get_all_packages(self) -> Set[str]:
for dep in deps:
packages.add(self._get_dependency_name(dep))
return packages
-
+
def detect_cycles(self) -> Tuple[bool, List[List[str]]]:
"""Detect cycles in the dependency graph using DFS.
-
+
Uses depth-first search with three colors (white, gray, black) to detect
cycles in the directed graph. Gray nodes indicate a back edge which
forms a cycle.
-
+
Returns:
Tuple[bool, List[List[str]]]: A tuple containing:
- bool: Whether cycles were detected
@@ -121,13 +123,13 @@ def detect_cycles(self) -> Tuple[bool, List[List[str]]]:
colors = defaultdict(int)
cycles = []
path = []
-
+
def dfs(node: str) -> bool:
"""Depth-first search helper function.
-
+
Args:
node (str): Current node being visited.
-
+
Returns:
bool: True if a cycle is found from this node.
"""
@@ -137,41 +139,41 @@ def dfs(node: str) -> bool:
cycle = path[cycle_start:] + [node]
cycles.append(cycle)
return True
-
+
if colors[node] == 2: # Black - already processed
return False
-
+
# Mark as gray (visiting)
colors[node] = 1
path.append(node)
- # Visit all dependencies
+ # Visit all dependencies
for dep in self.adjacency_list.get(node, []):
dep_name = self._get_dependency_name(dep)
if dfs(dep_name):
# Continue searching for more cycles instead of returning immediately
pass
-
+
# Mark as black (visited)
colors[node] = 2
path.pop()
return False
-
+
# Check all nodes to find all cycles
for package in self.get_all_packages():
if colors[package] == 0: # White - unvisited
dfs(package)
-
+
return len(cycles) > 0, cycles
-
+
def topological_sort(self) -> Tuple[bool, List[str]]:
"""Perform topological sort of the dependency graph.
-
+
Returns packages in an order where dependencies come after their dependents.
It is possible users may expect the reverse order (dependencies before dependents),
but this implementation follows the standard convention. Simply reverse the result
if the reverse order is desired.
Only works for acyclic graphs.
-
+
Returns:
Tuple[bool, List[str]]: A tuple containing:
- bool: Whether the sort was successful (graph is acyclic)
@@ -181,123 +183,125 @@ def topological_sort(self) -> Tuple[bool, List[str]]:
has_cycles, _ = self.detect_cycles()
if has_cycles:
return False, []
-
+
# Kahn's algorithm
in_degree = defaultdict(int)
all_packages = self.get_all_packages()
-
+
# Calculate in-degrees
for package in all_packages:
if package not in in_degree:
- in_degree[package] = 0
+ in_degree[package] = 0
for package, deps in self.adjacency_list.items():
for dep in deps:
dep_name = self._get_dependency_name(dep)
in_degree[dep_name] += 1
-
+
# Start with packages that have no incoming edges
queue = deque([pkg for pkg in all_packages if in_degree[pkg] == 0])
result = []
-
+
while queue:
current = queue.popleft()
- result.append(current)
+ result.append(current)
# Remove edges from current package
for dep in self.adjacency_list.get(current, []):
dep_name = self._get_dependency_name(dep)
in_degree[dep_name] -= 1
if in_degree[dep_name] == 0:
queue.append(dep_name)
-
+
return len(result) == len(all_packages), result
-
+
def find_dependency_path(self, start: str, target: str) -> Optional[List[str]]:
"""Find a path from start package to target package.
-
+
Uses breadth-first search to find the shortest dependency path.
-
+
Args:
start (str): Starting package name.
target (str): Target package name.
-
+
Returns:
Optional[List[str]]: Path from start to target, or None if no path exists.
"""
if start == target:
return [start]
-
+
queue = deque([(start, [start])])
- visited = {start}
+ visited = {start}
while queue:
current, path = queue.popleft()
-
+
for dep in self.adjacency_list.get(current, []):
dep_name = self._get_dependency_name(dep)
if dep_name == target:
return path + [dep_name]
-
+
if dep_name not in visited:
visited.add(dep_name)
queue.append((dep_name, path + [dep_name]))
-
- return None
-
+
+ return None
+
@classmethod
- def from_dependency_dict(cls, dependencies: Dict[str, List[Dict]]) -> 'DependencyGraph':
+ def from_dependency_dict(
+ cls, dependencies: Dict[str, List[Dict]]
+ ) -> "DependencyGraph":
"""Create a dependency graph from a dependency dictionary.
-
+
Args:
dependencies (Dict[str, List[Dict]]): Dictionary mapping package names
to their direct dependencies.
-
+
Returns:
DependencyGraph: New dependency graph instance.
"""
return cls(adjacency_list=dict(dependencies))
-
+
def get_direct_dependencies(self, package: str) -> List[str]:
"""Get direct dependencies of a package.
-
+
Args:
package (str): Package name to get dependencies for.
-
+
Returns:
List[str]: List of direct dependency names.
"""
deps = self.adjacency_list.get(package, [])
- return [self._get_dependency_name(dep) for dep in deps]
-
+ return [self._get_dependency_name(dep) for dep in deps]
+
def get_direct_dependency_objects(self, package: str) -> List[Dict]:
"""Get direct dependency objects of a package.
-
+
Args:
package (str): Package name to get dependencies for.
-
+
Returns:
List[Dict]: List of direct dependency objects.
"""
return self.adjacency_list.get(package, []).copy()
-
+
def get_install_order_dependencies(self) -> List[Dict]:
"""Return install-ready dependency objects in topological order.
-
+
Returns a list of unique dependency objects in the order they should be installed,
with duplicates removed (keeping the first occurrence).
-
+
Returns:
List[Dict]: List of dependency objects with keys: name, version_constraint, resolved_version.
-
+
Raises:
DependencyGraphError: If the dependency graph contains cycles.
"""
ok, order = self.topological_sort()
if not ok:
raise DependencyGraphError("Dependency graph contains cycles")
-
+
# Collect all dependency objects in install order
seen = set()
result = []
-
+
for pkg in order:
for dep in self.adjacency_list.get(pkg, []):
dep_name = dep.get("name")
@@ -307,38 +311,40 @@ def get_install_order_dependencies(self) -> List[Dict]:
if dep_key not in seen:
seen.add(dep_key)
result.append(dep)
-
+
return result
-
+
def get_all_dependencies(self, package: str) -> Set[str]:
"""Get all transitive dependencies of a package.
-
+
Args:
package (str): Package name to get all dependencies for.
-
+
Returns:
Set[str]: Set of all transitive dependencies.
-
+
Raises:
DependencyGraphError: If the graph contains cycles.
"""
has_cycles, cycles = self.detect_cycles()
if has_cycles:
- raise DependencyGraphError(f"Cannot compute transitive dependencies: graph contains cycles: {cycles}")
-
+ raise DependencyGraphError(
+ f"Cannot compute transitive dependencies: graph contains cycles: {cycles}"
+ )
+
visited = set()
stack = [package]
-
+
while stack:
current = stack.pop()
if current in visited:
continue
- visited.add(current)
+ visited.add(current)
for dep in self.adjacency_list.get(current, []):
dep_name = self._get_dependency_name(dep)
if dep_name not in visited:
stack.append(dep_name)
-
+
# Remove the starting package from the result
visited.discard(package)
return visited
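The graph utility is self-contained, so the reformatting above can be sanity-checked with a toy graph. A minimal sketch using only methods shown in this file; the package names, constraints, and versions are made up.

```python
from hatch_validator.utils.dependency_graph import DependencyGraph

graph = DependencyGraph()

# Dependency objects carry name, version_constraint and resolved_version,
# matching the dict shape the class documents.
graph.add_dependency("app", {"name": "lib-a", "version_constraint": ">=1.0.0",
                             "resolved_version": "1.2.0"})
graph.add_dependency("lib-a", {"name": "lib-b", "version_constraint": ">=0.5.0",
                               "resolved_version": "0.6.1"})

has_cycles, cycles = graph.detect_cycles()
print(has_cycles, cycles)   # False, []

# Per the docstring, topological_sort lists dependents before their
# dependencies; reverse the list if the opposite order is wanted.
ok, order = graph.topological_sort()
print(ok, order)            # True, e.g. ['app', 'lib-a', 'lib-b']

# get_install_order_dependencies walks that order and returns the unique
# dependency objects it encounters.
for dep in graph.get_install_order_dependencies():
    print(dep["name"], dep["resolved_version"])
```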
diff --git a/hatch_validator/utils/hatch_dependency_graph.py b/hatch_validator/utils/hatch_dependency_graph.py
index f82a477..362c5fb 100644
--- a/hatch_validator/utils/hatch_dependency_graph.py
+++ b/hatch_validator/utils/hatch_dependency_graph.py
@@ -12,16 +12,19 @@
from hatch_validator.utils.dependency_graph import DependencyGraph
from hatch_validator.core.validation_context import ValidationContext
from hatch_validator.core.validation_strategy import ValidationError
-from hatch_validator.registry.registry_service import RegistryService, RegistryError
+from hatch_validator.registry.registry_service import RegistryService
from hatch_validator.package.package_service import PackageService
logger = logging.getLogger("hatch.hatch_dependency_graph")
logger.setLevel(logging.DEBUG)
+
class HatchDependencyGraphBuilder:
"""Builder for creating a Hatch dependency graph."""
- def __init__(self, package_service: PackageService, registry_service: RegistryService):
+ def __init__(
+ self, package_service: PackageService, registry_service: RegistryService
+ ):
"""Initialize the dependency graph builder.
Args:
@@ -31,7 +34,9 @@ def __init__(self, package_service: PackageService, registry_service: RegistrySe
self.package_service = package_service
self.registry_service = registry_service
- def _get_local_dep_pkg_metadata(self, dep: Dict, root_dir: Optional[Path] = None) -> Dict:
+ def _get_local_dep_pkg_metadata(
+ self, dep: Dict, root_dir: Optional[Path] = None
+ ) -> Dict:
"""Get the metadata for a local dependency.
This method retrieves the package metadata from the local dependency's metadata file.
@@ -47,15 +52,21 @@ def _get_local_dep_pkg_metadata(self, dep: Dict, root_dir: Optional[Path] = None
metadata_path = path / "hatch_metadata.json"
if not metadata_path.exists():
- logger.error(f"Local dependency metadata file does not exist: {metadata_path}")
- raise ValidationError(f"Local dependency metadata file does not exist: {metadata_path}")
-
- with open(metadata_path, 'r') as f:
+ logger.error(
+ f"Local dependency metadata file does not exist: {metadata_path}"
+ )
+ raise ValidationError(
+ f"Local dependency metadata file does not exist: {metadata_path}"
+ )
+
+ with open(metadata_path, "r") as f:
local_metadata = json.load(f)
return local_metadata
- def build_dependency_graph(self, hatch_dependencies: List[Dict], context: ValidationContext) -> 'DependencyGraph':
+ def build_dependency_graph(
+ self, hatch_dependencies: List[Dict], context: ValidationContext
+ ) -> "DependencyGraph":
"""Build a dependency graph from Hatch dependencies.
This method builds a complete dependency graph including all transitive dependencies
@@ -72,36 +83,44 @@ def build_dependency_graph(self, hatch_dependencies: List[Dict], context: Valida
pkg_name, _ = context.get_data("pending_update", ("current_package", None))
logger.debug(f"Building dependency graph for package: {pkg_name}")
graph.add_package(pkg_name)
-
+
processed = set()
for dep in hatch_dependencies:
if self.package_service.is_local_dependency(dep, context.package_dir):
- self._add_local_dependency_graph(pkg_name, dep, graph, context, context.package_dir)
+ self._add_local_dependency_graph(
+ pkg_name, dep, graph, context, context.package_dir, processed
+ )
else:
- self._add_remote_dependency_graph(pkg_name, dep, graph, context, processed)
+ self._add_remote_dependency_graph(
+ pkg_name, dep, graph, context, processed
+ )
return graph
def get_install_ready_dependencies(self, context: ValidationContext) -> List[Dict]:
"""Get install-ready Hatch dependencies in topological order.
-
+
This method builds the dependency graph and returns a list of dependency objects
in the order they should be installed, with resolved versions.
-
+
Args:
context (ValidationContext): Validation context containing package information.
-
+
Returns:
List[Dict]: List of dependency objects with keys: name, version_constraint, resolved_version.
-
+
Raises:
ValidationError: If there are validation errors during graph construction.
DependencyGraphError: If the dependency graph contains cycles.
"""
- graph = self.build_dependency_graph(self.package_service.get_dependencies().get("hatch", []), context)
+ graph = self.build_dependency_graph(
+ self.package_service.get_dependencies().get("hatch", []), context
+ )
return graph.get_install_order_dependencies()
- def _get_local_dependency_path(self, dep: Dict, root_dir: Optional[Path] = None) -> Path:
+ def _get_local_dependency_path(
+ self, dep: Dict, root_dir: Optional[Path] = None
+ ) -> Path:
"""Get the local file path for a local dependency.
Args:
@@ -111,7 +130,7 @@ def _get_local_dependency_path(self, dep: Dict, root_dir: Optional[Path] = None)
Returns:
Path: Path to the local dependency
"""
- dep_name = dep.get('name')
+ dep_name = dep.get("name")
path = Path(dep_name)
if not path.is_absolute():
if root_dir:
@@ -121,14 +140,22 @@ def _get_local_dependency_path(self, dep: Dict, root_dir: Optional[Path] = None)
if not path.is_dir():
logger.error(f"Local dependency path is not a directory: {path}")
raise ValidationError(f"Local dependency path is not a directory: {path}")
-
+
if not path.exists():
logger.error(f"Local dependency path does not exist: {path}")
raise ValidationError(f"Local dependency path does not exist: {path}")
-
+
return path
- def _add_local_dependency_graph(self, parent_pkg_name: str, dep: Dict, graph: DependencyGraph, context: ValidationContext, root_dir: Optional[Path] = None):
+ def _add_local_dependency_graph(
+ self,
+ parent_pkg_name: str,
+ dep: Dict,
+ graph: DependencyGraph,
+ context: ValidationContext,
+ root_dir: Optional[Path] = None,
+ processed: Set[str] = None,
+ ):
"""Add local dependency and its transitive dependencies to the graph.
Args:
@@ -138,35 +165,62 @@ def _add_local_dependency_graph(self, parent_pkg_name: str, dep: Dict, graph: De
context (ValidationContext): Validation context
root_dir (Path): Root directory of the package depending on this local dependency
"""
- try:
+ if processed is None:
+ processed = set()
+ try:
local_pkg_metadata = self._get_local_dep_pkg_metadata(dep, root_dir)
local_pkg_service = PackageService(local_pkg_metadata)
- local_pkg_name = local_pkg_service.get_field('name')
+ local_pkg_name = local_pkg_service.get_field("name")
+
+ if not local_pkg_name:
+ return
path = self._get_local_dependency_path(dep, root_dir)
remote_dep_obj = {
- "name": local_pkg_name,
- "version_constraint": dep.get('version_constraint'),
- "resolved_version": local_pkg_service.get_field('version'), # For local deps, use actual version
- "uri": f"file://{str(path)}"
- }
+ "name": local_pkg_name,
+ "version_constraint": dep.get("version_constraint"),
+ "resolved_version": local_pkg_service.get_field(
+ "version"
+ ), # For local deps, use actual version
+ "uri": f"file://{str(path)}",
+ }
graph.add_dependency(parent_pkg_name, remote_dep_obj)
+ if local_pkg_name in processed:
+ return
+
+ processed.add(local_pkg_name)
+
deps_obj = local_pkg_service.get_dependencies()
- hatch_deps = deps_obj.get('hatch', [])
+ hatch_deps = deps_obj.get("hatch", [])
for dep in hatch_deps:
if self.package_service.is_local_dependency(dep, path):
- self._add_local_dependency_graph(local_pkg_name, dep, graph, context, path)
+ self._add_local_dependency_graph(
+ local_pkg_name, dep, graph, context, path, processed
+ )
else:
- self._add_remote_dependency_graph(local_pkg_name, dep, graph, context)
+ self._add_remote_dependency_graph(
+ local_pkg_name, dep, graph, context, processed
+ )
except Exception as e:
- logger.error(f"Could not load metadata for local dependency '{local_pkg_name}': {e}")
- raise ValidationError(f"Could not load metadata for local dependency '{local_pkg_name}': {e}")
-
- def _add_remote_dependency_graph(self, parent_pkg_name: str, dep: Dict, graph: DependencyGraph, context: ValidationContext, processed: Set[str] = None):
+ logger.error(
+ f"Could not load metadata for local dependency '{local_pkg_name}': {e}"
+ )
+ raise ValidationError(
+ f"Could not load metadata for local dependency '{local_pkg_name}': {e}"
+ )
+
+ def _add_remote_dependency_graph(
+ self,
+ parent_pkg_name: str,
+ dep: Dict,
+ graph: DependencyGraph,
+ context: ValidationContext,
+        processed: Optional[Set[str]] = None,
+ ):
"""Add remote dependency and its transitive dependencies to the graph.
This method uses the registry to fetch the complete dependency information
@@ -181,37 +235,48 @@ def _add_remote_dependency_graph(self, parent_pkg_name: str, dep: Dict, graph: D
"""
if processed is None:
processed = set()
-        dep_name = dep.get('name')
-        if not dep_name or dep_name in processed:
+        dep_name = dep.get("name")
+        if not dep_name:
return
-
- processed.add(dep_name)
+
try:
-
- version_constraint = dep.get('version_constraint')
- compatible_version = self.registry_service.find_compatible_version(dep_name, version_constraint)
+ version_constraint = dep.get("version_constraint")
+ compatible_version = self.registry_service.find_compatible_version(
+ dep_name, version_constraint
+ )
# Create rich dependency object
remote_dep_obj = {
"name": dep_name,
"version_constraint": version_constraint,
"resolved_version": compatible_version,
- "uri": self.registry_service.get_package_uri(dep_name, compatible_version)
+ "uri": self.registry_service.get_package_uri(
+ dep_name, compatible_version
+ ),
}
graph.add_dependency(parent_pkg_name, remote_dep_obj)
-        hatch_deps_obj = self.registry_service.get_package_dependencies(dep_name, compatible_version)
-        hatch_deps = hatch_deps_obj.get('dependencies', [])
-        for remote_dep in hatch_deps:
-            remote_dep_name = remote_dep.get('name')
+        if dep_name in processed:
+            return
+        processed.add(dep_name)
+        hatch_deps_obj = self.registry_service.get_package_dependencies(
+            dep_name, compatible_version
+        )
+        hatch_deps = hatch_deps_obj.get("dependencies", [])
+
+        for remote_dep in hatch_deps:
+            remote_dep_name = remote_dep.get("name")
if remote_dep_name not in processed:
- self._add_remote_dependency_graph(dep_name, remote_dep, graph, context, processed)
+ self._add_remote_dependency_graph(
+ dep_name, remote_dep, graph, context, processed
+ )
except Exception as e:
logger.error(f"Error processing remote dependency '{dep_name}': {e}")
- raise ValidationError(f"Error processing remote dependency '{dep_name}': {e}")
-
+ raise ValidationError(
+ f"Error processing remote dependency '{dep_name}': {e}"
+ )
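Note for reviewers: the main behavioural change in this file is how the `processed` set is used. Threaded through `_add_local_dependency_graph` and `_add_remote_dependency_graph`, it records every dependency edge but expands a given package's own dependencies only once, so the recursion stays bounded even when packages share dependencies or reference each other; cycle detection proper is still left to the graph's `detect_cycles`. A minimal standalone sketch of that traversal pattern follows; the function, registry shape, and names are illustrative only, not part of hatch_validator:

```python
# Illustrative sketch: record every edge, but expand each package's dependencies once.
from typing import Dict, List, Optional, Set


def add_deps(
    parent: str,
    dep: Dict,
    graph: Dict[str, List[Dict]],
    registry: Dict[str, List[Dict]],
    processed: Optional[Set[str]] = None,
) -> None:
    if processed is None:
        processed = set()
    name = dep.get("name")
    if not name:
        return
    graph.setdefault(parent, []).append(dep)  # always record the edge
    if name in processed:  # but never re-expand an already-seen package
        return
    processed.add(name)
    for child in registry.get(name, []):  # walk transitive dependencies
        add_deps(name, child, graph, registry, processed)


registry = {"a": [{"name": "b"}], "b": [{"name": "a"}]}  # deliberately cyclic
graph: Dict[str, List[Dict]] = {}
add_deps("root", {"name": "a"}, graph, registry)
print(graph)  # terminates despite the a <-> b cycle
```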
diff --git a/hatch_validator/utils/version_utils.py b/hatch_validator/utils/version_utils.py
index 48362ec..7be2d88 100644
--- a/hatch_validator/utils/version_utils.py
+++ b/hatch_validator/utils/version_utils.py
@@ -5,34 +5,37 @@
"""
import re
-from typing import Dict, List, Set, Tuple, Optional
+from typing import List, Tuple, Optional
from packaging import version
from packaging.specifiers import SpecifierSet, InvalidSpecifier
class VersionConstraintError(Exception):
"""Exception raised for version constraint related errors."""
+
pass
class VersionConstraintValidator:
"""Utility class for validating version constraints.
-
+
Provides methods for parsing version constraint strings, validating them,
and checking compatibility between versions and constraints.
"""
-
+
# Common version constraint patterns
- VERSION_PATTERN = re.compile(r'^[0-9]+(?:\.[0-9]+)*(?:[-+][a-zA-Z0-9.-]*)?$')
- CONSTRAINT_PATTERN = re.compile(r'^[<>=!~^]*[0-9]+(?:\.[0-9]+)*(?:[-+][a-zA-Z0-9.-]*)?(?:\s*,\s*[<>=!~^]*[0-9]+(?:\.[0-9]+)*(?:[-+][a-zA-Z0-9.-]*)?)*$')
-
+ VERSION_PATTERN = re.compile(r"^[0-9]+(?:\.[0-9]+)*(?:[-+][a-zA-Z0-9.-]*)?$")
+ CONSTRAINT_PATTERN = re.compile(
+ r"^[<>=!~^]*[0-9]+(?:\.[0-9]+)*(?:[-+][a-zA-Z0-9.-]*)?(?:\s*,\s*[<>=!~^]*[0-9]+(?:\.[0-9]+)*(?:[-+][a-zA-Z0-9.-]*)?)*$"
+ )
+
@staticmethod
def validate_version(version_str: str) -> Tuple[bool, Optional[str]]:
"""Validate a version string.
-
+
Args:
version_str (str): Version string to validate (e.g., "1.2.3").
-
+
Returns:
Tuple[bool, Optional[str]]: A tuple containing:
- bool: Whether the version is valid
@@ -40,23 +43,23 @@ def validate_version(version_str: str) -> Tuple[bool, Optional[str]]:
"""
if not version_str or not isinstance(version_str, str):
return False, "Version must be a non-empty string"
-
+
try:
# Use packaging library to validate version
version.Version(version_str)
return True, None
except version.InvalidVersion as e:
return False, f"Invalid version format: {e}"
-
+
@staticmethod
def validate_constraint(constraint: str) -> Tuple[bool, Optional[str]]:
"""Validate a version constraint string.
-
+
Supports standard constraint operators like >=, <=, ==, !=, ~=, etc.
-
+
Args:
constraint (str): Version constraint string (e.g., ">=1.0.0,<2.0.0").
-
+
Returns:
Tuple[bool, Optional[str]]: A tuple containing:
- bool: Whether the constraint is valid
@@ -64,36 +67,43 @@ def validate_constraint(constraint: str) -> Tuple[bool, Optional[str]]:
"""
if not constraint or not isinstance(constraint, str):
return False, "Constraint must be a non-empty string"
-
+
try:
# Use packaging library to validate constraint
SpecifierSet(constraint)
return True, None
except InvalidSpecifier as e:
return False, f"Invalid constraint format: {e}"
-
+
@staticmethod
- def is_version_compatible(version_str: str, constraint: str) -> Tuple[bool, Optional[str]]:
+ def is_version_compatible(
+ version_str: str, constraint: str
+ ) -> Tuple[bool, Optional[str]]:
"""Check if a version satisfies a constraint.
-
+
Args:
version_str (str): Version string to check.
constraint (str): Version constraint to check against.
-
+
Returns:
Tuple[bool, Optional[str]]: A tuple containing:
- bool: True if version satisfies constraint
- Optional[str]: Error message if there's an issue, None otherwise
"""
# First validate both version and constraint
- version_valid, version_error = VersionConstraintValidator.validate_version(version_str)
+ version_valid, version_error = VersionConstraintValidator.validate_version(
+ version_str
+ )
if not version_valid:
return False, f"Invalid version: {version_error}"
-
- constraint_valid, constraint_error = VersionConstraintValidator.validate_constraint(constraint)
+
+ (
+ constraint_valid,
+ constraint_error,
+ ) = VersionConstraintValidator.validate_constraint(constraint)
if not constraint_valid:
return False, f"Invalid constraint: {constraint_error}"
-
+
try:
ver = version.Version(version_str)
spec = SpecifierSet(constraint)
@@ -101,24 +111,24 @@ def is_version_compatible(version_str: str, constraint: str) -> Tuple[bool, Opti
return is_compatible, None
except Exception as e:
return False, f"Error checking compatibility: {e}"
-
+
@staticmethod
def parse_constraint_operators(constraint: str) -> List[Tuple[str, str]]:
"""Parse a constraint string to extract operators and versions.
-
+
Args:
constraint (str): Version constraint string.
-
+
Returns:
List[Tuple[str, str]]: List of (operator, version) tuples.
-
+
Raises:
VersionConstraintError: If the constraint format is invalid.
"""
valid, error = VersionConstraintValidator.validate_constraint(constraint)
if not valid:
raise VersionConstraintError(f"Invalid constraint: {error}")
-
+
try:
spec_set = SpecifierSet(constraint)
result = []
@@ -127,83 +137,93 @@ def parse_constraint_operators(constraint: str) -> List[Tuple[str, str]]:
return result
except Exception as e:
raise VersionConstraintError(f"Error parsing constraint: {e}")
-
+
@staticmethod
def get_constraint_bounds(constraint: str) -> Tuple[Optional[str], Optional[str]]:
"""Get the minimum and maximum version bounds from a constraint.
-
+
Args:
constraint (str): Version constraint string.
-
+
Returns:
Tuple[Optional[str], Optional[str]]: A tuple containing:
- Optional[str]: Minimum version (None if no lower bound)
- Optional[str]: Maximum version (None if no upper bound)
-
+
Raises:
VersionConstraintError: If the constraint format is invalid.
"""
operators = VersionConstraintValidator.parse_constraint_operators(constraint)
-
+
min_version = None
max_version = None
-
+
for operator, ver_str in operators:
- if operator in ['>=', '>']:
- if min_version is None or version.Version(ver_str) > version.Version(min_version):
+ if operator in [">=", ">"]:
+ if min_version is None or version.Version(ver_str) > version.Version(
+ min_version
+ ):
min_version = ver_str
- elif operator in ['<=', '<']:
- if max_version is None or version.Version(ver_str) < version.Version(max_version):
+ elif operator in ["<=", "<"]:
+ if max_version is None or version.Version(ver_str) < version.Version(
+ max_version
+ ):
max_version = ver_str
- elif operator == '==':
+ elif operator == "==":
min_version = max_version = ver_str
-
+
return min_version, max_version
-
+
@staticmethod
- def _generate_test_versions(min1: Optional[str], max1: Optional[str],
- min2: Optional[str], max2: Optional[str]) -> List[str]:
+ def _generate_test_versions(
+ min1: Optional[str],
+ max1: Optional[str],
+ min2: Optional[str],
+ max2: Optional[str],
+ ) -> List[str]:
"""Generate a list of version strings to test for constraint overlap.
-
+
Args:
min1 (Optional[str]): Minimum version of first constraint.
max1 (Optional[str]): Maximum version of first constraint.
min2 (Optional[str]): Minimum version of second constraint.
max2 (Optional[str]): Maximum version of second constraint.
-
+
Returns:
List[str]: List of version strings to test.
"""
# Base set of versions to always test
test_versions = []
-
+
# Add specific versions from the constraints
for ver in [min1, max1, min2, max2]:
if ver:
test_versions.append(ver)
-
+
# Add intermediate versions that might be in the overlap
if min1 and min2:
higher_min = max(version.Version(min1), version.Version(min2))
test_versions.append(str(higher_min))
-
+
if max1 and max2:
lower_max = min(version.Version(max1), version.Version(max2))
test_versions.append(str(lower_max))
-
+
# Add commonly used versions that might be in the overlap
- common_versions = ['0.0.1', '1.0.0', '2.0.0', '10.0.0']
-
+ common_versions = ["0.0.1", "1.0.0", "2.0.0", "10.0.0"]
+
return test_versions + common_versions
-
+
@staticmethod
- def constraints_overlap(constraint1: str, constraint2: str) -> Tuple[bool, Optional[str]]:
+ def constraints_overlap(
+ constraint1: str, constraint2: str
+ ) -> Tuple[bool, Optional[str]]:
"""Check if two version constraints overlap (have a common version range).
-
+
Args:
constraint1 (str): First version constraint.
constraint2 (str): Second version constraint.
-
+
Returns:
Tuple[bool, Optional[str]]: A tuple containing:
- bool: True if constraints overlap
@@ -213,15 +233,15 @@ def constraints_overlap(constraint1: str, constraint2: str) -> Tuple[bool, Optio
valid1, error1 = VersionConstraintValidator.validate_constraint(constraint1)
if not valid1:
return False, f"Invalid constraint1: {error1}"
-
+
valid2, error2 = VersionConstraintValidator.validate_constraint(constraint2)
if not valid2:
return False, f"Invalid constraint2: {error2}"
-
+
try:
spec1 = SpecifierSet(constraint1)
spec2 = SpecifierSet(constraint2)
-
+
# For the specific case where one constraint is an exact version
if "==" in constraint1 or "==" in constraint2:
if "==" in constraint1:
@@ -232,55 +252,65 @@ def constraints_overlap(constraint1: str, constraint2: str) -> Tuple[bool, Optio
exact_version = next(s.version for s in spec2 if s.operator == "==")
exact_ver = version.Version(exact_version)
return exact_ver in spec1, None
-
+
# Get min/max bounds from both constraints
min1, max1 = VersionConstraintValidator.get_constraint_bounds(constraint1)
min2, max2 = VersionConstraintValidator.get_constraint_bounds(constraint2)
-
+
# If either constraint has no bounds, it's essentially unbounded
if (min1 is None and max1 is None) or (min2 is None and max2 is None):
return True, None
-
+
# Check for definite non-overlap using range boundaries
- if min1 is not None and max2 is not None and version.Version(min1) > version.Version(max2):
+ if (
+ min1 is not None
+ and max2 is not None
+ and version.Version(min1) > version.Version(max2)
+ ):
return False, None
- if min2 is not None and max1 is not None and version.Version(min2) > version.Version(max1):
+ if (
+ min2 is not None
+ and max1 is not None
+ and version.Version(min2) > version.Version(max1)
+ ):
return False, None
-
+
# If we have both min and max for both constraints, we can determine overlap mathematically
if min1 and max1 and min2 and max2:
min1_v = version.Version(min1)
max1_v = version.Version(max1)
min2_v = version.Version(min2)
max2_v = version.Version(max2)
-
+
# If one range is entirely within the other, they overlap
if (min1_v <= min2_v <= max1_v) or (min2_v <= min1_v <= max2_v):
return True, None
-
+
# For complex cases, use test versions to verify overlap
- test_versions = VersionConstraintValidator._generate_test_versions(min1, max1, min2, max2)
-
+ test_versions = VersionConstraintValidator._generate_test_versions(
+ min1, max1, min2, max2
+ )
+
# Check if any version satisfies both constraints
for test_ver in test_versions:
try:
ver = version.Version(test_ver)
if ver in spec1 and ver in spec2:
return True, None
- except:
+ except Exception:
continue
-
+
return False, None
except Exception as e:
return False, f"Error checking constraint overlap: {e}"
-
+
@staticmethod
def normalize_constraint(constraint: str) -> Tuple[str, Optional[str]]:
"""Normalize a version constraint to a standard format.
-
+
Args:
constraint (str): Version constraint string to normalize.
-
+
Returns:
Tuple[str, Optional[str]]: A tuple containing:
- str: Normalized constraint string
@@ -289,7 +319,7 @@ def normalize_constraint(constraint: str) -> Tuple[str, Optional[str]]:
valid, error = VersionConstraintValidator.validate_constraint(constraint)
if not valid:
return constraint, f"Invalid constraint: {error}"
-
+
try:
spec_set = SpecifierSet(constraint)
return str(spec_set), None
@@ -299,22 +329,24 @@ def normalize_constraint(constraint: str) -> Tuple[str, Optional[str]]:
class DependencyConstraintResolver:
"""Utility class for resolving conflicts between dependency constraints.
-
+
Provides methods for checking compatibility between multiple constraints
on the same dependency and resolving conflicts when possible.
"""
-
+
def __init__(self):
"""Initialize the constraint resolver."""
self.validator = VersionConstraintValidator()
-
- def check_constraint_compatibility(self, constraints: List[str], package_name: str) -> Tuple[bool, List[str]]:
+
+ def check_constraint_compatibility(
+ self, constraints: List[str], package_name: str
+ ) -> Tuple[bool, List[str]]:
"""Check if a list of constraints on the same package are compatible.
-
+
Args:
constraints (List[str]): List of version constraints for the same package.
package_name (str): Name of the package being constrained.
-
+
Returns:
Tuple[bool, List[str]]: A tuple containing:
- bool: Whether all constraints are compatible
@@ -322,39 +354,47 @@ def check_constraint_compatibility(self, constraints: List[str], package_name: s
"""
if not constraints:
return True, []
-
+
if len(constraints) == 1:
valid, error = self.validator.validate_constraint(constraints[0])
return valid, [error] if error else []
-
+
errors = []
-
+
# Validate all individual constraints first
for i, constraint in enumerate(constraints):
valid, error = self.validator.validate_constraint(constraint)
if not valid:
errors.append(f"Constraint {i+1} for {package_name}: {error}")
-
+
if errors:
return False, errors
-
+
# Check pairwise compatibility
for i in range(len(constraints)):
for j in range(i + 1, len(constraints)):
- overlap, error = self.validator.constraints_overlap(constraints[i], constraints[j])
+ overlap, error = self.validator.constraints_overlap(
+ constraints[i], constraints[j]
+ )
if error:
- errors.append(f"Error checking compatibility between constraints for {package_name}: {error}")
+ errors.append(
+ f"Error checking compatibility between constraints for {package_name}: {error}"
+ )
elif not overlap:
- errors.append(f"Incompatible constraints for {package_name}: '{constraints[i]}' and '{constraints[j]}' have no overlap")
-
+ errors.append(
+ f"Incompatible constraints for {package_name}: '{constraints[i]}' and '{constraints[j]}' have no overlap"
+ )
+
return len(errors) == 0, errors
-
- def resolve_constraints(self, constraints: List[str]) -> Tuple[Optional[str], List[str]]:
+
+ def resolve_constraints(
+ self, constraints: List[str]
+ ) -> Tuple[Optional[str], List[str]]:
"""Attempt to resolve multiple constraints into a single combined constraint.
-
+
Args:
constraints (List[str]): List of version constraints to combine.
-
+
Returns:
Tuple[Optional[str], List[str]]: A tuple containing:
- Optional[str]: Combined constraint string if successful, None otherwise
@@ -362,17 +402,17 @@ def resolve_constraints(self, constraints: List[str]) -> Tuple[Optional[str], Li
"""
if not constraints:
return None, ["No constraints provided"]
-
+
if len(constraints) == 1:
valid, error = self.validator.validate_constraint(constraints[0])
return constraints[0] if valid else None, [error] if error else []
-
+
try:
# Combine all constraints using intersection
combined_spec = SpecifierSet(constraints[0])
for constraint in constraints[1:]:
combined_spec &= SpecifierSet(constraint)
-
+
return str(combined_spec), []
except Exception as e:
return None, [f"Error combining constraints: {e}"]
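Apart from the quote and line-length normalisation, `version_utils.py` keeps delegating the actual constraint semantics to `packaging`. For reference, the two primitives the helpers above lean on, specifier-set intersection (used by `resolve_constraints`) and version membership (used by `is_version_compatible` and `constraints_overlap`), behave like this:

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

# Intersection of constraints, as resolve_constraints builds it with `&=`.
combined = SpecifierSet(">=1.0.0,<2.0.0") & SpecifierSet("!=1.3.0")
print(str(combined))                 # normalised, comma-joined specifier set

# Membership checks, as used when probing candidate versions for overlap.
print(Version("1.2.0") in combined)  # True
print(Version("1.3.0") in combined)  # False (excluded by !=1.3.0)
print(Version("2.1.0") in combined)  # False (outside <2.0.0)
```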
diff --git a/package-lock.json b/package-lock.json
index b85e94c..5a2d825 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -432,6 +432,7 @@
"integrity": "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==",
"dev": true,
"license": "MIT",
+ "peer": true,
"dependencies": {
"@octokit/auth-token": "^4.0.0",
"@octokit/graphql": "^7.1.0",
@@ -2106,6 +2107,7 @@
"integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==",
"dev": true,
"license": "MIT",
+ "peer": true,
"dependencies": {
"import-fresh": "^3.3.0",
"js-yaml": "^4.1.0",
@@ -4420,6 +4422,7 @@
"integrity": "sha512-8dD6FusOQSrpv9Z1rdNMdlSgQOIP880DHqnohobOmYLElGEqAL/JvxvuxZO16r4HtjTlfPRDC1hbvxC9dPN2nA==",
"dev": true,
"license": "MIT",
+ "peer": true,
"bin": {
"marked": "bin/marked.js"
},
@@ -6719,6 +6722,7 @@
"dev": true,
"inBundle": true,
"license": "MIT",
+ "peer": true,
"engines": {
"node": ">=12"
},
@@ -7679,6 +7683,7 @@
"integrity": "sha512-6qGjWccl5yoyugHt3jTgztJ9Y0JVzyH8/Voc/D8PlLat9pwxQYXz7W1Dpnq5h0/G5GCYGUaDSlYcyk3AMh5A6g==",
"dev": true,
"license": "MIT",
+ "peer": true,
"dependencies": {
"@semantic-release/commit-analyzer": "^13.0.1",
"@semantic-release/error": "^4.0.0",
@@ -7733,6 +7738,7 @@
"integrity": "sha512-DhGl4xMVFGVIyMwswXeyzdL4uXD5OGILGX5N8Y+f6W7LhC1Ze2poSNrkF/fedpVDHEEZ+PHFW0vL14I+mm8K3Q==",
"dev": true,
"license": "MIT",
+ "peer": true,
"dependencies": {
"@octokit/auth-token": "^6.0.0",
"@octokit/graphql": "^9.0.3",
@@ -8986,6 +8992,7 @@
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
"dev": true,
"license": "MIT",
+ "peer": true,
"engines": {
"node": ">=12"
},
@@ -9122,8 +9129,7 @@
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.10.0.tgz",
"integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==",
"dev": true,
- "license": "MIT",
- "peer": true
+ "license": "MIT"
},
"node_modules/unicode-emoji-modifier-base": {
"version": "1.0.0",
diff --git a/tests/run_validator_tests.py b/tests/run_validator_tests.py
index fda0902..1c18e7b 100644
--- a/tests/run_validator_tests.py
+++ b/tests/run_validator_tests.py
@@ -12,142 +12,139 @@
# Configure logging
logging.basicConfig(
level=logging.INFO,
- format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+ format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
handlers=[
logging.StreamHandler(),
- logging.FileHandler("validator_test_results.log")
- ]
+ logging.FileHandler("validator_test_results.log"),
+ ],
)
logger = logging.getLogger("hatch.validator_test_runner")
def configure_parser():
"""Configure command-line argument parser.
-
+
Returns:
argparse.ArgumentParser: Configured argument parser
"""
parser = argparse.ArgumentParser(
description="Run tests for Hatch-Validator",
- formatter_class=argparse.RawTextHelpFormatter
+ formatter_class=argparse.RawTextHelpFormatter,
)
-
+
# Create test groups
test_group = parser.add_argument_group("Test selection")
-
+
# Add mutual exclusion for test types
test_type = test_group.add_mutually_exclusive_group()
test_type.add_argument(
"--schemas-only",
action="store_true",
- help="Run only schema retriever integration tests (network tests)"
+ help="Run only schema retriever integration tests (network tests)",
)
test_type.add_argument(
- "--validator-only",
- action="store_true",
- help="Run only package validator tests"
+ "--validator-only", action="store_true", help="Run only package validator tests"
)
test_type.add_argument(
"--schema-validators-only",
action="store_true",
- help="Run only schema validator framework tests"
+ help="Run only schema validator framework tests",
)
test_type.add_argument(
"--validator_for_pkg_v1_1_0_only",
action="store_true",
- help="Run only v1.1.0 validator implementation tests"
+ help="Run only v1.1.0 validator implementation tests",
)
test_type.add_argument(
"--validator_for_pkg_v1_2_0_only",
action="store_true",
- help="Run only v1.2.0 validator implementation tests"
+ help="Run only v1.2.0 validator implementation tests",
)
test_type.add_argument(
"--dependency-graph-only",
action="store_true",
- help="Run only dependency graph utility tests"
+ help="Run only dependency graph utility tests",
)
test_type.add_argument(
"--version-utils-only",
action="store_true",
- help="Run only version constraint utility tests"
+ help="Run only version constraint utility tests",
)
test_type.add_argument(
"--dependency-v1-1-0-only",
action="store_true",
- help="Run only v1.1.0 dependency validation tests"
+ help="Run only v1.1.0 dependency validation tests",
)
test_type.add_argument(
"--package-service-only",
action="store_true",
- help="Run only package service and accessor tests"
+ help="Run only package service and accessor tests",
)
test_type.add_argument(
"--registry-service-only",
action="store_true",
- help="Run only RegistryService accessor tests"
+ help="Run only RegistryService accessor tests",
)
test_type.add_argument(
- "--all",
- action="store_true",
- help="Run all tests explicitly"
+ "--all", action="store_true", help="Run all tests explicitly"
)
test_type.add_argument(
- "--custom",
- metavar="MODULE_OR_CLASS",
- help="Run specific test module or class"
+ "--custom", metavar="MODULE_OR_CLASS", help="Run specific test module or class"
)
-
+
# Add options for test execution
options_group = parser.add_argument_group("Test options")
options_group.add_argument(
- "--verbose", "-v",
+ "--verbose",
+ "-v",
action="count",
default=1,
- help="Increase verbosity level (can be specified multiple times)"
+ help="Increase verbosity level (can be specified multiple times)",
)
options_group.add_argument(
- "--quiet", "-q",
- action="store_true",
- help="Minimal output"
+ "--quiet", "-q", action="store_true", help="Minimal output"
)
options_group.add_argument(
- "--failfast",
- action="store_true",
- help="Stop on first failure"
+ "--failfast", action="store_true", help="Stop on first failure"
)
-
+
return parser
def run_tests(args):
"""Run the selected tests.
-
+
Args:
args: Command-line arguments from argparse
-
+
Returns:
bool: True if tests passed, False otherwise
"""
# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))
-
+
# Determine test verbosity level
verbosity = 0 if args.quiet else args.verbose
-
+
# Prepare test loader
test_loader = unittest.TestLoader()
-
+
# Determine which tests to run
if args.schemas_only:
logger.info("Running schema retriever integration tests only...")
- test_suite = test_loader.loadTestsFromName("test_schemas_retriever.TestSchemaRetrieverIntegration")
+ test_suite = test_loader.loadTestsFromName(
+ "test_schemas_retriever.TestSchemaRetrieverIntegration"
+ )
elif args.validator_for_pkg_v1_1_0_only:
logger.info("Running package validator tests only...")
- test_suite = test_loader.loadTestsFromName("test_package_validator.TestHatchPackageValidator")
+ test_suite = test_loader.loadTestsFromName(
+ "test_package_validator.TestHatchPackageValidator"
+ )
elif args.validator_for_pkg_v1_2_0_only:
logger.info("Running v1.2.0 validator implementation tests only...")
- test_suite = test_loader.loadTestsFromName("test_package_validator_for_v1_2_0.TestHatchPackageValidator_v1_2_0")
+ test_suite = test_loader.loadTestsFromName(
+ "test_package_validator_for_v1_2_0.TestHatchPackageValidator_v1_2_0"
+ )
elif args.schema_validators_only:
logger.info("Running schema validator framework tests only...")
test_suite = test_loader.loadTestsFromName("test_schema_validators")
@@ -162,22 +159,26 @@ def run_tests(args):
test_suite = test_loader.loadTestsFromName("test_dependency_validation_v1_1_0")
elif args.package_service_only:
logger.info("Running package service and accessor tests only...")
- test_suite = test_loader.loadTestsFromName("test_package_service.TestPackageService")
+ test_suite = test_loader.loadTestsFromName(
+ "test_package_service.TestPackageService"
+ )
elif args.registry_service_only:
logger.info("Running RegistryService accessor tests only...")
- test_suite = test_loader.loadTestsFromName("test_registry_service.TestRegistryServiceV110")
+ test_suite = test_loader.loadTestsFromName(
+ "test_registry_service.TestRegistryServiceV110"
+ )
elif args.all:
# Run all tests explicitly
logger.info("Running all Hatch-Validator tests...")
test_modules = [
"test_schemas_retriever",
- "test_package_validator",
+ "test_package_validator",
"test_schema_validators",
"test_schema_validators_v1_1_0",
"test_dependency_graph",
"test_version_utils",
"test_dependency_validation_v1_1_0",
- "test_package_validator_for_v1_2_0"
+ "test_package_validator_for_v1_2_0",
]
test_suite = unittest.TestSuite()
for module_name in test_modules:
@@ -199,31 +200,28 @@ def run_tests(args):
# Run all tests using discovery as fallback
logger.info("Running all Hatch-Validator tests using discovery...")
current_dir = Path(__file__).parent
- test_suite = test_loader.discover(str(current_dir), pattern='test_*.py')
+ test_suite = test_loader.discover(str(current_dir), pattern="test_*.py")
# Run the tests
- test_runner = unittest.TextTestRunner(
- verbosity=verbosity,
- failfast=args.failfast
- )
+ test_runner = unittest.TextTestRunner(verbosity=verbosity, failfast=args.failfast)
result = test_runner.run(test_suite)
-
+
# Log test results summary
logger.info(f"Tests run: {result.testsRun}")
logger.info(f"Errors: {len(result.errors)}")
logger.info(f"Failures: {len(result.failures)}")
-
+
if result.wasSuccessful():
logger.info("All tests PASSED!")
else:
logger.warning("Some tests FAILED!")
-
+
return result.wasSuccessful()
if __name__ == "__main__":
parser = configure_parser()
args = parser.parse_args()
-
+
success = run_tests(args)
- sys.exit(0 if success else 1)
\ No newline at end of file
+ sys.exit(0 if success else 1)
diff --git a/tests/test_dependency_graph.py b/tests/test_dependency_graph.py
index cee39fe..dd1a305 100644
--- a/tests/test_dependency_graph.py
+++ b/tests/test_dependency_graph.py
@@ -31,15 +31,18 @@
"tags": ["core", "base"],
"versions": [
{
- "author": {"GitHubID": "aliceGH", "email": "alice@example.com"},
+ "author": {
+ "GitHubID": "aliceGH",
+ "email": "alice@example.com",
+ },
"version": "1.0.0",
"release_uri": "https://example.com/hatch-dev/base_pkg_1/1.0.0",
"added_date": "2025-06-23T12:00:00Z",
"hatch_dependencies_added": [],
- "hatch_dependencies_removed": []
+ "hatch_dependencies_removed": [],
}
],
- "latest_version": "1.0.0"
+ "latest_version": "1.0.0",
},
{
"name": "util_pkg",
@@ -52,20 +55,21 @@
"release_uri": "https://example.com/hatch-dev/util_pkg/0.1.0",
"added_date": "2025-06-23T12:00:00Z",
"hatch_dependencies_added": [
- {"name": "base_pkg_1", "type": "remote", "version_constraint": ">=1.0.0"}
+ {
+ "name": "base_pkg_1",
+ "type": "remote",
+ "version_constraint": ">=1.0.0",
+ }
],
- "hatch_dependencies_removed": []
+ "hatch_dependencies_removed": [],
}
],
- "latest_version": "0.1.0"
- }
- ]
+ "latest_version": "0.1.0",
+ },
+ ],
}
],
- "stats": {
- "total_packages": 2,
- "total_versions": 2
- }
+ "stats": {"total_packages": 2, "total_versions": 2},
}
MOCK_PKG_METADATA = {
@@ -78,7 +82,11 @@
"license": {"name": "MIT"},
"entry_point": "util_pkg.main:main",
"hatch_dependencies": [
- {"name": "base_pkg_1", "type": {"type": "remote"}, "version_constraint": ">=1.0.0"}
+ {
+ "name": "base_pkg_1",
+ "type": {"type": "remote"},
+ "version_constraint": ">=1.0.0",
+ }
],
"python_dependencies": [],
"contributors": [],
@@ -86,93 +94,156 @@
"documentation": "https://example.com/hatch-dev/util_pkg/docs",
"compatibility": {"hatchling": ">=0.1.0", "python": ">=3.7"},
"tools": [],
- "citations": {"origin": "", "mcp": ""}
+ "citations": {"origin": "", "mcp": ""},
}
+
class DummyContext(ValidationContext):
def __init__(self):
super().__init__()
self.package_dir = Path("/tmp")
self._data = {"pending_update": ("util_pkg", None)}
+
def get_data(self, key, default=None):
return self._data.get(key, default)
+
class TestDependencyGraph(unittest.TestCase):
"""Test cases for the DependencyGraph class."""
-
+
def setUp(self):
"""Set up test fixtures."""
self.empty_graph = DependencyGraph()
# Simple acyclic graph: A -> B -> C (rich format)
- self.simple_acyclic = DependencyGraph({
- 'A': [{"name": "B", "version_constraint": None, "resolved_version": None}],
- 'B': [{"name": "C", "version_constraint": None, "resolved_version": None}],
- 'C': []
- })
+ self.simple_acyclic = DependencyGraph(
+ {
+ "A": [
+ {"name": "B", "version_constraint": None, "resolved_version": None}
+ ],
+ "B": [
+ {"name": "C", "version_constraint": None, "resolved_version": None}
+ ],
+ "C": [],
+ }
+ )
# Graph with cycle: A -> B -> C -> A (rich format)
- self.simple_cyclic = DependencyGraph({
- 'A': [{"name": "B", "version_constraint": None, "resolved_version": None}],
- 'B': [{"name": "C", "version_constraint": None, "resolved_version": None}],
- 'C': [{"name": "A", "version_constraint": None, "resolved_version": None}]
- })
+ self.simple_cyclic = DependencyGraph(
+ {
+ "A": [
+ {"name": "B", "version_constraint": None, "resolved_version": None}
+ ],
+ "B": [
+ {"name": "C", "version_constraint": None, "resolved_version": None}
+ ],
+ "C": [
+ {"name": "A", "version_constraint": None, "resolved_version": None}
+ ],
+ }
+ )
# Complex acyclic graph (rich format)
- self.complex_acyclic = DependencyGraph({
- 'app': [
- {"name": "utils", "version_constraint": None, "resolved_version": None},
- {"name": "db", "version_constraint": None, "resolved_version": None}
- ],
- 'utils': [{"name": "math", "version_constraint": None, "resolved_version": None}],
- 'db': [{"name": "utils", "version_constraint": None, "resolved_version": None}],
- 'math': [],
- 'standalone': []
- })
+ self.complex_acyclic = DependencyGraph(
+ {
+ "app": [
+ {
+ "name": "utils",
+ "version_constraint": None,
+ "resolved_version": None,
+ },
+ {
+ "name": "db",
+ "version_constraint": None,
+ "resolved_version": None,
+ },
+ ],
+ "utils": [
+ {
+ "name": "math",
+ "version_constraint": None,
+ "resolved_version": None,
+ }
+ ],
+ "db": [
+ {
+ "name": "utils",
+ "version_constraint": None,
+ "resolved_version": None,
+ }
+ ],
+ "math": [],
+ "standalone": [],
+ }
+ )
# Complex graph with multiple cycles (rich format)
- self.complex_cyclic = DependencyGraph({
- 'A': [{"name": "B", "version_constraint": None, "resolved_version": None}],
- 'B': [
- {"name": "C", "version_constraint": None, "resolved_version": None},
- {"name": "D", "version_constraint": None, "resolved_version": None}
- ],
- 'C': [{"name": "A", "version_constraint": None, "resolved_version": None}], # Cycle: A -> B -> C -> A
- 'D': [{"name": "E", "version_constraint": None, "resolved_version": None}],
- 'E': [{"name": "D", "version_constraint": None, "resolved_version": None}] # Cycle: D -> E -> D
- })
-
+ self.complex_cyclic = DependencyGraph(
+ {
+ "A": [
+ {"name": "B", "version_constraint": None, "resolved_version": None}
+ ],
+ "B": [
+ {"name": "C", "version_constraint": None, "resolved_version": None},
+ {"name": "D", "version_constraint": None, "resolved_version": None},
+ ],
+ "C": [
+ {"name": "A", "version_constraint": None, "resolved_version": None}
+ ], # Cycle: A -> B -> C -> A
+ "D": [
+ {"name": "E", "version_constraint": None, "resolved_version": None}
+ ],
+ "E": [
+ {"name": "D", "version_constraint": None, "resolved_version": None}
+ ], # Cycle: D -> E -> D
+ }
+ )
+
def test_empty_graph_no_cycles(self):
"""Test that empty graph has no cycles."""
has_cycles, cycles = self.empty_graph.detect_cycles()
self.assertFalse(has_cycles, "Empty graph should not have any cycles")
self.assertEqual(cycles, [], "Empty graph should return empty cycles list")
-
+
def test_simple_acyclic_no_cycles(self):
"""Test that simple acyclic graph has no cycles."""
has_cycles, cycles = self.simple_acyclic.detect_cycles()
- self.assertFalse(has_cycles, "Simple acyclic graph (A->B->C) should not have cycles")
- self.assertEqual(cycles, [], "Simple acyclic graph should return empty cycles list")
-
+ self.assertFalse(
+ has_cycles, "Simple acyclic graph (A->B->C) should not have cycles"
+ )
+ self.assertEqual(
+ cycles, [], "Simple acyclic graph should return empty cycles list"
+ )
+
def test_simple_cyclic_detects_cycle(self):
"""Test that simple cyclic graph detects the cycle."""
has_cycles, cycles = self.simple_cyclic.detect_cycles()
- self.assertTrue(has_cycles, "Simple cyclic graph (A->B->C->A) should detect cycles")
- self.assertEqual(len(cycles), 1, "Simple cyclic graph should detect exactly one cycle")
+ self.assertTrue(
+ has_cycles, "Simple cyclic graph (A->B->C->A) should detect cycles"
+ )
+ self.assertEqual(
+ len(cycles), 1, "Simple cyclic graph should detect exactly one cycle"
+ )
# The cycle should be A -> B -> C -> A
cycle = cycles[0]
- self.assertIn('A', cycle, "Detected cycle should contain package A")
- self.assertIn('B', cycle, "Detected cycle should contain package B")
- self.assertIn('C', cycle, "Detected cycle should contain package C")
-
+ self.assertIn("A", cycle, "Detected cycle should contain package A")
+ self.assertIn("B", cycle, "Detected cycle should contain package B")
+ self.assertIn("C", cycle, "Detected cycle should contain package C")
+
def test_complex_acyclic_no_cycles(self):
"""Test that complex acyclic graph has no cycles."""
has_cycles, cycles = self.complex_acyclic.detect_cycles()
self.assertFalse(has_cycles, "Complex acyclic graph should not have cycles")
- self.assertEqual(cycles, [], "Complex acyclic graph should return empty cycles list")
-
+ self.assertEqual(
+ cycles, [], "Complex acyclic graph should return empty cycles list"
+ )
+
def test_complex_cyclic_detects_multiple_cycles(self):
"""Test that complex graph detects multiple cycles."""
has_cycles, cycles = self.complex_cyclic.detect_cycles()
- self.assertTrue(has_cycles, "Complex graph with multiple cycles should detect cycles")
- self.assertGreaterEqual(len(cycles), 1, "Complex graph should detect at least one cycle")
-
+ self.assertTrue(
+ has_cycles, "Complex graph with multiple cycles should detect cycles"
+ )
+ self.assertGreaterEqual(
+ len(cycles), 1, "Complex graph should detect at least one cycle"
+ )
+
def test_topological_sort_acyclic(self):
"""Test topological sort on acyclic graph."""
success, sorted_packages = self.simple_acyclic.topological_sort()
@@ -184,142 +255,197 @@ def test_topological_sort_acyclic(self):
# Check that dependencies come before dependents
# In a topological sort, if A depends on B, then B should come BEFORE A in the sorted order
- a_index = dependencies_first.index('A')
- b_index = dependencies_first.index('B')
- c_index = dependencies_first.index('C')
-
+ a_index = dependencies_first.index("A")
+ b_index = dependencies_first.index("B")
+ c_index = dependencies_first.index("C")
+
# The implementation correctly puts dependencies first (C depends on nothing, B depends on C, A depends on B)
# So order should be: C, B, A
- self.assertLess(c_index, b_index, "Package C should come before B in topological order (B depends on C)")
- self.assertLess(b_index, a_index, "Package B should come before A in topological order (A depends on B)")
-
+ self.assertLess(
+ c_index,
+ b_index,
+ "Package C should come before B in topological order (B depends on C)",
+ )
+ self.assertLess(
+ b_index,
+ a_index,
+ "Package B should come before A in topological order (A depends on B)",
+ )
+
def test_topological_sort_cyclic_fails(self):
"""Test that topological sort fails on cyclic graph."""
success, sorted_packages = self.simple_cyclic.topological_sort()
self.assertFalse(success, "Topological sort should fail on cyclic graph")
- self.assertEqual(sorted_packages, [], "Failed topological sort should return empty list")
-
+ self.assertEqual(
+ sorted_packages, [], "Failed topological sort should return empty list"
+ )
+
def test_find_dependency_path_exists(self):
"""Test finding a path when one exists."""
- path = self.simple_acyclic.find_dependency_path('A', 'C')
+ path = self.simple_acyclic.find_dependency_path("A", "C")
self.assertIsNotNone(path, "Should find a path from A to C in acyclic graph")
- self.assertEqual(path, ['A', 'B', 'C'], "Path from A to C should be A->B->C")
-
+ self.assertEqual(path, ["A", "B", "C"], "Path from A to C should be A->B->C")
+
def test_find_dependency_path_not_exists(self):
"""Test finding a path when none exists."""
- path = self.simple_acyclic.find_dependency_path('C', 'A')
- self.assertIsNone(path, "Should not find a path from C to A (reverse direction)")
-
+ path = self.simple_acyclic.find_dependency_path("C", "A")
+ self.assertIsNone(
+ path, "Should not find a path from C to A (reverse direction)"
+ )
+
def test_find_dependency_path_same_package(self):
"""Test finding a path to the same package."""
- path = self.simple_acyclic.find_dependency_path('A', 'A')
- self.assertEqual(path, ['A'], "Path from package to itself should be [package]")
-
+ path = self.simple_acyclic.find_dependency_path("A", "A")
+ self.assertEqual(path, ["A"], "Path from package to itself should be [package]")
+
def test_add_dependency(self):
"""Test adding dependencies to the graph."""
graph = DependencyGraph()
- graph.add_dependency('pkg1', {"name": "pkg2", "version_constraint": None, "resolved_version": None})
- graph.add_dependency('pkg1', {"name": "pkg3", "version_constraint": None, "resolved_version": None})
+ graph.add_dependency(
+ "pkg1",
+ {"name": "pkg2", "version_constraint": None, "resolved_version": None},
+ )
+ graph.add_dependency(
+ "pkg1",
+ {"name": "pkg3", "version_constraint": None, "resolved_version": None},
+ )
self.assertEqual(
- sorted(graph.get_direct_dependencies('pkg1')),
- ['pkg2', 'pkg3'],
- "Package pkg1 should have dependencies ['pkg2', 'pkg3'] after adding them"
+ sorted(graph.get_direct_dependencies("pkg1")),
+ ["pkg2", "pkg3"],
+ "Package pkg1 should have dependencies ['pkg2', 'pkg3'] after adding them",
)
-
+
def test_add_package(self):
"""Test adding a package without dependencies."""
graph = DependencyGraph()
- graph.add_package('standalone')
-
- self.assertEqual(graph.get_direct_dependencies('standalone'), [],
- "Standalone package should have no dependencies")
- self.assertIn('standalone', graph.get_all_packages(),
- "Standalone package should be present in all packages")
-
+ graph.add_package("standalone")
+
+ self.assertEqual(
+ graph.get_direct_dependencies("standalone"),
+ [],
+ "Standalone package should have no dependencies",
+ )
+ self.assertIn(
+ "standalone",
+ graph.get_all_packages(),
+ "Standalone package should be present in all packages",
+ )
+
def test_get_all_packages(self):
"""Test getting all packages from the graph."""
packages = self.simple_acyclic.get_all_packages()
- expected = {'A', 'B', 'C'}
- self.assertEqual(packages, expected, "Should return all packages in the graph including dependencies")
-
+ expected = {"A", "B", "C"}
+ self.assertEqual(
+ packages,
+ expected,
+ "Should return all packages in the graph including dependencies",
+ )
+
def test_get_direct_dependencies(self):
"""Test getting direct dependencies of a package."""
- deps = self.simple_acyclic.get_direct_dependencies('A')
- self.assertEqual(deps, ['B'], "Package A should have direct dependency on B")
-
- deps = self.simple_acyclic.get_direct_dependencies('C')
+ deps = self.simple_acyclic.get_direct_dependencies("A")
+ self.assertEqual(deps, ["B"], "Package A should have direct dependency on B")
+
+ deps = self.simple_acyclic.get_direct_dependencies("C")
self.assertEqual(deps, [], "Package C should have no dependencies")
-
+
def test_get_all_dependencies_acyclic(self):
"""Test getting all transitive dependencies on acyclic graph."""
- all_deps = self.simple_acyclic.get_all_dependencies('A')
- expected = {'B', 'C'}
- self.assertEqual(all_deps, expected, "Package A should have transitive dependencies B and C")
-
+ all_deps = self.simple_acyclic.get_all_dependencies("A")
+ expected = {"B", "C"}
+ self.assertEqual(
+ all_deps, expected, "Package A should have transitive dependencies B and C"
+ )
+
def test_get_all_dependencies_cyclic_raises_error(self):
"""Test that getting all dependencies on cyclic graph raises error."""
with self.assertRaises(DependencyGraphError):
- self.simple_cyclic.get_all_dependencies('A')
-
+ self.simple_cyclic.get_all_dependencies("A")
+
def test_from_dependency_dict(self):
"""Test creating graph from dependency dictionary."""
deps = {
- 'pkg1': [
+ "pkg1": [
{"name": "pkg2", "version_constraint": None, "resolved_version": None},
+ {"name": "pkg3", "version_constraint": None, "resolved_version": None},
+ ],
+ "pkg2": [
{"name": "pkg3", "version_constraint": None, "resolved_version": None}
],
- 'pkg2': [{"name": "pkg3", "version_constraint": None, "resolved_version": None}],
- 'pkg3': []
+ "pkg3": [],
}
graph = DependencyGraph.from_dependency_dict(deps)
self.assertEqual(
- sorted(graph.get_direct_dependencies('pkg1')),
- ['pkg2', 'pkg3'],
- "pkg1 should have dependencies pkg2 and pkg3"
+ sorted(graph.get_direct_dependencies("pkg1")),
+ ["pkg2", "pkg3"],
+ "pkg1 should have dependencies pkg2 and pkg3",
)
self.assertEqual(
- graph.get_direct_dependencies('pkg2'),
- ['pkg3'],
- "pkg2 should have dependency pkg3"
+ graph.get_direct_dependencies("pkg2"),
+ ["pkg3"],
+ "pkg2 should have dependency pkg3",
)
self.assertEqual(
- graph.get_direct_dependencies('pkg3'),
+ graph.get_direct_dependencies("pkg3"),
[],
- "pkg3 should have no dependencies"
+ "pkg3 should have no dependencies",
)
-
+
def test_self_dependency_cycle(self):
"""Test detection of self-dependency cycles."""
- graph = DependencyGraph({
- 'A': [{"name": "A", "version_constraint": None, "resolved_version": None}] # Self-dependency
- })
+ graph = DependencyGraph(
+ {
+ "A": [
+ {"name": "A", "version_constraint": None, "resolved_version": None}
+ ] # Self-dependency
+ }
+ )
has_cycles, cycles = graph.detect_cycles()
self.assertTrue(has_cycles, "Graph with self-dependency should detect cycle")
- self.assertEqual(len(cycles), 1, "Self-dependency should create exactly one cycle")
-
+ self.assertEqual(
+ len(cycles), 1, "Self-dependency should create exactly one cycle"
+ )
+
def test_complex_path_finding(self):
"""Test path finding in complex graph."""
- path = self.complex_acyclic.find_dependency_path('app', 'math')
+ path = self.complex_acyclic.find_dependency_path("app", "math")
self.assertIsNotNone(path, "Should find path from app to math in complex graph")
# Should find path: app -> utils -> math
- self.assertEqual(path, ['app', 'utils', 'math'], "Path should be app->utils->math")
-
+ self.assertEqual(
+ path, ["app", "utils", "math"], "Path should be app->utils->math"
+ )
+
def test_isolated_packages(self):
"""Test handling of isolated packages in the graph."""
- graph = DependencyGraph({
- 'connected1': [{"name": "connected2", "version_constraint": None, "resolved_version": None}],
- 'connected2': [],
- 'isolated': []
- })
-
+ graph = DependencyGraph(
+ {
+ "connected1": [
+ {
+ "name": "connected2",
+ "version_constraint": None,
+ "resolved_version": None,
+ }
+ ],
+ "connected2": [],
+ "isolated": [],
+ }
+ )
+
packages = graph.get_all_packages()
- expected = {'connected1', 'connected2', 'isolated'}
- self.assertEqual(packages, expected, "Should include both connected and isolated packages")
-
+ expected = {"connected1", "connected2", "isolated"}
+ self.assertEqual(
+ packages, expected, "Should include both connected and isolated packages"
+ )
+
# Should be able to sort even with isolated packages
success, sorted_packages = graph.topological_sort()
- self.assertTrue(success, "Topological sort should succeed with isolated packages")
- self.assertEqual(len(sorted_packages), 3, "Should include all packages in topological sort")
+ self.assertTrue(
+ success, "Topological sort should succeed with isolated packages"
+ )
+ self.assertEqual(
+ len(sorted_packages), 3, "Should include all packages in topological sort"
+ )
+
class TestHatchDependencyGraphBuilder(unittest.TestCase):
"""Test cases for the HatchDependencyGraphBuilder class (integration with registry and package service)."""
@@ -327,27 +453,37 @@ class TestHatchDependencyGraphBuilder(unittest.TestCase):
def setUp(self):
self.registry_service = RegistryService(MOCK_REGISTRY)
self.package_service = PackageService(MOCK_PKG_METADATA)
- self.builder = HatchDependencyGraphBuilder(self.package_service, self.registry_service)
+ self.builder = HatchDependencyGraphBuilder(
+ self.package_service, self.registry_service
+ )
self.context = DummyContext()
def test_build_dependency_graph(self):
deps = self.package_service.get_dependencies().get("hatch", [])
graph = self.builder.build_dependency_graph(deps, self.context)
- self.assertTrue("util_pkg" in graph.get_all_packages(), f"Expected 'util_pkg' in graph packages, got: {graph.get_all_packages()}")
- self.assertTrue("base_pkg_1" in graph.get_all_packages(), f"Expected 'base_pkg_1' in graph packages, got: {graph.get_all_packages()}")
+ self.assertTrue(
+ "util_pkg" in graph.get_all_packages(),
+ f"Expected 'util_pkg' in graph packages, got: {graph.get_all_packages()}",
+ )
+ self.assertTrue(
+ "base_pkg_1" in graph.get_all_packages(),
+ f"Expected 'base_pkg_1' in graph packages, got: {graph.get_all_packages()}",
+ )
# util_pkg should depend on base_pkg_1
self.assertIn(
"base_pkg_1",
graph.get_direct_dependencies("util_pkg"),
- f"Expected 'base_pkg_1' in util_pkg dependencies, got: {graph.get_direct_dependencies('util_pkg')}"
+ f"Expected 'base_pkg_1' in util_pkg dependencies, got: {graph.get_direct_dependencies('util_pkg')}",
)
def test_get_install_ready_dependencies(self):
install_order = self.builder.get_install_ready_dependencies(self.context)
# Should include both util_pkg and base_pkg_1
names = [dep["name"] for dep in install_order]
- self.assertIn("base_pkg_1", names, f"Expected 'base_pkg_1' in install order, got: {names}")
+ self.assertIn(
+ "base_pkg_1", names, f"Expected 'base_pkg_1' in install order, got: {names}"
+ )
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()
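The rewritten fixtures above all use the "rich" dependency format (`name` / `version_constraint` / `resolved_version`) rather than bare package names. A short usage sketch of the graph API these tests exercise; the import path is assumed here and should be taken from `tests/test_dependency_graph.py` itself:

```python
from hatch_validator.utils.dependency_graph import DependencyGraph  # assumed path

graph = DependencyGraph(
    {
        "app": [
            {"name": "utils", "version_constraint": ">=1.0.0", "resolved_version": "1.2.0"}
        ],
        "utils": [],
    }
)

has_cycles, _ = graph.detect_cycles()
ok, order = graph.topological_sort()

assert not has_cycles and ok
# Dependencies sort before their dependents, so "utils" precedes "app".
assert order.index("utils") < order.index("app")
```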
diff --git a/tests/test_package_service.py b/tests/test_package_service.py
index 8090a0e..a3803b4 100644
--- a/tests/test_package_service.py
+++ b/tests/test_package_service.py
@@ -28,7 +28,7 @@
"compatibility": {"hatchling": ">=0.1.0", "python": ">=3.7"},
"entry_point": "dummy_pkg_v110.main:main",
"tools": [{"name": "tool1", "description": "A tool"}],
- "citations": {"origin": "", "mcp": ""}
+ "citations": {"origin": "", "mcp": ""},
}
# Dummy package metadata for v1.2.0
@@ -44,23 +44,29 @@
"repository": "https://example.com/repo2",
"documentation": "https://example.com/docs2",
"dependencies": {
- "hatch": [
- {"name": "base_pkg_2", "version_constraint": ">=2.0.0"}
- ],
+ "hatch": [{"name": "base_pkg_2", "version_constraint": ">=2.0.0"}],
"python": [
- {"name": "numpy", "version_constraint": ">=1.18.0", "package_manager": "pip"}
+ {
+ "name": "numpy",
+ "version_constraint": ">=1.18.0",
+ "package_manager": "pip",
+ }
],
"system": [
- {"name": "libssl", "version_constraint": ">=1.1.1", "package_manager": "apt"}
+ {
+ "name": "libssl",
+ "version_constraint": ">=1.1.1",
+ "package_manager": "apt",
+ }
],
"docker": [
{"name": "ubuntu", "version_constraint": "==20.04", "registry": "dockerhub"}
- ]
+ ],
},
"compatibility": {"hatchling": ">=0.2.0", "python": ">=3.8"},
"entry_point": "dummy_pkg_v120.main:main",
"tools": [{"name": "tool2", "description": "Another tool"}],
- "citations": {"origin": "", "mcp": ""}
+ "citations": {"origin": "", "mcp": ""},
}
# Dummy package metadata for v1.2.1
@@ -76,22 +82,24 @@
"repository": "https://example.com/repo3",
"documentation": "https://example.com/docs3",
"dependencies": {
- "hatch": [
- {"name": "base_pkg_3", "version_constraint": ">=3.0.0"}
- ],
+ "hatch": [{"name": "base_pkg_3", "version_constraint": ">=3.0.0"}],
"python": [
- {"name": "fastapi", "version_constraint": ">=0.68.0", "package_manager": "pip"}
+ {
+ "name": "fastapi",
+ "version_constraint": ">=0.68.0",
+ "package_manager": "pip",
+ }
],
"system": [],
- "docker": []
+ "docker": [],
},
"compatibility": {"hatchling": ">=0.3.0", "python": ">=3.9"},
"entry_point": {
"mcp_server": "mcp_server.py",
- "hatch_mcp_server": "hatch_mcp_server.py"
+ "hatch_mcp_server": "hatch_mcp_server.py",
},
"tools": [{"name": "tool3", "description": "A dual entry point tool"}],
- "citations": {"origin": "", "mcp": ""}
+ "citations": {"origin": "", "mcp": ""},
}
DUMMY_METADATA_V200 = {
@@ -103,36 +111,43 @@
"authors": [{"name": "Frank", "email": "frank@example.com"}],
"license": {"name": "MIT"},
"documentation": "https://example.com/docs4",
- "provenance": {
- "source": "internal"
- },
+ "provenance": {"source": "internal"},
"dependencies": {
- "hatch": [
- {"name": "base_pkg_4", "version_constraint": ">=0.0.0"}
- ],
+ "hatch": [{"name": "base_pkg_4", "version_constraint": ">=0.0.0"}],
"python": [
- {"name": "pydantic", "version_constraint": ">=1.0.0", "package_manager": "pip"}
+ {
+ "name": "pydantic",
+ "version_constraint": ">=1.0.0",
+ "package_manager": "pip",
+ }
],
"system": [
- {"name": "libssl", "version_constraint": ">=1.1.1", "package_manager": "apt"}
+ {
+ "name": "libssl",
+ "version_constraint": ">=1.1.1",
+ "package_manager": "apt",
+ }
],
"docker": [
{
"name": "ubuntu",
"tag": "20.04",
"digest": "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
- "registry": "dockerhub"
+ "registry": "dockerhub",
}
- ]
+ ],
},
"entry_point": {
"mcp_server": "mcp_server.py",
- "hatch_mcp_server": "hatch_mcp_server.py"
+ "hatch_mcp_server": "hatch_mcp_server.py",
},
"tools": [{"name": "tool4", "desc": "A v2 tool"}],
- "citations": [{"format": "formatted", "value": "A sample citation", "note": "Test citation"}]
+ "citations": [
+ {"format": "formatted", "value": "A sample citation", "note": "Test citation"}
+ ],
}
+
class TestPackageService(unittest.TestCase):
"""Tests for the PackageService and concrete package accessors."""
@@ -219,7 +234,10 @@ def test_v200_fields(self):
self.assertEqual(deps["python"][0]["name"], "pydantic")
self.assertEqual(deps["system"][0]["name"], "libssl")
self.assertEqual(deps["docker"][0]["tag"], "20.04")
- self.assertEqual(deps["docker"][0]["digest"], "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef")
+ self.assertEqual(
+ deps["docker"][0]["digest"],
+ "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
+ )
def test_version_routing(self):
"""Test that PackageService routes to correct accessor based on schema version."""
@@ -241,5 +259,6 @@ def test_version_routing(self):
entry_point_v121 = service_v121.get_entry_point()
self.assertIsInstance(entry_point_v121, dict)
+
if __name__ == "__main__":
unittest.main()
diff --git a/tests/test_package_validator.py b/tests/test_package_validator.py
index 7e5a684..f234af8 100644
--- a/tests/test_package_validator.py
+++ b/tests/test_package_validator.py
@@ -11,27 +11,30 @@
# Add the parent directory to the path if needed
sys.path.insert(0, str(Path(__file__).parent.parent))
-from hatch_validator.package_validator import HatchPackageValidator, PackageValidationError
-from hatch_validator.registry.registry_service import RegistryService
+from hatch_validator.package_validator import ( # noqa: E402
+ HatchPackageValidator,
+)
+from hatch_validator.registry.registry_service import RegistryService # noqa: E402
# Configure logging
logging.basicConfig(
- level=logging.DEBUG,
- format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+ level=logging.DEBUG, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger("hatch.validator_tests")
class TestHatchPackageValidator(unittest.TestCase):
"""Tests for the Hatch package validator using real packages from Hatch-Dev."""
-
+
def setUp(self):
"""Set up test environment before each test."""
# Path to Hatch-Dev packages
self.hatch_dev_path = Path(__file__).parent.parent.parent / "Hatching-Dev"
- self.assertTrue(self.hatch_dev_path.exists(),
- f"Hatch-Dev directory not found at {self.hatch_dev_path}")
-
+ self.assertTrue(
+ self.hatch_dev_path.exists(),
+ f"Hatch-Dev directory not found at {self.hatch_dev_path}",
+ )
+
# Build registry data structure from Hatch-Dev packages
self.registry_data = self._build_test_registry()
@@ -40,7 +43,7 @@ def setUp(self):
# Create validator with registry data
self.validator = HatchPackageValidator(registry_data=self.registry_data)
-
+
def _build_test_registry(self):
"""
Build a test registry data structure from Hatch-Dev packages for dependency testing.
@@ -55,25 +58,25 @@ def _build_test_registry(self):
"name": "Hatch-Dev",
"url": "file://" + str(self.hatch_dev_path),
"packages": [],
- "last_indexed": datetime.now().isoformat()
+ "last_indexed": datetime.now().isoformat(),
}
- ]
+ ],
}
-
+
# Known packages in Hatch-Dev
pkg_names = [
- "arithmetic_pkg",
- "base_pkg_1",
- "base_pkg_2",
+ "arithmetic_pkg",
+ "base_pkg_1",
+ "base_pkg_2",
"python_dep_pkg",
"circular_dep_pkg_1",
"circular_dep_pkg_2",
"complex_dep_pkg",
"simple_dep_pkg",
"missing_dep_pkg",
- "version_dep_pkg"
+ "version_dep_pkg",
]
-
+
# Add each package to the registry
for pkg_name in pkg_names:
pkg_path = self.hatch_dev_path / pkg_name
@@ -81,9 +84,9 @@ def _build_test_registry(self):
metadata_path = pkg_path / "hatch_metadata.json"
if metadata_path.exists():
try:
- with open(metadata_path, 'r') as f:
+ with open(metadata_path, "r") as f:
metadata = json.load(f)
-
+
# Create a package entry with version information
pkg_entry = {
"name": metadata.get("name", pkg_name),
@@ -96,30 +99,44 @@ def _build_test_registry(self):
"version": metadata.get("version", "1.0.0"),
"release_uri": f"file://{pkg_path}",
"author": {
- "GitHubID": metadata.get("author", {}).get("name", "test_user"),
- "email": metadata.get("author", {}).get("email", "test@example.com")
+ "GitHubID": metadata.get("author", {}).get(
+ "name", "test_user"
+ ),
+ "email": metadata.get("author", {}).get(
+ "email", "test@example.com"
+ ),
},
"added_date": datetime.now().isoformat(),
# Add dependencies as differential changes
"hatch_dependencies_added": [
{
"name": dep["name"],
- "version_constraint": dep.get("version_constraint", "")
+ "version_constraint": dep.get(
+ "version_constraint", ""
+ ),
}
- for dep in metadata.get("hatch_dependencies", [])
+ for dep in metadata.get(
+ "hatch_dependencies", []
+ )
],
"python_dependencies_added": [
{
"name": dep["name"],
- "version_constraint": dep.get("version_constraint", ""),
- "package_manager": dep.get("package_manager", "pip")
+ "version_constraint": dep.get(
+ "version_constraint", ""
+ ),
+ "package_manager": dep.get(
+ "package_manager", "pip"
+ ),
}
- for dep in metadata.get("python_dependencies", [])
+ for dep in metadata.get(
+ "python_dependencies", []
+ )
],
}
- ]
+ ],
}
-
+
# Add to registry
registry["repositories"][0]["packages"].append(pkg_entry)
except Exception as e:
@@ -127,76 +144,132 @@ def _build_test_registry(self):
raise e
return registry
-
+
def test_valid_package_arithmetic(self):
"""Test validating a simple valid package (arithmetic_pkg)."""
pkg_path = self.hatch_dev_path / "arithmetic_pkg"
is_valid, results = self.validator.validate_package(pkg_path)
-
- self.assertTrue(is_valid, f"Package validation failed for arithmetic_pkg. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid")
- self.assertTrue(results["metadata_schema"]["valid"], f"Schema validation failed: {results.get('metadata_schema', {}).get('errors')}")
- self.assertTrue(results["entry_point"]["valid"], f"Entry point validation failed: {results.get('entry_point', {}).get('errors')}")
- self.assertTrue(results["tools"]["valid"], f"Tools validation failed: {results.get('tools', {}).get('errors')}")
- self.assertTrue(results["dependencies"]["valid"], f"Dependencies validation failed: {results.get('dependencies', {}).get('errors')}")
-
+
+ self.assertTrue(
+ is_valid, f"Package validation failed for arithmetic_pkg. Errors: {results}"
+ )
+ self.assertTrue(results["valid"], "Overall validation result should be valid")
+ self.assertTrue(
+ results["metadata_schema"]["valid"],
+ f"Schema validation failed: {results.get('metadata_schema', {}).get('errors')}",
+ )
+ self.assertTrue(
+ results["entry_point"]["valid"],
+ f"Entry point validation failed: {results.get('entry_point', {}).get('errors')}",
+ )
+ self.assertTrue(
+ results["tools"]["valid"],
+ f"Tools validation failed: {results.get('tools', {}).get('errors')}",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependencies validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
+
def test_valid_package_with_dependencies(self):
"""Test validating a package with valid dependencies (simple_dep_pkg)."""
pkg_path = self.hatch_dev_path / "simple_dep_pkg"
is_valid, results = self.validator.validate_package(pkg_path)
-
- self.assertTrue(is_valid, f"Package validation failed for simple_dep_pkg. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for simple_dep_pkg")
- self.assertTrue(results["dependencies"]["valid"], f"Dependencies validation failed for simple_dep_pkg: {results.get('dependencies', {}).get('errors')}")
-
+
+ self.assertTrue(
+ is_valid, f"Package validation failed for simple_dep_pkg. Errors: {results}"
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for simple_dep_pkg",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependencies validation failed for simple_dep_pkg: {results.get('dependencies', {}).get('errors')}",
+ )
+
def test_missing_dependency(self):
"""Test validating a package with missing dependencies (missing_dep_pkg)."""
pkg_path = self.hatch_dev_path / "missing_dep_pkg"
is_valid, results = self.validator.validate_package(pkg_path)
-
- self.assertFalse(is_valid, f"Package validation should fail for missing_dep_pkg")
- self.assertFalse(results["valid"], f"Overall validation result should be invalid for missing_dep_pkg")
- self.assertFalse(results["dependencies"]["valid"], f"Dependencies validation should fail for missing_dep_pkg")
- self.assertTrue(len(results["dependencies"]["errors"]) > 0, f"Missing dependency should produce error messages")
-
+
+ self.assertFalse(is_valid, "Package validation should fail for missing_dep_pkg")
+ self.assertFalse(
+ results["valid"],
+ "Overall validation result should be invalid for missing_dep_pkg",
+ )
+ self.assertFalse(
+ results["dependencies"]["valid"],
+ "Dependencies validation should fail for missing_dep_pkg",
+ )
+ self.assertTrue(
+ len(results["dependencies"]["errors"]) > 0,
+ "Missing dependency should produce error messages",
+ )
+
# Check if the error message mentions the missing dependency
- any_error_mentions_missing = any("not found in registry" in error
- for error in results["dependencies"]["errors"])
- error_messages = "\n - ".join(results.get("dependencies", {}).get("errors", ["No errors"]))
- self.assertTrue(any_error_mentions_missing, f"Error should mention dependency not found. Actual errors: \n - {error_messages}")
-
+ any_error_mentions_missing = any(
+ "not found in registry" in error
+ for error in results["dependencies"]["errors"]
+ )
+ error_messages = "\n - ".join(
+ results.get("dependencies", {}).get("errors", ["No errors"])
+ )
+ self.assertTrue(
+ any_error_mentions_missing,
+ f"Error should mention dependency not found. Actual errors: \n - {error_messages}",
+ )
+
def test_complex_dependency_chain(self):
"""Test validating a package with complex dependency chain (complex_dep_pkg)."""
pkg_path = self.hatch_dev_path / "complex_dep_pkg"
is_valid, results = self.validator.validate_package(pkg_path)
-
- self.assertTrue(is_valid, f"Package validation failed for complex_dep_pkg. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for complex_dep_pkg")
- self.assertTrue(results["dependencies"]["valid"], f"Dependencies validation failed for complex_dep_pkg: {results.get('dependencies', {}).get('errors')}")
-
+
+ self.assertTrue(
+ is_valid,
+ f"Package validation failed for complex_dep_pkg. Errors: {results}",
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for complex_dep_pkg",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependencies validation failed for complex_dep_pkg: {results.get('dependencies', {}).get('errors')}",
+ )
+
def test_version_dependency_constraint(self):
"""Test validating a package with version-specific dependency (version_dep_pkg)."""
pkg_path = self.hatch_dev_path / "version_dep_pkg"
is_valid, results = self.validator.validate_package(pkg_path)
-
- self.assertTrue(is_valid, f"Package validation failed for version_dep_pkg. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for version_dep_pkg")
- self.assertTrue(results["dependencies"]["valid"], f"Dependencies validation failed for version_dep_pkg: {results.get('dependencies', {}).get('errors')}")
-
+
+ self.assertTrue(
+ is_valid,
+ f"Package validation failed for version_dep_pkg. Errors: {results}",
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for version_dep_pkg",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependencies validation failed for version_dep_pkg: {results.get('dependencies', {}).get('errors')}",
+ )
+
def test_version_dependency_constraint_incompatible(self):
"""Test validating a package with incompatible version dependency."""
# Create a copy of the registry with an incompatible version
modified_registry = self.registry_data.copy()
-
+
# Find base_pkg_1 in the registry
for repo in modified_registry["repositories"]:
for pkg in repo["packages"]:
if pkg["name"] == "base_pkg_1":
# Change the version to be incompatible
pkg["latest_version"] = "0.0.9"
- pkg["versions"][0]["version"] = "0.0.9"
+ pkg["versions"][0]["version"] = "0.0.9"
# Create a new registry service with the modified registry
- modified_registry_service = RegistryService(modified_registry)
+ _modified_registry_service = RegistryService(modified_registry)
# Create a new validator with the modified registry
validator = HatchPackageValidator(registry_data=modified_registry)
@@ -204,38 +277,71 @@ def test_version_dependency_constraint_incompatible(self):
# Validate the package with version-specific dependency
pkg_path = self.hatch_dev_path / "version_dep_pkg"
is_valid, results = validator.validate_package(pkg_path)
-
+
# No need to reset anything with RegistryService - each validator gets its own instance
-
- self.assertFalse(is_valid, f"Package validation should fail with incompatible version")
- self.assertFalse(results["valid"], f"Overall validation result should be invalid for incompatible version")
- self.assertFalse(results["dependencies"]["valid"], f"Dependencies validation should fail for incompatible version")
-
+
+ self.assertFalse(
+ is_valid, "Package validation should fail with incompatible version"
+ )
+ self.assertFalse(
+ results["valid"],
+ "Overall validation result should be invalid for incompatible version",
+ )
+ self.assertFalse(
+ results["dependencies"]["valid"],
+ "Dependencies validation should fail for incompatible version",
+ )
+
# Check if error message mentions version mismatch
- any_error_mentions_version = any("satisfies constraint" in error
- for error in results["dependencies"]["errors"])
- error_messages = "\n - ".join(results.get("dependencies", {}).get("errors", ["No errors"]))
- self.assertTrue(any_error_mentions_version, f"Error should mention version constraint. Actual errors: \n - {error_messages}")
-
+ any_error_mentions_version = any(
+ "satisfies constraint" in error
+ for error in results["dependencies"]["errors"]
+ )
+ error_messages = "\n - ".join(
+ results.get("dependencies", {}).get("errors", ["No errors"])
+ )
+ self.assertTrue(
+ any_error_mentions_version,
+ f"Error should mention version constraint. Actual errors: \n - {error_messages}",
+ )
+
def test_circular_dependency_packages(self):
"""Test validating packages involved in a circular dependency."""
- #load the metadata for circular_dep_pkg_2_next_v
+ # load the metadata for circular_dep_pkg_2_next_v
pkg_path = self.hatch_dev_path / "circular_dep_pkg_2_next_v"
- with open(pkg_path / "hatch_metadata.json", 'r') as f:
+ with open(pkg_path / "hatch_metadata.json", "r") as f:
metadata = json.load(f)
# Validate - should detect the circular dependency
- is_valid, results = self.validator.validate_package(pkg_path, ("circular_dep_pkg_2", metadata))
+ is_valid, results = self.validator.validate_package(
+ pkg_path, pending_update=("circular_dep_pkg_2", metadata)
+ )
+
+ self.assertFalse(
+ is_valid, "Package validation should fail for circular dependency"
+ )
+ self.assertFalse(
+ results["valid"],
+ "Overall validation result should be invalid for circular dependency",
+ )
+ self.assertFalse(
+ results["dependencies"]["valid"],
+ "Dependencies validation should fail for circular dependency",
+ )
- self.assertFalse(is_valid, f"Package validation should fail for circular dependency")
- self.assertFalse(results["valid"], f"Overall validation result should be invalid for circular dependency")
- self.assertFalse(results["dependencies"]["valid"], f"Dependencies validation should fail for circular dependency")
-
# Check if any error message mentions circular dependency
- any_error_mentions_circular = any("circular" in error.lower() for error in results["dependencies"]["errors"])
- error_messages = "\n - ".join(results.get("dependencies", {}).get("errors", ["No errors"]))
- self.assertTrue(any_error_mentions_circular, f"Error should mention circular dependency. Actual errors: \n - {error_messages}")
-
+ any_error_mentions_circular = any(
+ "circular" in error.lower()
+ for error in results["dependencies"]["errors"]
+ )
+ error_messages = "\n - ".join(
+ results.get("dependencies", {}).get("errors", ["No errors"])
+ )
+ self.assertTrue(
+ any_error_mentions_circular,
+ f"Error should mention circular dependency. Actual errors: \n - {error_messages}",
+ )
+
def test_entry_point_not_exists(self):
"""Test validating a package with a missing entry point file."""
# Create a temporary package with an invalid entry point
@@ -244,28 +350,29 @@ def test_entry_point_not_exists(self):
# Copy a valid package
pkg_path = temp_dir / "test_pkg"
shutil.copytree(self.hatch_dev_path / "arithmetic_pkg", pkg_path)
-
+
# Modify the metadata to point to a non-existent entry point
metadata_path = pkg_path / "hatch_metadata.json"
- with open(metadata_path, 'r') as f:
+ with open(metadata_path, "r") as f:
metadata = json.load(f)
-
+
metadata["entry_point"] = "non_existent_file.py"
-
- with open(metadata_path, 'w') as f:
+
+ with open(metadata_path, "w") as f:
json.dump(metadata, f)
-
+
# Validate the package
is_valid, results = self.validator.validate_package(pkg_path)
-
+
self.assertFalse(is_valid)
self.assertFalse(results["valid"])
self.assertFalse(results["entry_point"]["valid"])
self.assertTrue(len(results["entry_point"]["errors"]) > 0)
-
+
finally:
# Clean up
shutil.rmtree(temp_dir)
+
if __name__ == "__main__":
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/tests/test_package_validator_for_v1_2_0.py b/tests/test_package_validator_for_v1_2_0.py
index a2b6407..bd0ee9f 100644
--- a/tests/test_package_validator_for_v1_2_0.py
+++ b/tests/test_package_validator_for_v1_2_0.py
@@ -11,13 +11,14 @@
# Add the parent directory to the path if needed
sys.path.insert(0, str(Path(__file__).parent.parent))
-from hatch_validator.package_validator import HatchPackageValidator, PackageValidationError
-from hatch_validator.registry.registry_service import RegistryService
+from hatch_validator.package_validator import ( # noqa: E402
+ HatchPackageValidator,
+)
+from hatch_validator.registry.registry_service import RegistryService # noqa: E402
# Configure logging
logging.basicConfig(
- level=logging.DEBUG,
- format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+ level=logging.DEBUG, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger("hatch.validator_tests_v1_2_0")
logger.setLevel(logging.DEBUG)
@@ -29,14 +30,16 @@ class TestHatchPackageValidator_v1_2_0(unittest.TestCase):
- 1.1.0:
- 1.2.0:
"""
-
+
def setUp(self):
"""Set up test environment before each test."""
# Path to Hatch-Dev packages
self.hatch_dev_path = Path(__file__).parent.parent.parent / "Hatching-Dev"
- self.assertTrue(self.hatch_dev_path.exists(),
- f"Hatch-Dev directory not found at {self.hatch_dev_path}")
-
+ self.assertTrue(
+ self.hatch_dev_path.exists(),
+ f"Hatch-Dev directory not found at {self.hatch_dev_path}",
+ )
+
# Build registry data structure from Hatch-Dev packages
self.registry_data = self._build_test_registry()
@@ -45,7 +48,7 @@ def setUp(self):
# Create validator with registry data
self.validator = HatchPackageValidator(registry_data=self.registry_data)
-
+
def _build_test_registry(self):
"""
Build a test registry data structure from Hatch-Dev packages for dependency testing.
@@ -60,15 +63,15 @@ def _build_test_registry(self):
"name": "Hatch-Dev",
"url": "file://" + str(self.hatch_dev_path),
"packages": [],
- "last_indexed": datetime.now().isoformat()
+ "last_indexed": datetime.now().isoformat(),
}
- ]
+ ],
}
-
+
# Known packages in Hatch-Dev
pkg_dirs = [
- "base_pkg_1_4_0",
- "base_pkg_2_1_1_0",
+ "base_pkg_1_4_0",
+ "base_pkg_2_1_1_0",
"python_dep_pkg_1_1_0",
"circular_dep_pkg_1_1_1_0",
"circular_dep_pkg_2_2_0_0",
@@ -77,9 +80,9 @@ def _build_test_registry(self):
"missing_dep_pkg_1_1_0",
"version_dep_pkg_1_1_0",
"system_dep_pkg",
- "docker_dep_pkg"
+ "docker_dep_pkg",
]
-
+
# Add each package to the registry
for pkg_name in pkg_dirs:
pkg_path = self.hatch_dev_path / pkg_name
@@ -87,7 +90,7 @@ def _build_test_registry(self):
metadata_path = pkg_path / "hatch_metadata.json"
if metadata_path.exists():
try:
- with open(metadata_path, 'r') as f:
+ with open(metadata_path, "r") as f:
metadata = json.load(f)
deps = metadata.get("dependencies", {})
@@ -103,17 +106,25 @@ def _build_test_registry(self):
"version": metadata.get("version", "1.0.0"),
"release_uri": f"file://{pkg_path}",
"author": {
- "GitHubID": metadata.get("author", {}).get("name", "test_user"),
- "email": metadata.get("author", {}).get("email", "test@example.com")
+ "GitHubID": metadata.get("author", {}).get(
+ "name", "test_user"
+ ),
+ "email": metadata.get("author", {}).get(
+ "email", "test@example.com"
+ ),
},
"added_date": datetime.now().isoformat(),
# Add dependencies as differential changes
- "hatch_dependencies_added": deps.get("hatch", []),
- "python_dependencies_added": deps.get("python", []),
+ "hatch_dependencies_added": deps.get(
+ "hatch", []
+ ),
+ "python_dependencies_added": deps.get(
+ "python", []
+ ),
}
- ]
+ ],
}
-
+
# Add to registry
registry["repositories"][0]["packages"].append(pkg_entry)
except Exception as e:
@@ -121,76 +132,138 @@ def _build_test_registry(self):
raise e
return registry
-
+
def test_valid_package_arithmetic(self):
"""Test validating a simple valid package (arithmetic_pkg_1_2_0)."""
pkg_path = self.hatch_dev_path / "arithmetic_pkg_1_2_0"
is_valid, results = self.validator.validate_package(pkg_path)
-
- self.assertTrue(is_valid, f"Package validation failed for arithmetic_pkg_1_2_0. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid")
- self.assertTrue(results["metadata_schema"]["valid"], f"Schema validation failed: {results.get('metadata_schema', {}).get('errors')}")
- self.assertTrue(results["entry_point"]["valid"], f"Entry point validation failed: {results.get('entry_point', {}).get('errors')}")
- self.assertTrue(results["tools"]["valid"], f"Tools validation failed: {results.get('tools', {}).get('errors')}")
- self.assertTrue(results["dependencies"]["valid"], f"Dependencies validation failed: {results.get('dependencies', {}).get('errors')}")
-
+
+ self.assertTrue(
+ is_valid,
+ f"Package validation failed for arithmetic_pkg_1_2_0. Errors: {results}",
+ )
+ self.assertTrue(results["valid"], "Overall validation result should be valid")
+ self.assertTrue(
+ results["metadata_schema"]["valid"],
+ f"Schema validation failed: {results.get('metadata_schema', {}).get('errors')}",
+ )
+ self.assertTrue(
+ results["entry_point"]["valid"],
+ f"Entry point validation failed: {results.get('entry_point', {}).get('errors')}",
+ )
+ self.assertTrue(
+ results["tools"]["valid"],
+ f"Tools validation failed: {results.get('tools', {}).get('errors')}",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependencies validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
+
def test_valid_package_with_dependencies(self):
"""Test validating a package with valid dependencies (simple_dep_pkg_1_1_0)."""
pkg_path = self.hatch_dev_path / "simple_dep_pkg_1_1_0"
- with open(pkg_path / "hatch_metadata.json", 'r') as f:
+ with open(pkg_path / "hatch_metadata.json", "r") as f:
metadata = json.load(f)
- is_valid, results = self.validator.validate_package(pkg_path, pending_update=(metadata.get("name"), metadata))
-
- self.assertTrue(is_valid, f"Package validation failed for simple_dep_pkg_1_1_0. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for simple_dep_pkg_1_1_0")
- self.assertTrue(results["dependencies"]["valid"], f"Dependencies validation failed for simple_dep_pkg_1_1_0: {results.get('dependencies', {}).get('errors')}")
-
+ is_valid, results = self.validator.validate_package(
+ pkg_path, pending_update=(metadata.get("name"), metadata)
+ )
+
+ self.assertTrue(
+ is_valid,
+ f"Package validation failed for simple_dep_pkg_1_1_0. Errors: {results}",
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for simple_dep_pkg_1_1_0",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependencies validation failed for simple_dep_pkg_1_1_0: {results.get('dependencies', {}).get('errors')}",
+ )
+
def test_missing_dependency(self):
"""Test validating a package with missing dependencies (missing_dep_pkg_1_1_0)."""
pkg_path = self.hatch_dev_path / "missing_dep_pkg_1_1_0"
is_valid, results = self.validator.validate_package(pkg_path)
-
- self.assertFalse(is_valid, f"Package validation should fail for missing_dep_pkg_1_1_0.")
- self.assertFalse(results["valid"], f"Overall validation result should be invalid for missing_dep_pkg_1_1_0")
- self.assertFalse(results["dependencies"]["valid"], f"Dependencies validation should fail for missing_dep_pkg_1_1_0")
- self.assertTrue(len(results["dependencies"]["errors"]) > 0, f"Missing dependency should produce error messages")
-
+
+ self.assertFalse(
+ is_valid, "Package validation should fail for missing_dep_pkg_1_1_0."
+ )
+ self.assertFalse(
+ results["valid"],
+ "Overall validation result should be invalid for missing_dep_pkg_1_1_0",
+ )
+ self.assertFalse(
+ results["dependencies"]["valid"],
+ "Dependencies validation should fail for missing_dep_pkg_1_1_0",
+ )
+ self.assertTrue(
+ len(results["dependencies"]["errors"]) > 0,
+ "Missing dependency should produce error messages",
+ )
+
# Check if the error message mentions the missing dependency
- any_error_mentions_missing = any("not found in registry" in error
- for error in results["dependencies"]["errors"])
- error_messages = "\n - ".join(results.get("dependencies", {}).get("errors", ["No errors"]))
- self.assertTrue(any_error_mentions_missing, f"Error should mention dependency not found. Actual errors: \n - {error_messages}")
-
+ any_error_mentions_missing = any(
+ "not found in registry" in error
+ for error in results["dependencies"]["errors"]
+ )
+ error_messages = "\n - ".join(
+ results.get("dependencies", {}).get("errors", ["No errors"])
+ )
+ self.assertTrue(
+ any_error_mentions_missing,
+ f"Error should mention dependency not found. Actual errors: \n - {error_messages}",
+ )
+
def test_complex_dependency_chain(self):
"""Test validating a package with complex dependency chain (complex_dep_pkg_1_1_0)."""
pkg_path = self.hatch_dev_path / "complex_dep_pkg_1_1_0"
is_valid, results = self.validator.validate_package(pkg_path)
-
- self.assertTrue(is_valid, f"Package validation failed for complex_dep_pkg_1_1_0. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for complex_dep_pkg_1_1_0")
- self.assertTrue(results["dependencies"]["valid"], f"Dependencies validation failed for complex_dep_pkg_1_1_0: {results.get('dependencies', {}).get('errors')}")
-
+
+ self.assertTrue(
+ is_valid,
+ f"Package validation failed for complex_dep_pkg_1_1_0. Errors: {results}",
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for complex_dep_pkg_1_1_0",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependencies validation failed for complex_dep_pkg_1_1_0: {results.get('dependencies', {}).get('errors')}",
+ )
+
def test_version_dependency_constraint(self):
"""Test validating a package with version-specific dependency (version_dep_pkg_1_1_0)."""
pkg_path = self.hatch_dev_path / "version_dep_pkg_1_1_0"
is_valid, results = self.validator.validate_package(pkg_path)
-
- self.assertTrue(is_valid, f"Package validation failed for version_dep_pkg_1_1_0. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for version_dep_pkg_1_1_0")
- self.assertTrue(results["dependencies"]["valid"], f"Dependencies validation failed for version_dep_pkg_1_1_0: {results.get('dependencies', {}).get('errors')}")
-
+
+ self.assertTrue(
+ is_valid,
+ f"Package validation failed for version_dep_pkg_1_1_0. Errors: {results}",
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for version_dep_pkg_1_1_0",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependencies validation failed for version_dep_pkg_1_1_0: {results.get('dependencies', {}).get('errors')}",
+ )
+
def test_version_dependency_constraint_incompatible(self):
"""Test validating a package with incompatible version dependency (version_dep_pkg_1_1_0)."""
# Create a copy of the registry with an incompatible version
modified_registry = self.registry_data.copy()
-
+
# Find base_pkg_1 in the registry
for repo in modified_registry["repositories"]:
for pkg in repo["packages"]:
if pkg["name"] == "base_pkg_1":
# Change the version to be incompatible
pkg["latest_version"] = "0.0.9"
- pkg["versions"][0]["version"] = "0.0.9"
+ pkg["versions"][0]["version"] = "0.0.9"
# Create a new validator with the modified registry
validator = HatchPackageValidator(registry_data=modified_registry)
@@ -198,35 +271,68 @@ def test_version_dependency_constraint_incompatible(self):
# Validate the package with version-specific dependency
pkg_path = self.hatch_dev_path / "version_dep_pkg_1_1_0"
is_valid, results = validator.validate_package(pkg_path)
-
+
# No need to reset anything with RegistryService - each validator gets its own instance
-
- self.assertFalse(is_valid, f"Package validation should fail with incompatible version.")
- self.assertFalse(results["valid"], f"Overall validation result should be invalid for incompatible version")
- self.assertFalse(results["dependencies"]["valid"], f"Dependencies validation should fail for incompatible version")
-
+
+ self.assertFalse(
+ is_valid, "Package validation should fail with incompatible version."
+ )
+ self.assertFalse(
+ results["valid"],
+ "Overall validation result should be invalid for incompatible version",
+ )
+ self.assertFalse(
+ results["dependencies"]["valid"],
+ "Dependencies validation should fail for incompatible version",
+ )
+
# Check if error message mentions version mismatch
- any_error_mentions_version = any("satisfies constraint" in error
- for error in results["dependencies"]["errors"])
- error_messages = "\n - ".join(results.get("dependencies", {}).get("errors", ["No errors"]))
- self.assertTrue(any_error_mentions_version, f"Error should mention version constraint. Actual errors: \n - {error_messages}")
-
+ any_error_mentions_version = any(
+ "satisfies constraint" in error
+ for error in results["dependencies"]["errors"]
+ )
+ error_messages = "\n - ".join(
+ results.get("dependencies", {}).get("errors", ["No errors"])
+ )
+ self.assertTrue(
+ any_error_mentions_version,
+ f"Error should mention version constraint. Actual errors: \n - {error_messages}",
+ )
+
def test_circular_dependency_packages(self):
"""Test validating packages involved in a circular dependency (circular_dep_pkg_2_2_0_0_next_v)."""
pkg_path = self.hatch_dev_path / "circular_dep_pkg_2_2_0_0_next_v"
- with open(pkg_path / "hatch_metadata.json", 'r') as f:
+ with open(pkg_path / "hatch_metadata.json", "r") as f:
metadata = json.load(f)
- is_valid, results = self.validator.validate_package(pkg_path, (metadata.get("name"), metadata))
-
- self.assertFalse(is_valid, f"Package validation should fail for circular dependency.")
- self.assertFalse(results["valid"], f"Overall validation result should be invalid for circular dependency")
- self.assertFalse(results["dependencies"]["valid"], f"Dependencies validation should fail for circular dependency")
-
- any_error_mentions_circular = any("circular" in error.lower() for error in results["dependencies"]["errors"])
- error_messages = "\n - ".join(results.get("dependencies", {}).get("errors", ["No errors"]))
- self.assertTrue(any_error_mentions_circular, f"Error should mention circular dependency. Actual errors: \n - {error_messages}")
-
+ is_valid, results = self.validator.validate_package(
+ pkg_path, pending_update=(metadata.get("name"), metadata)
+ )
+
+ self.assertFalse(
+ is_valid, "Package validation should fail for circular dependency."
+ )
+ self.assertFalse(
+ results["valid"],
+ "Overall validation result should be invalid for circular dependency",
+ )
+ self.assertFalse(
+ results["dependencies"]["valid"],
+ "Dependencies validation should fail for circular dependency",
+ )
+
+ any_error_mentions_circular = any(
+ "circular" in error.lower()
+ for error in results["dependencies"]["errors"]
+ )
+ error_messages = "\n - ".join(
+ results.get("dependencies", {}).get("errors", ["No errors"])
+ )
+ self.assertTrue(
+ any_error_mentions_circular,
+ f"Error should mention circular dependency. Actual errors: \n - {error_messages}",
+ )
+
def test_entry_point_not_exists(self):
"""Test validating a package with a missing entry point file (arithmetic_pkg_1_2_0)."""
# Create a temporary package with an invalid entry point
@@ -235,24 +341,36 @@ def test_entry_point_not_exists(self):
# Copy a valid package
pkg_path = temp_dir / "test_pkg"
shutil.copytree(self.hatch_dev_path / "arithmetic_pkg_1_2_0", pkg_path)
-
+
# Modify the metadata to point to a non-existent entry point
metadata_path = pkg_path / "hatch_metadata.json"
- with open(metadata_path, 'r') as f:
+ with open(metadata_path, "r") as f:
metadata = json.load(f)
-
+
metadata["entry_point"] = "non_existent_file.py"
-
- with open(metadata_path, 'w') as f:
+
+ with open(metadata_path, "w") as f:
json.dump(metadata, f)
-
+
# Validate the package
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertFalse(is_valid, f"Package validation should fail for non_existent_entry_point.")
- self.assertFalse(results["valid"], f"Overall validation result should be invalid for non_existent_entry_point")
- self.assertFalse(results["entry_point"]["valid"], f"Entry point validation should fail for non_existent_entry_point")
- self.assertTrue(len(results["entry_point"]["errors"]) > 0, f"Entry point errors should be present for non_existent_entry_point")
+ self.assertFalse(
+ is_valid,
+ "Package validation should fail for non_existent_entry_point.",
+ )
+ self.assertFalse(
+ results["valid"],
+ "Overall validation result should be invalid for non_existent_entry_point",
+ )
+ self.assertFalse(
+ results["entry_point"]["valid"],
+ "Entry point validation should fail for non_existent_entry_point",
+ )
+ self.assertTrue(
+ len(results["entry_point"]["errors"]) > 0,
+ "Entry point errors should be present for non_existent_entry_point",
+ )
finally:
# Clean up
@@ -262,131 +380,273 @@ def test_valid_system_dependency(self):
"""Test validating a package with a valid system dependency (system_dep_pkg)."""
pkg_path = self.hatch_dev_path / "system_dep_pkg"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertTrue(is_valid, f"Package validation failed for system_dep_pkg. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for system_dep_pkg")
- self.assertTrue(results["dependencies"]["valid"], f"System dependency validation failed: {results.get('dependencies', {}).get('errors')}")
+ self.assertTrue(
+ is_valid, f"Package validation failed for system_dep_pkg. Errors: {results}"
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for system_dep_pkg",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"System dependency validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
def test_valid_docker_dependency(self):
"""Test validating a package with a valid docker dependency (docker_dep_pkg)."""
pkg_path = self.hatch_dev_path / "docker_dep_pkg"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertTrue(is_valid, f"Package validation failed for docker_dep_pkg. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for docker_dep_pkg")
- self.assertTrue(results["dependencies"]["valid"], f"Docker dependency validation failed: {results.get('dependencies', {}).get('errors')}")
+ self.assertTrue(
+ is_valid, f"Package validation failed for docker_dep_pkg. Errors: {results}"
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for docker_dep_pkg",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Docker dependency validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
def test_valid_python_dependency(self):
"""Test validating a package with a valid Python dependency (python_dep_pkg)."""
pkg_path = self.hatch_dev_path / "python_dep_pkg"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertTrue(is_valid, f"Package validation failed for python_dep_pkg. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for python_dep_pkg")
- self.assertTrue(results["dependencies"]["valid"], f"Python dependency validation failed: {results.get('dependencies', {}).get('errors')}")
+ self.assertTrue(
+ is_valid, f"Package validation failed for python_dep_pkg. Errors: {results}"
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for python_dep_pkg",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Python dependency validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
def test_valid_python_dependency_v1_2_0(self):
"""Test validating a v1.2.0 package with a valid Python dependency (python_dep_pkg_1_1_0)."""
pkg_path = self.hatch_dev_path / "python_dep_pkg_1_1_0"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertTrue(is_valid, f"Package validation failed for python_dep_pkg_1_1_0. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for python_dep_pkg_1_1_0")
- self.assertTrue(results["dependencies"]["valid"], f"Python dependency validation failed: {results.get('dependencies', {}).get('errors')}")
+ self.assertTrue(
+ is_valid,
+ f"Package validation failed for python_dep_pkg_1_1_0. Errors: {results}",
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for python_dep_pkg_1_1_0",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Python dependency validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
def test_valid_arithmetic_pkg_v1_2_0(self):
"""Test validating arithmetic_pkg_1_2_0 (v1.2.0 schema)."""
pkg_path = self.hatch_dev_path / "arithmetic_pkg_1_2_0"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertTrue(is_valid, f"Package validation failed for arithmetic_pkg_1_2_0. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for arithmetic_pkg_1_2_0")
- self.assertTrue(results["dependencies"]["valid"], f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}")
+ self.assertTrue(
+ is_valid,
+ f"Package validation failed for arithmetic_pkg_1_2_0. Errors: {results}",
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for arithmetic_pkg_1_2_0",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
def test_valid_base_pkg_1_v1_4_0(self):
"""Test validating base_pkg_1_4_0 (v1.2.0 schema)."""
pkg_path = self.hatch_dev_path / "base_pkg_1_4_0"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertTrue(is_valid, f"Package validation failed for base_pkg_1_4_0. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for base_pkg_1_4_0")
- self.assertTrue(results["dependencies"]["valid"], f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}")
+ self.assertTrue(
+ is_valid, f"Package validation failed for base_pkg_1_4_0. Errors: {results}"
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for base_pkg_1_4_0",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
def test_valid_base_pkg_2_v1_1_0(self):
"""Test validating base_pkg_2_1_0 (v1.2.0 schema)."""
pkg_path = self.hatch_dev_path / "base_pkg_2_1_0"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertTrue(is_valid, f"Package validation failed for base_pkg_2_1_0. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for base_pkg_2_1_0")
- self.assertTrue(results["dependencies"]["valid"], f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}")
+ self.assertTrue(
+ is_valid, f"Package validation failed for base_pkg_2_1_0. Errors: {results}"
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for base_pkg_2_1_0",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
def test_valid_circular_dep_pkg_1_v1_1_0(self):
"""Test validating circular_dep_pkg_1_1_0 (v1.2.0 schema)."""
pkg_path = self.hatch_dev_path / "circular_dep_pkg_1_1_0"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertTrue(is_valid, f"Package validation failed for circular_dep_pkg_1_1_0. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for circular_dep_pkg_1_1_0")
- self.assertTrue(results["dependencies"]["valid"], f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}")
+ self.assertTrue(
+ is_valid,
+ f"Package validation failed for circular_dep_pkg_1_1_0. Errors: {results}",
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for circular_dep_pkg_1_1_0",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
def test_valid_circular_dep_pkg_2_v2_0_0(self):
"""Test validating circular_dep_pkg_2_2_0_0 (v1.2.0 schema)."""
pkg_path = self.hatch_dev_path / "circular_dep_pkg_2_2_0_0"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertTrue(is_valid, f"Package validation failed for circular_dep_pkg_2_2_0_0. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for circular_dep_pkg_2_2_0_0")
- self.assertTrue(results["dependencies"]["valid"], f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}")
+ self.assertTrue(
+ is_valid,
+ f"Package validation failed for circular_dep_pkg_2_2_0_0. Errors: {results}",
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for circular_dep_pkg_2_2_0_0",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
def test_valid_complex_dep_pkg_v1_1_0(self):
"""Test validating complex_dep_pkg_1_1_0 (v1.2.0 schema)."""
pkg_path = self.hatch_dev_path / "complex_dep_pkg_1_1_0"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertTrue(is_valid, f"Package validation failed for complex_dep_pkg_1_1_0. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for complex_dep_pkg_1_1_0")
- self.assertTrue(results["dependencies"]["valid"], f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}")
+ self.assertTrue(
+ is_valid,
+ f"Package validation failed for complex_dep_pkg_1_1_0. Errors: {results}",
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for complex_dep_pkg_1_1_0",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
def test_valid_missing_dep_pkg_v1_1_0(self):
"""Test validating missing_dep_pkg_1_1_0 (v1.2.0 schema, should fail)."""
pkg_path = self.hatch_dev_path / "missing_dep_pkg_1_1_0"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertFalse(is_valid, f"Package validation should fail for missing_dep_pkg_1_1_0.")
- self.assertFalse(results["valid"], f"Overall validation result should be invalid for missing_dep_pkg_1_1_0")
- self.assertFalse(results["dependencies"]["valid"], f"Dependency validation should fail for missing_dep_pkg_1_1_0")
+ self.assertFalse(
+ is_valid, "Package validation should fail for missing_dep_pkg_1_1_0."
+ )
+ self.assertFalse(
+ results["valid"],
+ "Overall validation result should be invalid for missing_dep_pkg_1_1_0",
+ )
+ self.assertFalse(
+ results["dependencies"]["valid"],
+ "Dependency validation should fail for missing_dep_pkg_1_1_0",
+ )
def test_valid_simple_dep_pkg_v1_1_0(self):
"""Test validating simple_dep_pkg_1_1_0 (v1.2.0 schema)."""
pkg_path = self.hatch_dev_path / "simple_dep_pkg_1_1_0"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertTrue(is_valid, f"Package validation failed for simple_dep_pkg_1_1_0. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for simple_dep_pkg_1_1_0")
- self.assertTrue(results["dependencies"]["valid"], f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}")
+ self.assertTrue(
+ is_valid,
+ f"Package validation failed for simple_dep_pkg_1_1_0. Errors: {results}",
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for simple_dep_pkg_1_1_0",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
def test_valid_version_dep_pkg_v1_1_0(self):
"""Test validating version_dep_pkg_1_1_0 (v1.2.0 schema)."""
pkg_path = self.hatch_dev_path / "version_dep_pkg_1_1_0"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertTrue(is_valid, f"Package validation failed for version_dep_pkg_1_1_0. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for version_dep_pkg_1_1_0")
- self.assertTrue(results["dependencies"]["valid"], f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}")
+ self.assertTrue(
+ is_valid,
+ f"Package validation failed for version_dep_pkg_1_1_0. Errors: {results}",
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for version_dep_pkg_1_1_0",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
def test_valid_local_path_dep_pkg(self):
"""Test validating a v1.2.0 package with a valid local path dependency (local_path_dep_pkg)."""
pkg_path = self.hatch_dev_path / "local_path_dep_pkg"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertTrue(is_valid, f"Package validation failed for local_path_dep_pkg. Errors: {results}")
- self.assertTrue(results["valid"], f"Overall validation result should be valid for local_path_dep_pkg")
- self.assertTrue(results["dependencies"]["valid"], f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}")
+ self.assertTrue(
+ is_valid,
+ f"Package validation failed for local_path_dep_pkg. Errors: {results}",
+ )
+ self.assertTrue(
+ results["valid"],
+ "Overall validation result should be valid for local_path_dep_pkg",
+ )
+ self.assertTrue(
+ results["dependencies"]["valid"],
+ f"Dependency validation failed: {results.get('dependencies', {}).get('errors')}",
+ )
def test_nonexistent_repo_dep_pkg(self):
"""Test validating a v1.2.0 package with a dependency on a non-existent repository prefix (nonexistent_repo_dep_pkg)."""
pkg_path = self.hatch_dev_path / "nonexistent_repo_dep_pkg"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertFalse(is_valid, f"Package validation should fail for nonexistent_repo_dep_pkg.")
- self.assertFalse(results["valid"], f"Overall validation result should be invalid for nonexistent_repo_dep_pkg")
- self.assertFalse(results["dependencies"]["valid"], f"Dependency validation should fail for nonexistent_repo_dep_pkg")
- self.assertTrue(any("Repository" in err for err in results["dependencies"].get("errors", [])), "Error should mention missing repository.")
+ self.assertFalse(
+ is_valid, "Package validation should fail for nonexistent_repo_dep_pkg."
+ )
+ self.assertFalse(
+ results["valid"],
+ "Overall validation result should be invalid for nonexistent_repo_dep_pkg",
+ )
+ self.assertFalse(
+ results["dependencies"]["valid"],
+ "Dependency validation should fail for nonexistent_repo_dep_pkg",
+ )
+ self.assertTrue(
+ any(
+ "Repository" in err for err in results["dependencies"].get("errors", [])
+ ),
+ "Error should mention missing repository.",
+ )
def test_file_path_dep_pkg(self):
"""Test validating a v1.2.0 package with a local dependency that is a file, not a directory (file_path_dep_pkg)."""
pkg_path = self.hatch_dev_path / "file_path_dep_pkg"
is_valid, results = self.validator.validate_package(pkg_path)
- self.assertFalse(is_valid, f"Package validation should fail for file_path_dep_pkg.")
- self.assertFalse(results["valid"], f"Overall validation result should be invalid for file_path_dep_pkg")
- self.assertFalse(results["dependencies"]["valid"], f"Dependency validation should fail for file_path_dep_pkg")
-
+ self.assertFalse(
+ is_valid, "Package validation should fail for file_path_dep_pkg."
+ )
+ self.assertFalse(
+ results["valid"],
+ "Overall validation result should be invalid for file_path_dep_pkg",
+ )
+ self.assertFalse(
+ results["dependencies"]["valid"],
+ "Dependency validation should fail for file_path_dep_pkg",
+ )
+
+
if __name__ == "__main__":
- unittest.main()
\ No newline at end of file
+ unittest.main()
diff --git a/tests/test_package_validator_for_v1_2_2.py b/tests/test_package_validator_for_v1_2_2.py
index 9812124..202f358 100644
--- a/tests/test_package_validator_for_v1_2_2.py
+++ b/tests/test_package_validator_for_v1_2_2.py
@@ -6,17 +6,18 @@
"""
import unittest
-import json
from pathlib import Path
-from typing import Dict
# Add parent directory to path for imports
import sys
+
sys.path.insert(0, str(Path(__file__).parent.parent))
-from hatch_validator.core.validation_context import ValidationContext
-from hatch_validator.core.validator_factory import ValidatorFactory
-from hatch_validator.core.pkg_accessor_factory import HatchPkgAccessorFactory
+from hatch_validator.core.validation_context import ValidationContext # noqa: E402
+from hatch_validator.core.validator_factory import ValidatorFactory # noqa: E402
+from hatch_validator.core.pkg_accessor_factory import ( # noqa: E402
+ HatchPkgAccessorFactory,
+)
class TestV122PackageValidation(unittest.TestCase):
@@ -26,19 +27,16 @@ class TestV122PackageValidation(unittest.TestCase):
def setUpClass(cls):
"""Set up test fixtures."""
# Create minimal test registry data
- cls.registry_data = {
- "registry_schema_version": "1.0.0",
- "repositories": []
- }
+ cls.registry_data = {"registry_schema_version": "1.0.0", "repositories": []}
def setUp(self):
"""Set up each test."""
self.context = ValidationContext(
registry_data=self.registry_data,
allow_local_dependencies=False,
- force_schema_update=False
+ force_schema_update=False,
)
-
+
def test_valid_v122_package_with_conda_dependencies(self):
"""Test validation of valid v1.2.2 package with conda dependencies."""
metadata = {
@@ -51,7 +49,7 @@ def test_valid_v122_package_with_conda_dependencies(self):
"license": {"name": "MIT"},
"entry_point": {
"mcp_server": "server.py",
- "hatch_mcp_server": "hatch_server.py"
+ "hatch_mcp_server": "hatch_server.py",
},
"dependencies": {
"python": [
@@ -59,25 +57,25 @@ def test_valid_v122_package_with_conda_dependencies(self):
"name": "numpy",
"version_constraint": ">=1.20.0",
"package_manager": "conda",
- "channel": "conda-forge"
+ "channel": "conda-forge",
},
{
"name": "scipy",
"version_constraint": ">=1.7.0",
"package_manager": "conda",
- "channel": "bioconda"
- }
+ "channel": "bioconda",
+ },
]
- }
+ },
}
-
+
validator = ValidatorFactory.create_validator_chain("1.2.2")
is_valid, errors = validator.validate(metadata, self.context)
-
+
# Note: This will fail schema validation until we have the actual files
# but it tests the validator chain construction
self.assertIsNotNone(validator)
-
+
def test_valid_v122_package_with_pip_dependencies(self):
"""Test validation of valid v1.2.2 package with pip dependencies (backward compatibility)."""
metadata = {
@@ -90,24 +88,24 @@ def test_valid_v122_package_with_pip_dependencies(self):
"license": {"name": "MIT"},
"entry_point": {
"mcp_server": "server.py",
- "hatch_mcp_server": "hatch_server.py"
+ "hatch_mcp_server": "hatch_server.py",
},
"dependencies": {
"python": [
{
"name": "requests",
"version_constraint": ">=2.28.0",
- "package_manager": "pip"
+ "package_manager": "pip",
}
]
- }
+ },
}
-
+
validator = ValidatorFactory.create_validator_chain("1.2.2")
is_valid, errors = validator.validate(metadata, self.context)
-
+
self.assertIsNotNone(validator)
-
+
def test_valid_v122_package_with_mixed_dependencies(self):
"""Test validation of valid v1.2.2 package with mixed pip and conda dependencies."""
metadata = {
@@ -120,25 +118,25 @@ def test_valid_v122_package_with_mixed_dependencies(self):
"license": {"name": "MIT"},
"entry_point": {
"mcp_server": "server.py",
- "hatch_mcp_server": "hatch_server.py"
+ "hatch_mcp_server": "hatch_server.py",
},
"dependencies": {
"python": [
{
"name": "requests",
"version_constraint": ">=2.28.0",
- "package_manager": "pip"
+ "package_manager": "pip",
},
{
"name": "numpy",
"version_constraint": ">=1.20.0",
"package_manager": "conda",
- "channel": "conda-forge"
- }
+ "channel": "conda-forge",
+ },
]
- }
+ },
}
-
+
validator = ValidatorFactory.create_validator_chain("1.2.2")
is_valid, errors = validator.validate(metadata, self.context)
@@ -146,7 +144,9 @@ def test_valid_v122_package_with_mixed_dependencies(self):
def test_invalid_channel_for_pip_package(self):
"""Test that channel specification for pip package is invalid."""
- from hatch_validator.package.v1_2_2.dependency_validation import DependencyValidation
+ from hatch_validator.package.v1_2_2.dependency_validation import (
+ DependencyValidation,
+ )
dep_validation = DependencyValidation()
@@ -155,17 +155,21 @@ def test_invalid_channel_for_pip_package(self):
"name": "requests",
"version_constraint": ">=2.28.0",
"package_manager": "pip",
- "channel": "conda-forge" # Invalid for pip
+ "channel": "conda-forge", # Invalid for pip
}
- is_valid, errors = dep_validation._validate_single_python_dependency(dep, self.context)
+ is_valid, errors = dep_validation._validate_single_python_dependency(
+ dep, self.context
+ )
self.assertFalse(is_valid)
self.assertTrue(any("Channel" in error and "pip" in error for error in errors))
def test_invalid_channel_format(self):
"""Test that invalid channel format is rejected."""
- from hatch_validator.package.v1_2_2.dependency_validation import DependencyValidation
+ from hatch_validator.package.v1_2_2.dependency_validation import (
+ DependencyValidation,
+ )
dep_validation = DependencyValidation()
@@ -174,57 +178,80 @@ def test_invalid_channel_format(self):
"name": "numpy",
"version_constraint": ">=1.20.0",
"package_manager": "conda",
- "channel": "invalid channel!" # Invalid format (contains space and !)
+ "channel": "invalid channel!", # Invalid format (contains space and !)
}
- is_valid, errors = dep_validation._validate_single_python_dependency(dep, self.context)
+ is_valid, errors = dep_validation._validate_single_python_dependency(
+ dep, self.context
+ )
self.assertFalse(is_valid)
self.assertTrue(any("channel format" in error.lower() for error in errors))
def test_valid_channel_formats(self):
"""Test that valid channel formats are accepted."""
- from hatch_validator.package.v1_2_2.dependency_validation import DependencyValidation
+ from hatch_validator.package.v1_2_2.dependency_validation import (
+ DependencyValidation,
+ )
dep_validation = DependencyValidation()
- valid_channels = ["conda-forge", "bioconda", "colomoto", "my_channel", "channel123"]
+ valid_channels = [
+ "conda-forge",
+ "bioconda",
+ "colomoto",
+ "my_channel",
+ "channel123",
+ ]
for channel in valid_channels:
dep = {
"name": "numpy",
"version_constraint": ">=1.20.0",
"package_manager": "conda",
- "channel": channel
+ "channel": channel,
}
- is_valid, errors = dep_validation._validate_single_python_dependency(dep, self.context)
+ is_valid, errors = dep_validation._validate_single_python_dependency(
+ dep, self.context
+ )
# Should be valid (no channel format errors)
channel_format_errors = [e for e in errors if "channel format" in e.lower()]
- self.assertEqual(len(channel_format_errors), 0,
- f"Channel '{channel}' should be valid but got errors: {channel_format_errors}")
+ self.assertEqual(
+ len(channel_format_errors),
+ 0,
+ f"Channel '{channel}' should be valid but got errors: {channel_format_errors}",
+ )
def test_invalid_package_manager(self):
"""Test that invalid package_manager value is rejected."""
- from hatch_validator.package.v1_2_2.dependency_validation import DependencyValidation
+ from hatch_validator.package.v1_2_2.dependency_validation import (
+ DependencyValidation,
+ )
dep_validation = DependencyValidation()
dep = {
"name": "numpy",
"version_constraint": ">=1.20.0",
- "package_manager": "apt" # Invalid - only pip or conda allowed
+ "package_manager": "apt", # Invalid - only pip or conda allowed
}
- is_valid, errors = dep_validation._validate_single_python_dependency(dep, self.context)
+ is_valid, errors = dep_validation._validate_single_python_dependency(
+ dep, self.context
+ )
self.assertFalse(is_valid)
- self.assertTrue(any("package_manager" in error and "apt" in error for error in errors))
+ self.assertTrue(
+ any("package_manager" in error and "apt" in error for error in errors)
+ )
def test_conda_package_without_channel(self):
"""Test that conda package without channel is valid (channel is optional)."""
- from hatch_validator.package.v1_2_2.dependency_validation import DependencyValidation
+ from hatch_validator.package.v1_2_2.dependency_validation import (
+ DependencyValidation,
+ )
dep_validation = DependencyValidation()
@@ -235,14 +262,21 @@ def test_conda_package_without_channel(self):
# No channel specified - should be valid
}
- is_valid, errors = dep_validation._validate_single_python_dependency(dep, self.context)
+ is_valid, errors = dep_validation._validate_single_python_dependency(
+ dep, self.context
+ )
# Should be valid (channel is optional)
- self.assertTrue(is_valid, f"Conda package without channel should be valid, but got errors: {errors}")
+ self.assertTrue(
+ is_valid,
+ f"Conda package without channel should be valid, but got errors: {errors}",
+ )
def test_default_package_manager_is_pip(self):
"""Test that package_manager defaults to pip when not specified."""
- from hatch_validator.package.v1_2_2.dependency_validation import DependencyValidation
+ from hatch_validator.package.v1_2_2.dependency_validation import (
+ DependencyValidation,
+ )
dep_validation = DependencyValidation()
@@ -252,10 +286,15 @@ def test_default_package_manager_is_pip(self):
# No package_manager specified - should default to pip
}
- is_valid, errors = dep_validation._validate_single_python_dependency(dep, self.context)
+ is_valid, errors = dep_validation._validate_single_python_dependency(
+ dep, self.context
+ )
# Should be valid (defaults to pip)
- self.assertTrue(is_valid, f"Package without package_manager should default to pip, but got errors: {errors}")
+ self.assertTrue(
+ is_valid,
+ f"Package without package_manager should default to pip, but got errors: {errors}",
+ )
class TestV122AccessorChain(unittest.TestCase):
@@ -278,8 +317,8 @@ def test_accessor_delegates_to_v121(self):
"version": "1.0.0",
"entry_point": {
"mcp_server": "server.py",
- "hatch_mcp_server": "hatch_server.py"
- }
+ "hatch_mcp_server": "hatch_server.py",
+ },
}
# Test that accessor can access entry points (delegated to v1.2.1)
@@ -309,13 +348,13 @@ def test_validator_chain_includes_all_versions(self):
versions_in_chain = []
while current:
- if hasattr(current, 'can_handle'):
+ if hasattr(current, "can_handle"):
# Find which version this validator handles
for version in ["1.2.2", "1.2.1", "1.2.0", "1.1.0"]:
if current.can_handle(version):
versions_in_chain.append(version)
break
- current = getattr(current, 'next_validator', None)
+ current = getattr(current, "next_validator", None)
self.assertIn("1.2.2", versions_in_chain)
self.assertIn("1.2.1", versions_in_chain)
@@ -323,7 +362,5 @@ def test_validator_chain_includes_all_versions(self):
self.assertIn("1.1.0", versions_in_chain)
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()
-
-
diff --git a/tests/test_package_validator_for_v2_0_0.py b/tests/test_package_validator_for_v2_0_0.py
index e110ff4..f276ae1 100644
--- a/tests/test_package_validator_for_v2_0_0.py
+++ b/tests/test_package_validator_for_v2_0_0.py
@@ -27,8 +27,8 @@ def test_authors_array_access(self):
"name": "test-package",
"authors": [
{"name": "Author One", "email": "one@example.com"},
- {"name": "Author Two", "email": "two@example.com"}
- ]
+ {"name": "Author Two", "email": "two@example.com"},
+ ],
}
service = PackageService()
diff --git a/tests/test_registry_service.py b/tests/test_registry_service.py
index 76bac71..d55c328 100644
--- a/tests/test_registry_service.py
+++ b/tests/test_registry_service.py
@@ -5,7 +5,7 @@
following the v1.1.0 schema.
"""
import unittest
-from hatch_validator.registry.registry_service import RegistryService, RegistryError
+from hatch_validator.registry.registry_service import RegistryService
# Minimal mock registry data following v1.1.0 schema
MOCK_REGISTRY_V110 = {
@@ -28,7 +28,7 @@
"release_uri": "https://example.com/hatch-dev/base_pkg_1/1.0.0",
"added_date": "2025-06-23T12:00:00Z",
"hatch_dependencies_added": [],
- "hatch_dependencies_removed": []
+ "hatch_dependencies_removed": [],
},
{
"author": "Alice",
@@ -36,10 +36,10 @@
"release_uri": "https://example.com/hatch-dev/base_pkg_1/1.1.0",
"added_date": "2025-06-23T12:00:00Z",
"hatch_dependencies_added": [],
- "hatch_dependencies_removed": []
- }
+ "hatch_dependencies_removed": [],
+ },
],
- "latest_version": "1.1.0"
+ "latest_version": "1.1.0",
},
{
"name": "util_pkg",
@@ -52,22 +52,24 @@
"release_uri": "https://example.com/hatch-dev/util_pkg/0.1.0",
"added_date": "2025-06-23T12:00:00Z",
"hatch_dependencies_added": [
- {"name": "base_pkg_1", "type": "remote", "version_constraint": ">=1.0.0"}
+ {
+ "name": "base_pkg_1",
+ "type": "remote",
+ "version_constraint": ">=1.0.0",
+ }
],
- "hatch_dependencies_removed": []
+ "hatch_dependencies_removed": [],
}
],
- "latest_version": "0.1.0"
- }
- ]
+ "latest_version": "0.1.0",
+ },
+ ],
}
],
- "stats": {
- "total_packages": 2,
- "total_versions": 3
- }
+ "stats": {"total_packages": 2, "total_versions": 3},
}
+
class TestRegistryServiceV110(unittest.TestCase):
"""Tests for RegistryService access operations on v1.1.0 mock registry."""
@@ -104,14 +106,20 @@ def test_package_exists(self):
self.assertTrue(self.service.package_exists("Hatch-Dev:base_pkg_1"))
self.assertFalse(self.service.package_exists("nonexistent_pkg"))
# With repo_name argument
- self.assertTrue(self.service.package_exists("base_pkg_1", repo_name="Hatch-Dev"))
- self.assertFalse(self.service.package_exists("nonexistent_pkg", repo_name="Hatch-Dev"))
+ self.assertTrue(
+ self.service.package_exists("base_pkg_1", repo_name="Hatch-Dev")
+ )
+ self.assertFalse(
+ self.service.package_exists("nonexistent_pkg", repo_name="Hatch-Dev")
+ )
def test_get_package_versions(self):
versions = self.service.get_package_versions("base_pkg_1")
self.assertEqual(sorted(versions), ["1.0.0", "1.1.0"])
# With repo_name
- versions2 = self.service.get_package_versions("base_pkg_1", repo_name="Hatch-Dev")
+ versions2 = self.service.get_package_versions(
+ "base_pkg_1", repo_name="Hatch-Dev"
+ )
self.assertEqual(versions, versions2)
# With repo name in package name
versions3 = self.service.get_package_versions("Hatch-Dev:base_pkg_1")
@@ -120,9 +128,13 @@ def test_get_package_versions(self):
def test_get_package_version_info(self):
info = self.service.get_package_version_info("base_pkg_1", "1.1.0")
self.assertEqual(info["version"], "1.1.0")
- self.assertEqual(info["release_uri"], "https://example.com/hatch-dev/base_pkg_1/1.1.0")
+ self.assertEqual(
+ info["release_uri"], "https://example.com/hatch-dev/base_pkg_1/1.1.0"
+ )
# With repo_name
- info2 = self.service.get_package_version_info("base_pkg_1", "1.1.0", repo_name="Hatch-Dev")
+ info2 = self.service.get_package_version_info(
+ "base_pkg_1", "1.1.0", repo_name="Hatch-Dev"
+ )
self.assertEqual(info, info2)
# With repo name in package name
info3 = self.service.get_package_version_info("Hatch-Dev:base_pkg_1", "1.1.0")
@@ -133,17 +145,23 @@ def test_get_package_dependencies(self):
self.assertIn("dependencies", deps)
self.assertEqual(deps["dependencies"][0]["name"], "base_pkg_1")
# With repo_name
- deps2 = self.service.get_package_dependencies("util_pkg", version="0.1.0", repo_name="Hatch-Dev")
+ deps2 = self.service.get_package_dependencies(
+ "util_pkg", version="0.1.0", repo_name="Hatch-Dev"
+ )
self.assertEqual(deps, deps2)
# With repo name in package name
- deps3 = self.service.get_package_dependencies("Hatch-Dev:util_pkg", version="0.1.0")
+ deps3 = self.service.get_package_dependencies(
+ "Hatch-Dev:util_pkg", version="0.1.0"
+ )
self.assertEqual(deps, deps3)
def test_get_package_uri(self):
uri = self.service.get_package_uri("base_pkg_1", "1.0.0")
self.assertEqual(uri, "https://example.com/hatch-dev/base_pkg_1/1.0.0")
# With repo_name
- uri2 = self.service.get_package_uri("base_pkg_1", "1.0.0", repo_name="Hatch-Dev")
+ uri2 = self.service.get_package_uri(
+ "base_pkg_1", "1.0.0", repo_name="Hatch-Dev"
+ )
self.assertEqual(uri, uri2)
# With repo name in package name
uri3 = self.service.get_package_uri("Hatch-Dev:base_pkg_1", "1.0.0")
@@ -153,7 +171,9 @@ def test_find_compatible_version(self):
v = self.service.find_compatible_version("base_pkg_1", ">=1.0.0")
self.assertIn(v, ["1.0.0", "1.1.0"])
# With repo_name
- v2 = self.service.find_compatible_version("base_pkg_1", ">=1.0.0", repo_name="Hatch-Dev")
+ v2 = self.service.find_compatible_version(
+ "base_pkg_1", ">=1.0.0", repo_name="Hatch-Dev"
+ )
self.assertIn(v2, ["1.0.0", "1.1.0"])
# With repo name in package name
v3 = self.service.find_compatible_version("Hatch-Dev:base_pkg_1", ">=1.0.0")
@@ -183,5 +203,6 @@ def test_get_registry_data(self):
def test_get_schema_version(self):
self.assertEqual(self.service.get_schema_version(), "1.1.0")
+
if __name__ == "__main__":
unittest.main()
diff --git a/tests/test_schema_validators.py b/tests/test_schema_validators.py
index ad75a3d..12c798e 100644
--- a/tests/test_schema_validators.py
+++ b/tests/test_schema_validators.py
@@ -10,25 +10,26 @@
# Add parent directory to path for imports
import sys
+
sys.path.insert(0, str(Path(__file__).parent.parent))
-from hatch_validator.core.validation_context import ValidationContext
-from hatch_validator.core.validator_base import Validator
-from hatch_validator.core.validation_strategy import (
+from hatch_validator.core.validation_context import ValidationContext # noqa: E402
+from hatch_validator.core.validator_base import Validator # noqa: E402
+from hatch_validator.core.validation_strategy import ( # noqa: E402
DependencyValidationStrategy,
ToolsValidationStrategy,
EntryPointValidationStrategy,
- SchemaValidationStrategy
+ SchemaValidationStrategy,
)
-from hatch_validator.core.validator_factory import ValidatorFactory
+from hatch_validator.core.validator_factory import ValidatorFactory # noqa: E402
class ConcreteValidator(Validator):
"""Concrete implementation of Validator for testing."""
-
+
def __init__(self, supported_version: str, next_validator=None):
"""Initialize test validator.
-
+
Args:
supported_version (str): Version this validator supports
next_validator: Next validator in chain
@@ -36,18 +37,22 @@ def __init__(self, supported_version: str, next_validator=None):
super().__init__(next_validator)
self.supported_version = supported_version
self.validation_called = False
-
- def validate(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Test implementation of validate method."""
self.validation_called = True
-
+
if not self.can_handle(metadata.get("package_schema_version", "")):
if self.next_validator:
return self.next_validator.validate(metadata, context)
- return False, [f"Unsupported schema version: {metadata.get('package_schema_version')}"]
-
+ return False, [
+ f"Unsupported schema version: {metadata.get('package_schema_version')}"
+ ]
+
return True, []
-
+
def can_handle(self, schema_version: str) -> bool:
"""Test implementation of can_handle method."""
return schema_version == self.supported_version
@@ -55,12 +60,14 @@ def can_handle(self, schema_version: str) -> bool:
class ConcreteDependencyValidationStrategy(DependencyValidationStrategy):
"""Concrete implementation of DependencyValidationStrategy for testing."""
-
+
def __init__(self):
"""Initialize test strategy."""
self.validation_called = False
-
- def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> Tuple[bool, List[str]]:
+
+ def validate_dependencies(
+ self, metadata: Dict, context: ValidationContext
+ ) -> Tuple[bool, List[str]]:
"""Test implementation of validate_dependencies method."""
self.validation_called = True
return True, []
@@ -68,41 +75,41 @@ def validate_dependencies(self, metadata: Dict, context: ValidationContext) -> T
class TestValidationContext(unittest.TestCase):
"""Test cases for ValidationContext class."""
-
+
def test_initialization_with_defaults(self):
"""Test ValidationContext initialization with default values."""
context = ValidationContext()
-
+
self.assertIsNone(context.package_dir)
self.assertIsNone(context.registry_data)
self.assertTrue(context.allow_local_dependencies)
self.assertFalse(context.force_schema_update)
self.assertEqual(context.additional_data, {})
-
+
def test_initialization_with_values(self):
"""Test ValidationContext initialization with provided values."""
package_dir = Path("/test/package")
registry_data = {"test": "data"}
-
+
context = ValidationContext(
package_dir=package_dir,
registry_data=registry_data,
allow_local_dependencies=False,
- force_schema_update=True
+ force_schema_update=True,
)
-
+
self.assertEqual(context.package_dir, package_dir)
self.assertEqual(context.registry_data, registry_data)
self.assertFalse(context.allow_local_dependencies)
self.assertTrue(context.force_schema_update)
-
+
def test_set_and_get_data(self):
"""Test setting and getting additional data in context."""
context = ValidationContext()
-
+
context.set_data("test_key", "test_value")
self.assertEqual(context.get_data("test_key"), "test_value")
-
+
# Test default value
self.assertEqual(context.get_data("nonexistent_key", "default"), "default")
self.assertIsNone(context.get_data("nonexistent_key"))
@@ -110,62 +117,62 @@ def test_set_and_get_data(self):
class TestSchemaValidator(unittest.TestCase):
"""Test cases for Validator abstract base class."""
-
+
def test_chain_construction(self):
"""Test that validator chain can be constructed properly."""
validator1 = ConcreteValidator("1.1.0")
validator2 = ConcreteValidator("1.0.0")
-
+
validator1.set_next(validator2)
-
+
self.assertEqual(validator1.next_validator, validator2)
self.assertIsNone(validator2.next_validator)
-
+
def test_can_handle_functionality(self):
"""Test the can_handle method functionality."""
validator = ConcreteValidator("1.1.0")
-
+
self.assertTrue(validator.can_handle("1.1.0"))
self.assertFalse(validator.can_handle("1.2.0"))
self.assertFalse(validator.can_handle(""))
-
+
def test_validation_delegation(self):
"""Test that validation is properly delegated in the chain."""
validator1 = ConcreteValidator("1.2.0")
validator2 = ConcreteValidator("1.1.0")
validator1.set_next(validator2)
-
+
context = ValidationContext()
metadata = {"package_schema_version": "1.1.0"}
-
+
# Should delegate to validator2
is_valid, errors = validator1.validate(metadata, context)
-
+
self.assertTrue(is_valid)
self.assertEqual(errors, [])
self.assertTrue(validator1.validation_called)
self.assertTrue(validator2.validation_called)
-
+
def test_validation_without_delegation(self):
"""Test validation when validator can handle the version directly."""
validator = ConcreteValidator("1.1.0")
context = ValidationContext()
metadata = {"package_schema_version": "1.1.0"}
-
+
is_valid, errors = validator.validate(metadata, context)
-
+
self.assertTrue(is_valid)
self.assertEqual(errors, [])
self.assertTrue(validator.validation_called)
-
+
def test_validation_failure_no_handler(self):
"""Test validation failure when no validator in chain can handle version."""
validator = ConcreteValidator("1.1.0")
context = ValidationContext()
metadata = {"package_schema_version": "2.0.0"}
-
+
is_valid, errors = validator.validate(metadata, context)
-
+
self.assertFalse(is_valid)
self.assertEqual(len(errors), 1)
self.assertIn("Unsupported schema version", errors[0])
@@ -173,68 +180,69 @@ def test_validation_failure_no_handler(self):
class TestValidationStrategies(unittest.TestCase):
"""Test cases for validation strategy interfaces."""
-
+
def test_dependency_strategy_interface(self):
"""Test that DependencyValidationStrategy interface works correctly."""
strategy = ConcreteDependencyValidationStrategy()
context = ValidationContext()
metadata = {"dependencies": []}
-
+
is_valid, errors = strategy.validate_dependencies(metadata, context)
-
+
self.assertTrue(is_valid)
self.assertEqual(errors, [])
self.assertTrue(strategy.validation_called)
-
+
def test_cannot_instantiate_abstract_classes(self):
"""Test that abstract base classes cannot be instantiated directly."""
with self.assertRaises(TypeError):
Validator()
-
+
with self.assertRaises(TypeError):
DependencyValidationStrategy()
-
+
with self.assertRaises(TypeError):
ToolsValidationStrategy()
-
+
with self.assertRaises(TypeError):
EntryPointValidationStrategy()
-
+
with self.assertRaises(TypeError):
SchemaValidationStrategy()
class TestValidatorFactory(unittest.TestCase):
"""Test cases for ValidatorFactory class."""
+
def test_factory_implementation(self):
"""Test that factory now works after Phase 2 implementation."""
validator = ValidatorFactory.create_validator_chain()
self.assertIsNotNone(validator)
-
+
validator_v1_1_0 = ValidatorFactory.create_validator_chain("1.1.0")
self.assertIsNotNone(validator_v1_1_0)
-
+
# Test v1.2.0 validator creation
validator_v1_2_0 = ValidatorFactory.create_validator_chain("1.2.0")
self.assertIsNotNone(validator_v1_2_0)
-
+
def test_v1_2_0_validator_chain_delegation(self):
"""Test that v1.2.0 validator properly delegates to v1.1.0."""
validator = ValidatorFactory.create_validator_chain("1.2.0")
context = ValidationContext()
-
+
# Test v1.2.0 metadata
v1_2_0_metadata = {"package_schema_version": "1.2.0"}
is_valid, errors = validator.validate(v1_2_0_metadata, context)
self.assertIsInstance(is_valid, bool)
self.assertIsInstance(errors, list)
-
+
# Test v1.1.0 metadata (should delegate)
v1_1_0_metadata = {"package_schema_version": "1.1.0"}
is_valid, errors = validator.validate(v1_1_0_metadata, context)
self.assertIsInstance(is_valid, bool)
self.assertIsInstance(errors, list)
-
+
def test_supported_versions_includes_v1_2_0(self):
"""Test that v1.2.0 is included in supported versions."""
supported_versions = ValidatorFactory.get_supported_versions()
diff --git a/tests/test_schemas_retriever.py b/tests/test_schemas_retriever.py
index 271f25e..d36d93b 100644
--- a/tests/test_schemas_retriever.py
+++ b/tests/test_schemas_retriever.py
@@ -1,8 +1,11 @@
"""Integration tests for schemas_retriever with real network calls."""
-import os
import unittest
-from hatch_validator.schemas.schemas_retriever import get_package_schema, get_registry_schema
+from hatch_validator.schemas.schemas_retriever import (
+ get_package_schema,
+ get_registry_schema,
+)
+
class TestSchemaRetrieverIntegration(unittest.TestCase):
"""Integration tests for schemas_retriever with real network calls."""
@@ -11,19 +14,31 @@ def test_real_github_api_call(self):
"""Test real GitHub API call for schema info."""
schema = get_package_schema(force_update=True)
self.assertIsInstance(schema, dict, "Downloaded schema should be a dict")
- self.assertIn("title", schema, "Downloaded schema should contain a 'title' field")
+ self.assertIn(
+ "title", schema, "Downloaded schema should contain a 'title' field"
+ )
def test_real_registry_schema_download(self):
"""Test real registry schema download from GitHub."""
schema = get_registry_schema(force_update=True)
- self.assertIsInstance(schema, dict, "Downloaded registry schema should be a dict")
- self.assertIn("title", schema, "Downloaded registry schema should contain a 'title' field")
+ self.assertIsInstance(
+ schema, dict, "Downloaded registry schema should be a dict"
+ )
+ self.assertIn(
+ "title", schema, "Downloaded registry schema should contain a 'title' field"
+ )
def test_real_specific_version_download(self):
"""Test real download of a specific schema version from GitHub."""
schema = get_package_schema(version="1.2.0", force_update=True)
- self.assertIsInstance(schema, dict, "Downloaded specific version schema should be a dict")
- self.assertIn("title", schema, "Downloaded specific version schema should contain a 'title' field")
+ self.assertIsInstance(
+ schema, dict, "Downloaded specific version schema should be a dict"
+ )
+ self.assertIn(
+ "title",
+ schema,
+ "Downloaded specific version schema should contain a 'title' field",
+ )
def test_cache_behavior(self):
"""Test that schema is loaded from cache if not forcing update."""
@@ -31,8 +46,15 @@ def test_cache_behavior(self):
schema1 = get_package_schema(force_update=True)
# Second, call without force_update (should use cache, not re-download)
schema2 = get_package_schema(force_update=False)
- self.assertIsInstance(schema2, dict, "Schema loaded from cache should be a dict")
- self.assertEqual(schema1["title"], schema2["title"], "Schema loaded from cache should match the forced download")
+ self.assertIsInstance(
+ schema2, dict, "Schema loaded from cache should be a dict"
+ )
+ self.assertEqual(
+ schema1["title"],
+ schema2["title"],
+ "Schema loaded from cache should match the forced download",
+ )
+
if __name__ == "__main__":
unittest.main()
diff --git a/tests/test_v1_2_2_integration.py b/tests/test_v1_2_2_integration.py
index 5a30224..cc421a6 100644
--- a/tests/test_v1_2_2_integration.py
+++ b/tests/test_v1_2_2_integration.py
@@ -10,26 +10,25 @@
sys.path.insert(0, str(Path(__file__).parent.parent))
-from hatch_validator.core.validator_factory import ValidatorFactory
-from hatch_validator.core.pkg_accessor_factory import HatchPkgAccessorFactory
-from hatch_validator.core.validation_context import ValidationContext
+from hatch_validator.core.validator_factory import ValidatorFactory # noqa: E402
+from hatch_validator.core.pkg_accessor_factory import ( # noqa: E402
+ HatchPkgAccessorFactory,
+)
+from hatch_validator.core.validation_context import ValidationContext # noqa: E402
class TestV122Integration(unittest.TestCase):
"""Integration tests for v1.2.2 schema support."""
-
+
def setUp(self):
"""Set up test environment."""
- self.registry_data = {
- "registry_schema_version": "1.0.0",
- "repositories": []
- }
+ self.registry_data = {"registry_schema_version": "1.0.0", "repositories": []}
self.context = ValidationContext(
registry_data=self.registry_data,
allow_local_dependencies=False,
- force_schema_update=False
+ force_schema_update=False,
)
-
+
def test_full_v122_package_with_conda(self):
"""Test complete v1.2.2 package with conda dependencies."""
metadata = {
@@ -39,19 +38,13 @@ def test_full_v122_package_with_conda(self):
"version": "2.1.0",
"description": "A bioinformatics analysis tool using conda packages",
"tags": ["bioinformatics", "conda", "analysis"],
- "author": {
- "name": "Research Team",
- "email": "research@example.com"
- },
- "license": {
- "name": "MIT",
- "uri": "https://opensource.org/licenses/MIT"
- },
+ "author": {"name": "Research Team", "email": "research@example.com"},
+ "license": {"name": "MIT", "uri": "https://opensource.org/licenses/MIT"},
"repository": "https://github.com/example/bioinformatics-tool",
"documentation": "https://bioinformatics-tool.readthedocs.io",
"entry_point": {
"mcp_server": "server.py",
- "hatch_mcp_server": "hatch_server.py"
+ "hatch_mcp_server": "hatch_server.py",
},
"dependencies": {
"python": [
@@ -59,74 +52,75 @@ def test_full_v122_package_with_conda(self):
"name": "numpy",
"version_constraint": ">=1.20.0",
"package_manager": "conda",
- "channel": "conda-forge"
+ "channel": "conda-forge",
},
{
"name": "biopython",
"version_constraint": ">=1.79",
"package_manager": "conda",
- "channel": "bioconda"
+ "channel": "bioconda",
},
{
"name": "requests",
"version_constraint": ">=2.28.0",
- "package_manager": "pip"
- }
+ "package_manager": "pip",
+ },
]
},
"tools": [
{
"name": "analyze_sequence",
- "description": "Analyze DNA/RNA sequences"
+ "description": "Analyze DNA/RNA sequences",
},
- {
- "name": "compare_genomes",
- "description": "Compare genomic data"
- }
- ]
+ {"name": "compare_genomes", "description": "Compare genomic data"},
+ ],
}
-
+
# Create validator chain
validator = ValidatorFactory.create_validator_chain("1.2.2")
-
+
# Verify validator can handle v1.2.2
self.assertTrue(validator.can_handle("1.2.2"))
-
+
# Create accessor chain
accessor = HatchPkgAccessorFactory.create_accessor_chain("1.2.2")
-
+
# Verify accessor can handle v1.2.2
self.assertTrue(accessor.can_handle("1.2.2"))
-
+
# Test accessor methods
self.assertEqual(accessor.get_name(metadata), "bioinformatics_tool")
self.assertEqual(accessor.get_version(metadata), "2.1.0")
self.assertEqual(accessor.get_mcp_entry_point(metadata), "server.py")
- self.assertEqual(accessor.get_hatch_mcp_entry_point(metadata), "hatch_server.py")
-
+ self.assertEqual(
+ accessor.get_hatch_mcp_entry_point(metadata), "hatch_server.py"
+ )
+
# Test dependency access
deps = accessor.get_dependencies(metadata)
self.assertIn("python", deps)
self.assertEqual(len(deps["python"]), 3)
-
+
# Verify conda dependencies
conda_deps = [d for d in deps["python"] if d.get("package_manager") == "conda"]
self.assertEqual(len(conda_deps), 2)
-
+
# Verify pip dependencies
- pip_deps = [d for d in deps["python"] if d.get("package_manager", "pip") == "pip"]
+ pip_deps = [
+ d for d in deps["python"] if d.get("package_manager", "pip") == "pip"
+ ]
self.assertEqual(len(pip_deps), 1)
-
+
print("\nā
Integration test passed!")
- print(f" - Validator chain constructed for v1.2.2")
- print(f" - Accessor chain constructed for v1.2.2")
- print(f" - Package metadata accessed successfully")
+ print(" - Validator chain constructed for v1.2.2")
+ print(" - Accessor chain constructed for v1.2.2")
+ print(" - Package metadata accessed successfully")
print(f" - Conda dependencies: {len(conda_deps)}")
print(f" - Pip dependencies: {len(pip_deps)}")
-
+
def test_backward_compatibility_v121(self):
"""Test that v1.2.2 chain can handle v1.2.1 packages."""
- metadata_v121 = {
+ _metadata_v121 = {
"package_schema_version": "1.2.1",
"name": "legacy_package",
"version": "1.0.0",
@@ -136,30 +130,29 @@ def test_backward_compatibility_v121(self):
"license": {"name": "MIT"},
"entry_point": {
"mcp_server": "server.py",
- "hatch_mcp_server": "hatch_server.py"
+ "hatch_mcp_server": "hatch_server.py",
},
"dependencies": {
"python": [
{
"name": "requests",
"version_constraint": ">=2.28.0",
- "package_manager": "pip"
+ "package_manager": "pip",
}
]
- }
+ },
}
-
+
# Create v1.2.2 validator chain
validator = ValidatorFactory.create_validator_chain("1.2.2")
-
+
# Should delegate to v1.2.1 validator
self.assertFalse(validator.can_handle("1.2.1"))
self.assertTrue(validator.next_validator.can_handle("1.2.1"))
-
+
print("\nā
Backward compatibility test passed!")
- print(f" - v1.2.2 chain correctly delegates to v1.2.1")
+ print(" - v1.2.2 chain correctly delegates to v1.2.1")
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main(verbosity=2)
-
diff --git a/tests/test_version_utils.py b/tests/test_version_utils.py
index 9b6d313..4e69884 100644
--- a/tests/test_version_utils.py
+++ b/tests/test_version_utils.py
@@ -6,15 +6,15 @@
import unittest
from hatch_validator.utils.version_utils import (
- VersionConstraintValidator,
+ VersionConstraintValidator,
DependencyConstraintResolver,
- VersionConstraintError
+ VersionConstraintError,
)
class TestVersionConstraintValidator(unittest.TestCase):
"""Test cases for the VersionConstraintValidator class."""
-
+
def test_validate_version_valid_cases(self):
"""Test validation of valid version strings."""
valid_versions = [
@@ -24,33 +24,31 @@ def test_validate_version_valid_cases(self):
"10.20.30",
"1.0.0-alpha",
"1.0.0+build.1",
- "2.0.0-rc.1+build.123"
+ "2.0.0-rc.1+build.123",
]
-
+
for ver in valid_versions:
with self.subTest(version=ver):
valid, error = VersionConstraintValidator.validate_version(ver)
- self.assertTrue(valid, f"Version '{ver}' should be valid but got error: {error}")
- self.assertIsNone(error, f"Valid version '{ver}' should not have error message")
-
+ self.assertTrue(
+ valid, f"Version '{ver}' should be valid but got error: {error}"
+ )
+ self.assertIsNone(
+ error, f"Valid version '{ver}' should not have error message"
+ )
+
def test_validate_version_invalid_cases(self):
"""Test validation of invalid version strings."""
- invalid_versions = [
- "",
- None,
- "1.2", # May be valid depending on packaging version
- "1.2.3.4.5.6", # May be valid
- "invalid",
- "1.2.3-",
- "1.2.3+",
- ]
-
- for ver in [None, "", "invalid", "1.2.3-", "1.2.3+"]:
+ invalid_versions = [None, "", "invalid", "1.2.3-", "1.2.3+"]
+
+ for ver in invalid_versions:
with self.subTest(version=ver):
valid, error = VersionConstraintValidator.validate_version(ver)
self.assertFalse(valid, f"Version '{ver}' should be invalid")
- self.assertIsNotNone(error, f"Invalid version '{ver}' should have error message")
-
+ self.assertIsNotNone(
+ error, f"Invalid version '{ver}' should have error message"
+ )
+
def test_validate_constraint_valid_cases(self):
"""Test validation of valid constraint strings."""
valid_constraints = [
@@ -60,15 +58,23 @@ def test_validate_constraint_valid_cases(self):
"~=1.4",
">1.0.0,<2.0.0",
">=1.0.0,!=1.5.0,<2.0.0",
- "==1.0.*"
+ "==1.0.*",
]
-
+
for constraint in valid_constraints:
with self.subTest(constraint=constraint):
- valid, error = VersionConstraintValidator.validate_constraint(constraint)
- self.assertTrue(valid, f"Constraint '{constraint}' should be valid but got error: {error}")
- self.assertIsNone(error, f"Valid constraint '{constraint}' should not have error message")
-
+ valid, error = VersionConstraintValidator.validate_constraint(
+ constraint
+ )
+ self.assertTrue(
+ valid,
+ f"Constraint '{constraint}' should be valid but got error: {error}",
+ )
+ self.assertIsNone(
+ error,
+ f"Valid constraint '{constraint}' should not have error message",
+ )
+
def test_validate_constraint_invalid_cases(self):
"""Test validation of invalid constraint strings."""
invalid_constraints = [
@@ -80,13 +86,18 @@ def test_validate_constraint_invalid_cases(self):
"1.0.0 >=",
">=1.0.0 <=",
]
-
+
for constraint in invalid_constraints:
with self.subTest(constraint=constraint):
- valid, error = VersionConstraintValidator.validate_constraint(constraint)
+ valid, error = VersionConstraintValidator.validate_constraint(
+ constraint
+ )
self.assertFalse(valid, f"Constraint '{constraint}' should be invalid")
- self.assertIsNotNone(error, f"Invalid constraint '{constraint}' should have error message")
-
+ self.assertIsNotNone(
+ error,
+ f"Invalid constraint '{constraint}' should have error message",
+ )
+
def test_is_version_compatible_true_cases(self):
"""Test version compatibility when version satisfies constraint."""
test_cases = [
@@ -97,13 +108,18 @@ def test_is_version_compatible_true_cases(self):
("1.5.0", ">=1.0.0,<2.0.0"),
("1.4.5", "~=1.4"),
]
-
+
for version_str, constraint in test_cases:
with self.subTest(version=version_str, constraint=constraint):
- compatible, error = VersionConstraintValidator.is_version_compatible(version_str, constraint)
- self.assertTrue(compatible, f"Version '{version_str}' should satisfy constraint '{constraint}'")
- self.assertIsNone(error, f"Compatible check should not have error")
-
+ compatible, error = VersionConstraintValidator.is_version_compatible(
+ version_str, constraint
+ )
+ self.assertTrue(
+ compatible,
+ f"Version '{version_str}' should satisfy constraint '{constraint}'",
+ )
+ self.assertIsNone(error, "Compatible check should not have error")
+
def test_is_version_compatible_false_cases(self):
"""Test version compatibility when version does not satisfy constraint."""
test_cases = [
@@ -114,13 +130,18 @@ def test_is_version_compatible_false_cases(self):
("2.5.0", ">=1.0.0,<2.0.0"),
("1.3.0", "~=1.4"),
]
-
+
for version_str, constraint in test_cases:
with self.subTest(version=version_str, constraint=constraint):
- compatible, error = VersionConstraintValidator.is_version_compatible(version_str, constraint)
- self.assertFalse(compatible, f"Version '{version_str}' should not satisfy constraint '{constraint}'")
- self.assertIsNone(error, f"Incompatible check should not have error")
-
+ compatible, error = VersionConstraintValidator.is_version_compatible(
+ version_str, constraint
+ )
+ self.assertFalse(
+ compatible,
+ f"Version '{version_str}' should not satisfy constraint '{constraint}'",
+ )
+ self.assertIsNone(error, "Incompatible check should not have error")
+
def test_is_version_compatible_error_cases(self):
"""Test version compatibility with invalid inputs."""
error_cases = [
@@ -129,43 +150,58 @@ def test_is_version_compatible_error_cases(self):
("", ">=1.0.0"),
("1.0.0", ""),
]
-
+
for version_str, constraint in error_cases:
with self.subTest(version=version_str, constraint=constraint):
- compatible, error = VersionConstraintValidator.is_version_compatible(version_str, constraint)
- self.assertFalse(compatible, f"Invalid inputs should return False")
- self.assertIsNotNone(error, f"Invalid inputs should have error message")
-
+ compatible, error = VersionConstraintValidator.is_version_compatible(
+ version_str, constraint
+ )
+ self.assertFalse(compatible, "Invalid inputs should return False")
+ self.assertIsNotNone(error, "Invalid inputs should have error message")
+
def test_parse_constraint_operators(self):
"""Test parsing constraint strings into operators and versions."""
test_cases = [
(">=1.0.0", [(">=", "1.0.0")]),
("==1.2.3", [("==", "1.2.3")]),
- (">1.0.0,<2.0.0", [(">=", "1.0.0"), ("<", "2.0.0")]), # Note: > might be normalized to >=
+ (
+ ">1.0.0,<2.0.0",
+ [(">=", "1.0.0"), ("<", "2.0.0")],
+ ), # Note: > might be normalized to >=
]
-
+
for constraint, expected in test_cases:
with self.subTest(constraint=constraint):
try:
- operators = VersionConstraintValidator.parse_constraint_operators(constraint)
- self.assertIsInstance(operators, list, "Should return a list of tuples")
- self.assertGreater(len(operators), 0, "Should return at least one operator")
+ operators = VersionConstraintValidator.parse_constraint_operators(
+ constraint
+ )
+ self.assertIsInstance(
+ operators, list, "Should return a list of tuples"
+ )
+ self.assertGreater(
+ len(operators), 0, "Should return at least one operator"
+ )
for op, ver in operators:
self.assertIsInstance(op, str, "Operator should be string")
self.assertIsInstance(ver, str, "Version should be string")
except VersionConstraintError:
- self.fail(f"Valid constraint '{constraint}' should not raise VersionConstraintError")
-
+ self.fail(
+ f"Valid constraint '{constraint}' should not raise VersionConstraintError"
+ )
+
def test_parse_constraint_operators_invalid(self):
"""Test parsing invalid constraint strings raises error."""
invalid_constraints = ["invalid", ">=", ">>1.0.0"]
-
+
for constraint in invalid_constraints:
with self.subTest(constraint=constraint):
- with self.assertRaises(VersionConstraintError,
- msg=f"Invalid constraint '{constraint}' should raise VersionConstraintError"):
+ with self.assertRaises(
+ VersionConstraintError,
+ msg=f"Invalid constraint '{constraint}' should raise VersionConstraintError",
+ ):
VersionConstraintValidator.parse_constraint_operators(constraint)
-
+
def test_get_constraint_bounds(self):
"""Test extracting min/max bounds from constraints."""
test_cases = [
@@ -174,17 +210,29 @@ def test_get_constraint_bounds(self):
(">=1.0.0,<=2.0.0", ("1.0.0", "2.0.0")),
("==1.5.0", ("1.5.0", "1.5.0")),
]
-
+
for constraint, expected_bounds in test_cases:
with self.subTest(constraint=constraint):
try:
- min_ver, max_ver = VersionConstraintValidator.get_constraint_bounds(constraint)
+ min_ver, max_ver = VersionConstraintValidator.get_constraint_bounds(
+ constraint
+ )
expected_min, expected_max = expected_bounds
- self.assertEqual(min_ver, expected_min, f"Minimum version mismatch for '{constraint}'")
- self.assertEqual(max_ver, expected_max, f"Maximum version mismatch for '{constraint}'")
+ self.assertEqual(
+ min_ver,
+ expected_min,
+ f"Minimum version mismatch for '{constraint}'",
+ )
+ self.assertEqual(
+ max_ver,
+ expected_max,
+ f"Maximum version mismatch for '{constraint}'",
+ )
except VersionConstraintError:
- self.fail(f"Valid constraint '{constraint}' should not raise VersionConstraintError")
-
+ self.fail(
+ f"Valid constraint '{constraint}' should not raise VersionConstraintError"
+ )
+
def test_constraints_overlap_true_cases(self):
"""Test constraint overlap detection when constraints do overlap."""
overlapping_cases = [
@@ -192,13 +240,18 @@ def test_constraints_overlap_true_cases(self):
(">=1.0.0,<=2.0.0", ">=1.5.0,<=1.8.0"),
("==1.5.0", ">=1.0.0,<=2.0.0"),
]
-
+
for constraint1, constraint2 in overlapping_cases:
with self.subTest(constraint1=constraint1, constraint2=constraint2):
- overlap, error = VersionConstraintValidator.constraints_overlap(constraint1, constraint2)
- self.assertTrue(overlap, f"Constraints '{constraint1}' and '{constraint2}' should overlap")
- self.assertIsNone(error, f"Overlap check should not have error")
-
+ overlap, error = VersionConstraintValidator.constraints_overlap(
+ constraint1, constraint2
+ )
+ self.assertTrue(
+ overlap,
+ f"Constraints '{constraint1}' and '{constraint2}' should overlap",
+ )
+ self.assertIsNone(error, "Overlap check should not have error")
+
def test_constraints_overlap_false_cases(self):
"""Test constraint overlap detection when constraints don't overlap."""
non_overlapping_cases = [
@@ -206,13 +259,18 @@ def test_constraints_overlap_false_cases(self):
(">2.0.0", "<1.0.0"),
("==1.0.0", "==2.0.0"),
]
-
+
for constraint1, constraint2 in non_overlapping_cases:
with self.subTest(constraint1=constraint1, constraint2=constraint2):
- overlap, error = VersionConstraintValidator.constraints_overlap(constraint1, constraint2)
- self.assertFalse(overlap, f"Constraints '{constraint1}' and '{constraint2}' should not overlap")
- self.assertIsNone(error, f"Non-overlap check should not have error")
-
+ overlap, error = VersionConstraintValidator.constraints_overlap(
+ constraint1, constraint2
+ )
+ self.assertFalse(
+ overlap,
+ f"Constraints '{constraint1}' and '{constraint2}' should not overlap",
+ )
+ self.assertIsNone(error, "Non-overlap check should not have error")
+
def test_normalize_constraint(self):
"""Test constraint normalization."""
test_cases = [
@@ -220,79 +278,103 @@ def test_normalize_constraint(self):
">=1.0.0,<=2.0.0",
"==1.5.0",
]
-
+
for constraint in test_cases:
with self.subTest(constraint=constraint):
- normalized, error = VersionConstraintValidator.normalize_constraint(constraint)
- self.assertIsNotNone(normalized, f"Normalized constraint should not be None")
- self.assertIsNone(error, f"Normalization should not have error")
+ normalized, error = VersionConstraintValidator.normalize_constraint(
+ constraint
+ )
+ self.assertIsNotNone(
+ normalized, "Normalized constraint should not be None"
+ )
+ self.assertIsNone(error, "Normalization should not have error")
# Check that normalized constraint is still valid
valid, _ = VersionConstraintValidator.validate_constraint(normalized)
- self.assertTrue(valid, f"Normalized constraint '{normalized}' should be valid")
+ self.assertTrue(
+ valid, f"Normalized constraint '{normalized}' should be valid"
+ )
class TestDependencyConstraintResolver(unittest.TestCase):
"""Test cases for the DependencyConstraintResolver class."""
-
+
def setUp(self):
"""Set up test fixtures."""
self.resolver = DependencyConstraintResolver()
-
+
def test_check_constraint_compatibility_empty(self):
"""Test compatibility check with empty constraint list."""
- compatible, errors = self.resolver.check_constraint_compatibility([], "test_package")
+ compatible, errors = self.resolver.check_constraint_compatibility(
+ [], "test_package"
+ )
self.assertTrue(compatible, "Empty constraint list should be compatible")
self.assertEqual(errors, [], "Empty constraint list should have no errors")
-
+
def test_check_constraint_compatibility_single_valid(self):
"""Test compatibility check with single valid constraint."""
- compatible, errors = self.resolver.check_constraint_compatibility([">=1.0.0"], "test_package")
+ compatible, errors = self.resolver.check_constraint_compatibility(
+ [">=1.0.0"], "test_package"
+ )
self.assertTrue(compatible, "Single valid constraint should be compatible")
self.assertEqual(errors, [], "Single valid constraint should have no errors")
-
+
def test_check_constraint_compatibility_single_invalid(self):
"""Test compatibility check with single invalid constraint."""
- compatible, errors = self.resolver.check_constraint_compatibility(["invalid"], "test_package")
- self.assertFalse(compatible, "Single invalid constraint should not be compatible")
- self.assertGreater(len(errors), 0, "Single invalid constraint should have errors")
-
+ compatible, errors = self.resolver.check_constraint_compatibility(
+ ["invalid"], "test_package"
+ )
+ self.assertFalse(
+ compatible, "Single invalid constraint should not be compatible"
+ )
+ self.assertGreater(
+ len(errors), 0, "Single invalid constraint should have errors"
+ )
+
def test_check_constraint_compatibility_multiple_compatible(self):
"""Test compatibility check with multiple compatible constraints."""
constraints = [">=1.0.0", "<=2.0.0", "!=1.5.0"]
- compatible, errors = self.resolver.check_constraint_compatibility(constraints, "test_package")
+ compatible, errors = self.resolver.check_constraint_compatibility(
+ constraints, "test_package"
+ )
self.assertTrue(compatible, "Compatible constraints should be compatible")
self.assertEqual(errors, [], "Compatible constraints should have no errors")
-
+
def test_check_constraint_compatibility_multiple_incompatible(self):
"""Test compatibility check with incompatible constraints."""
constraints = [">=2.0.0", "<=1.0.0"]
- compatible, errors = self.resolver.check_constraint_compatibility(constraints, "test_package")
- self.assertFalse(compatible, "Incompatible constraints should not be compatible")
- self.assertGreater(len(errors), 0, "Incompatible constraints should have errors")
-
+ compatible, errors = self.resolver.check_constraint_compatibility(
+ constraints, "test_package"
+ )
+ self.assertFalse(
+ compatible, "Incompatible constraints should not be compatible"
+ )
+ self.assertGreater(
+ len(errors), 0, "Incompatible constraints should have errors"
+ )
+
def test_resolve_constraints_empty(self):
"""Test constraint resolution with empty list."""
result, errors = self.resolver.resolve_constraints([])
self.assertIsNone(result, "Empty constraint list should return None")
self.assertGreater(len(errors), 0, "Empty constraint list should have error")
-
+
def test_resolve_constraints_single(self):
"""Test constraint resolution with single constraint."""
result, errors = self.resolver.resolve_constraints([">=1.0.0"])
self.assertIsNotNone(result, "Single constraint should return result")
self.assertEqual(errors, [], "Single constraint should have no errors")
-
+
def test_resolve_constraints_multiple_compatible(self):
"""Test constraint resolution with multiple compatible constraints."""
constraints = [">=1.0.0", "<=2.0.0"]
result, errors = self.resolver.resolve_constraints(constraints)
self.assertIsNotNone(result, "Compatible constraints should return result")
self.assertEqual(errors, [], "Compatible constraints should have no errors")
-
+
# Check that result is a valid constraint
valid, _ = VersionConstraintValidator.validate_constraint(result)
self.assertTrue(valid, f"Resolved constraint '{result}' should be valid")
-
+
def test_resolve_constraints_multiple_incompatible(self):
"""Test constraint resolution with incompatible constraints."""
constraints = [">=2.0.0", "<=1.0.0"]
@@ -302,8 +384,8 @@ def test_resolve_constraints_multiple_incompatible(self):
if result is not None:
# If we get a result, it should be valid
valid, _ = VersionConstraintValidator.validate_constraint(result)
- self.assertTrue(valid, f"Any returned constraint should be valid")
+ self.assertTrue(valid, "Any returned constraint should be valid")
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()