Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
*.egg-info
**/__pycache__
.artifacts_cache.json
.artifacts_index.json
.envrc
.venv
_build
dist/
glrd/artifacts-cache.json
shell.nix
releases.json
releases.yaml
Expand Down
27 changes: 9 additions & 18 deletions glrd/manage.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,11 +29,10 @@
timestamp_to_isodate,
get_version,
NoAliasDumper,
get_flavors_from_git,
get_s3_artifacts_data,
get_flavors_from_s3_artifacts,
)
from python_gardenlinux_lib.flavors.parse_flavors import (
parse_flavors_commit,
)
from python_gardenlinux_lib.s3.s3 import get_s3_artifacts, get_s3_client

# silence boto3 logging
boto3.set_stream_logger(name="botocore.credentials", level=logging.ERROR)
Expand Down Expand Up @@ -643,29 +642,21 @@ def create_single_release(release_type, args, existing_releases):
# Create version object
version = {"major": major, "minor": minor, "patch": patch}

# First try to get flavors from flavors.yaml
flavors = parse_flavors_commit(
commit, version=version, query_s3=False, logger=logging.getLogger()
)
flavors = get_flavors_from_git(commit)

# Only if no flavors found in flavors.yaml, try S3
if not flavors:
logging.info("No flavors found in flavors.yaml, checking S3 artifacts...")
# Get artifacts data from S3 with caching
artifacts_data = get_s3_artifacts(
# Get S3 artifacts using gardenlinux library
artifacts_data = get_s3_artifacts_data(
DEFAULTS["ARTIFACTS_S3_BUCKET_NAME"],
DEFAULTS["ARTIFACTS_S3_PREFIX"],
logger=logging.getLogger(),
DEFAULTS["ARTIFACTS_S3_CACHE_FILE"],
)

if artifacts_data:
flavors = parse_flavors_commit(
commit,
version=version,
query_s3=True,
s3_objects=artifacts_data,
logger=logging.getLogger(),
)
flavors = get_flavors_from_s3_artifacts(artifacts_data, version, commit)
else:
logging.warning("No artifacts data available from S3")

Expand Down Expand Up @@ -1348,7 +1339,7 @@ def merge_existing_s3_data(bucket_name, bucket_key, local_file, new_data):

def download_all_s3_files(bucket_name, bucket_prefix):
"""Download all release files from S3 bucket."""
s3_client = get_s3_client()
s3_client = boto3.client("s3")

try:
# List all objects in the bucket with the given prefix
Expand Down
45 changes: 20 additions & 25 deletions glrd/update.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,14 @@
import boto3

from glrd.manage import download_all_s3_files, upload_all_local_files
from glrd.util import DEFAULTS, ERROR_CODES, get_version

from python_gardenlinux_lib.flavors.parse_flavors import parse_flavors_commit
from python_gardenlinux_lib.s3.s3 import get_s3_artifacts
from glrd.util import (
DEFAULTS,
ERROR_CODES,
get_version,
get_flavors_from_git,
get_s3_artifacts_data,
get_flavors_from_s3_artifacts,
)

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -168,30 +172,23 @@ def update_flavors(release):
commit = release["git"]["commit"]
version = release["version"]

# First try flavors.yaml
flavors = parse_flavors_commit(
commit, version=version, query_s3=False, logger=logger
)
# First try flavors.yaml using gardenlinux library
flavors = get_flavors_from_git(commit)

# If no flavors found, try S3
if not flavors:
logger.info(
f"No flavors found in flavors.yaml for {release['name']}, checking S3..."
)
artifacts_data = get_s3_artifacts(
# Get S3 artifacts using gardenlinux library
artifacts_data = get_s3_artifacts_data(
DEFAULTS["ARTIFACTS_S3_BUCKET_NAME"],
DEFAULTS["ARTIFACTS_S3_PREFIX"],
logger=logger,
DEFAULTS["ARTIFACTS_S3_CACHE_FILE"],
)

if artifacts_data:
flavors = parse_flavors_commit(
commit,
version=version,
query_s3=True,
s3_objects=artifacts_data,
logger=logger,
)
flavors = get_flavors_from_s3_artifacts(artifacts_data, version, commit)
else:
logger.warning(f"No artifacts data available from S3 for {release['name']}")

Expand Down Expand Up @@ -291,8 +288,11 @@ def process_releases(args):
logging.info(
f"Fetching artifacts data from S3 bucket {DEFAULTS['ARTIFACTS_S3_BUCKET_NAME']}"
)
artifacts_data = get_s3_artifacts(
DEFAULTS["ARTIFACTS_S3_BUCKET_NAME"], DEFAULTS["ARTIFACTS_S3_PREFIX"]
# Get S3 artifacts using gardenlinux library
artifacts_data = get_s3_artifacts_data(
DEFAULTS["ARTIFACTS_S3_BUCKET_NAME"],
DEFAULTS["ARTIFACTS_S3_PREFIX"],
DEFAULTS["ARTIFACTS_S3_CACHE_FILE"],
)

if not artifacts_data or not artifacts_data.get("artifacts"):
Expand Down Expand Up @@ -400,12 +400,7 @@ def process_releases(args):
update_source_repo_attribute([release])

# Get flavors for this commit using artifacts data
flavors = parse_flavors_commit(
commit,
version=version,
query_s3=True,
s3_objects=artifacts_data,
)
flavors = get_flavors_from_s3_artifacts(artifacts_data, version, commit)
if flavors:
release["flavors"] = flavors
modified = True
Expand Down
151 changes: 150 additions & 1 deletion glrd/util.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,23 @@
import importlib.metadata
import logging
import os
import re
import signal
import sys
from datetime import datetime
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Any, Dict, List, Optional

import importlib.metadata
import pytz
import yaml

from gardenlinux.constants import GL_REPOSITORY_URL
from gardenlinux.flavors import Parser
from gardenlinux.git import Repository
from gardenlinux.s3 import Bucket


ERROR_CODES = {
"validation_error": 1,
"subprocess_output_error": 2,
Expand Down Expand Up @@ -50,6 +59,7 @@
"ARTIFACTS_S3_BUCKET_NAME": "gardenlinux-github-releases",
"ARTIFACTS_S3_PREFIX": "objects/",
"ARTIFACTS_S3_BASE_URL": ("https://gardenlinux-github-releases.s3.amazonaws.com"),
"ARTIFACTS_S3_CACHE_FILE": "artifacts-cache.json",
# Garden Linux repository
"GL_REPO_NAME": "gardenlinux",
"GL_REPO_OWNER": "gardenlinux",
Expand Down Expand Up @@ -145,3 +155,142 @@ def ignore_aliases(self, data):


signal.signal(signal.SIGPIPE, handle_broken_pipe_error)


def get_flavors_from_git(commit: str) -> List[str]:
    """
    Get flavors from Git repository using gardenlinux library.

    Performs a sparse checkout of only ``flavors.yaml`` from the Garden
    Linux repository and parses it with the gardenlinux flavor Parser.

    Args:
        commit: Git commit hash (or 'latest' for the default branch tip)

    Returns:
        Sorted list of flavor strings; empty list when flavors.yaml is
        missing or any error occurs (errors are logged at debug level).
    """

    try:
        with TemporaryDirectory() as workdir:
            # Sparse checkout keeps the clone minimal: only flavors.yaml
            # is materialized in the temporary directory.
            Repository.checkout_repo_sparse(
                git_directory=workdir,
                repo_url=GL_REPOSITORY_URL,
                commit=None if commit == "latest" else commit,
                pathspecs=["flavors.yaml"],
            )

            flavors_path = Path(workdir) / "flavors.yaml"
            if not flavors_path.exists():
                logging.warning("flavors.yaml not found in repository")
                return []

            flavors_yaml = yaml.safe_load(flavors_path.read_text())
            combinations = Parser(flavors_yaml).filter()
            # Parser.filter() yields (something, combination) pairs; only
            # the combination strings are kept, deduplicated, and sorted.
            flavors = sorted({combo for _, combo in combinations})
            logging.info(f"Found {len(flavors)} flavors in Git")
            return flavors
    except Exception as exc:
        # Best-effort: callers fall back to S3 when this returns [].
        logging.debug(f"Could not get flavors from Git: {exc}")
        return []


def get_s3_artifacts_data(
    bucket_name: str,
    prefix: str,
    cache_file: Optional[str] = None,
    cache_ttl: int = 3600,
) -> Optional[Dict]:
    """
    Get S3 artifacts data using gardenlinux library with caching support.

    Args:
        bucket_name: S3 bucket name
        prefix: S3 prefix
        cache_file: Optional cache file path for S3 object keys
        cache_ttl: Cache time-to-live in seconds (default: 1 hour)

    Returns:
        Dictionary with 'artifacts' (list of S3 object keys) and 'index'
        (mapping of version string -> list of object keys), or None when
        listing the bucket fails.
    """

    try:
        bucket = Bucket(bucket_name)

        # Serves the cached key list when the cache file is still fresh,
        # otherwise lists the bucket under the given prefix.
        artifacts = bucket.read_cache_file_or_filter(
            cache_file, cache_ttl=cache_ttl, Prefix=prefix
        )

        # Build a version -> keys index. Keys are assumed to look like
        # "<prefix>/<version>-<rest>/..." — TODO confirm against the real
        # bucket layout; keys whose second path segment contains no "-"
        # are silently left out of the index.
        index: Dict[str, List[str]] = {}
        for key in artifacts:
            try:
                parts = key.split("/")
                if len(parts) >= 3:
                    version_commit = parts[1]
                    if "-" in version_commit:
                        # Only the portion before the first "-" is used as
                        # the index key; the remainder is discarded (the
                        # previous code bound it to an unused variable).
                        version_part = version_commit.split("-", 1)[0]
                        # NOTE(review): get_flavors_from_s3_artifacts looks
                        # up "<version>-<commit>" keys, which can never
                        # match these version-only keys — verify the
                        # intended index key format.
                        index.setdefault(version_part, []).append(key)
            except Exception as e:
                logging.debug(f"Could not parse version from key {key}: {e}")

        result = {"index": index, "artifacts": artifacts}
        logging.info(f"Found {len(artifacts)} artifacts and {len(index)} index entries")
        return result
    except Exception as e:
        logging.error(f"Error getting S3 artifacts: {e}")
        return None


def _flavor_from_artifact_key(key: str, version_info: str) -> Optional[str]:
    """Extract the flavor name from one S3 artifact key.

    Keys are assumed to look like "<prefix>/<flavor>-<version>.../..." —
    TODO confirm against the real bucket layout.
    """
    parts = key.split("/")
    if len(parts) < 2:
        return None
    flavor_with_version = parts[1]
    # Everything before the trailing "-<version>" marker is the flavor
    # name; when the marker is absent, rsplit leaves the segment intact.
    flavor = flavor_with_version.rsplit(f"-{version_info}", 1)[0]
    return flavor or None


def get_flavors_from_s3_artifacts(
    artifacts_data: Dict, version: Dict[str, Any], commit: str
) -> List[str]:
    """
    Extract flavors from S3 artifacts data.

    Args:
        artifacts_data: S3 artifacts data dictionary with 'index' and
            'artifacts' entries (see get_s3_artifacts_data)
        version: Version dictionary with at least 'major'; 'minor'
            defaults to 0 when missing
        commit: Git commit hash (only the first 8 characters are matched)

    Returns:
        Sorted list of flavor name strings; empty when nothing matches
        or on error (errors are logged).
    """

    try:
        version_info = f"{version['major']}.{version.get('minor', 0)}"
        commit_short = commit[:8]

        # Try index lookup first.
        search_key = f"{version_info}-{commit_short}"
        indexed_keys = artifacts_data.get("index", {}).get(search_key)
        if indexed_keys is not None:
            # The index stores raw S3 object keys, not flavor names, so the
            # names must be parsed out here (previously the keys were
            # returned verbatim, violating the documented contract).
            found_flavors = set()
            for key in indexed_keys:
                flavor = _flavor_from_artifact_key(key, version_info)
                if flavor:
                    found_flavors.add(flavor)
            logging.debug(f"Found flavors in S3 index for {search_key}")
            return sorted(found_flavors)

        # Fall back to scanning every artifact key for this version/commit.
        found_flavors = set()
        for key in artifacts_data.get("artifacts", []):
            if version_info in key and commit_short in key:
                try:
                    flavor = _flavor_from_artifact_key(key, version_info)
                    if flavor:
                        found_flavors.add(flavor)
                except Exception as e:
                    logging.debug(f"Error parsing artifact key {key}: {e}")
                    continue
        flavors = sorted(found_flavors)
        if flavors:
            logging.info(f"Found {len(flavors)} flavors in S3 artifacts")
        return flavors
    except Exception as e:
        logging.error(f"Error processing S3 artifacts: {e}")
        return []
Loading
Loading