"""Import work packages into OpenProject from a CSV file.

Reads connection settings from an optional JSON config file (path given as the
first CLI argument, defaulting to ``config.json``), falling back to interactive
prompts, then creates one work package per CSV row via the OpenProject v3 REST
API using Basic Auth (username ``apikey``, password = API token).
"""

import base64
import json
import math
import os
import sys

import numpy as np
import pandas as pd

# Columns handled specially (links / structured fields); everything else is
# passed through to the payload as-is.
RESERVED_COLUMNS = {
    "subject",
    "description",
    "status",
    "priority",
    "type",
    "version",
    "assignee",
    "project",
}

# CSV column name -> camelCase attribute name the OpenProject API expects.
DATE_ATTRS = {
    "start_date": "startDate",
    "due_date": "dueDate",
    "closed_on": "closedOn",
}


def scrub_nans(value):
    """Recursively replace float NaN with None so the payload is JSON-safe.

    NOTE(review): the original definition of this helper was lost in the
    corrupted source; this reconstruction is inferred from its call site
    (applied to the payload dict right before JSON encoding) — confirm.
    """
    if isinstance(value, float) and math.isnan(value):
        return None
    if isinstance(value, dict):
        return {k: scrub_nans(v) for k, v in value.items()}
    if isinstance(value, list):
        return [scrub_nans(v) for v in value]
    return value


def load_config(config_path=None):
    """Load settings from a JSON config file.

    Args:
        config_path: Path to the config file. Defaults to the first CLI
            argument, or "config.json" when no argument was given.

    Returns:
        The parsed config dict, or {} when the file is missing or unreadable
        (the caller then falls back to interactive prompts).
    """
    if config_path is None:
        config_path = sys.argv[1] if len(sys.argv) > 1 else "config.json"

    if os.path.isfile(config_path):
        try:
            with open(config_path, "r", encoding="utf-8") as f:
                cfg = json.load(f)
            print(f"Loaded config from {config_path}")
            return cfg
        except Exception as e:
            # Best effort: a broken config file degrades to prompting.
            print(f"Could not read config file {config_path}: {e}")
            return {}
    print(f"No config file found at {config_path}. Using interactive prompts.")
    return {}


def normalize_key(name):
    """Normalize a lookup key: stringify, lowercase, strip. None passes through."""
    if name is None:
        return None
    return str(name).lower().strip()


def build_payload(row_norm, project_id, maps, idx=0):
    """Build the OpenProject work-package payload for one normalized CSV row.

    Args:
        row_norm: Mapping of normalized column name -> cell value; must
            contain a non-empty "subject".
        project_id: Numeric project id as a string (used in the project href).
        maps: Dict of lookup tables keyed "user_map", "status_map",
            "priority_map", "type_map", "version_map"; each maps a
            normalized display name to an OpenProject id.
        idx: Row index, used only in warning messages.

    Returns:
        The payload dict ready for POST /api/v3/work_packages.
    """
    links = {
        "project": {"href": f"/api/v3/projects/{project_id}"},
        # Default type; overridden below when type_map resolves the row's type.
        "type": {"href": "/api/v3/types/1"},
    }
    payload = {"_links": links, "subject": row_norm["subject"]}

    description = row_norm.get("description") or ""
    if description:
        payload["description"] = {"format": "markdown", "raw": description}

    # One data-driven pass replaces five copy-pasted mapping stanzas:
    # (csv column, config map name, API collection, message when unmapped).
    link_specs = (
        ("assignee", "user_map", "users", "leaving unassigned"),
        ("status", "status_map", "statuses", "leaving default"),
        ("priority", "priority_map", "priorities", "leaving default"),
        ("type", "type_map", "types", "keeping default /types/1"),
        ("version", "version_map", "versions", "leaving unset"),
    )
    for col, map_name, collection, miss_action in link_specs:
        value = row_norm.get(col)
        if not value:
            continue
        mapped_id = maps.get(map_name, {}).get(normalize_key(value))
        if mapped_id:
            links[col] = {"href": f"/api/v3/{collection}/{mapped_id}"}
        else:
            print(f"Row {idx}: no {map_name} entry for {col} '{value}', {miss_action}")

    # Pass every remaining non-empty column through dynamically, renaming
    # date-ish columns to the attribute names OpenProject expects.
    for key, value in row_norm.items():
        if key in RESERVED_COLUMNS or value in (None, ""):
            continue
        payload[DATE_ATTRS.get(key, key)] = value

    return payload


def main():
    """Run the interactive CSV -> OpenProject import."""
    # Imported lazily so the pure helpers above stay importable (and testable)
    # even when the third-party dependency is not installed.
    import requests

    config = load_config()

    # name -> id lookup tables from config, with keys normalized to lowercase.
    maps = {
        name: {normalize_key(k): v for k, v in config.get(name, {}).items()}
        for name in ("user_map", "status_map", "priority_map", "type_map", "version_map")
    }

    # Settings: a config value wins, otherwise prompt interactively.
    openproject_url = (
        config.get("openproject_url")
        or input("Enter your OpenProject URL (for example, https://openproject.local): ")
    ).strip().rstrip("/")

    api_token = (
        config.get("api_token")
        or input("Enter your OpenProject API token: ")
    ).strip()

    # Project id may be an int in config; URLs need a string.
    project_id = config.get("project_id")
    if project_id is None:
        project_id = input("Enter your OpenProject numeric project ID (for example, 5): ").strip()
    else:
        project_id = str(project_id).strip()

    csv_file = (
        config.get("csv_file")
        or config.get("csv_path")
        or input("Enter the full path to the CSV file to import (for example, /path/to/file.csv): ")
    ).strip()
    # Remove any surrounding quotes from the input path.
    csv_file = csv_file.strip('"').strip("'")

    if not os.path.isfile(csv_file):
        print(f"The file '{csv_file}' does not exist. Please check the path and try again.")
        sys.exit(1)
    print(f"Using CSV file: {csv_file}")

    df = pd.read_csv(csv_file, header=0)
    # Drop anonymous "Unnamed" columns, then make NaN/NaT JSON-safe.
    df = df.loc[:, ~df.columns.str.startswith("Unnamed")]
    df = df.replace({np.nan: None})

    # Normalize column names: "Start Date" -> "start_date".
    col_map = {col: col.strip().lower().replace(" ", "_") for col in df.columns}
    print("Detected columns:")
    for orig, norm in col_map.items():
        print(f"  {orig!r} -> {norm!r}")

    # Basic Auth header: username "apikey", password = API token.
    auth_header = base64.b64encode(f"apikey:{api_token}".encode("utf-8")).decode("utf-8")
    headers = {
        "Authorization": f"Basic {auth_header}",
        "Content-Type": "application/json",
    }

    # Fail fast if the URL, token, or project id is wrong. A timeout keeps a
    # hung server from stalling the import forever.
    try:
        test_response = requests.get(
            f"{openproject_url}/api/v3/projects/{project_id}",
            headers=headers,
            timeout=30,
        )
    except Exception as e:
        print(f"Failed to connect to API: {e}")
        sys.exit(1)
    if test_response.status_code != 200:
        print(f"API token test failed - Status {test_response.status_code}: {test_response.text}")
        sys.exit(1)
    print("API token verified. Proceeding with import.")

    # Track subjects seen in this run to avoid accidental duplicates in one CSV.
    seen_subjects = set()

    # One work package per CSV row.
    for idx, row in df.iterrows():
        row_norm = {norm: row.get(orig) for orig, norm in col_map.items()}

        subject = row_norm.get("subject")
        if not subject:
            print(f"Row {idx}: missing 'subject', skipping")
            continue
        if subject in seen_subjects:
            print(f"Row {idx}: duplicate subject in CSV, skipping: {subject}")
            continue
        seen_subjects.add(subject)

        clean_payload = scrub_nans(build_payload(row_norm, project_id, maps, idx))

        print(f"\n=== Row {idx} subject: {subject} ===")
        print(json.dumps(clean_payload, indent=2))

        # Single POST per row.
        response = requests.post(
            f"{openproject_url}/api/v3/work_packages",
            headers=headers,
            json=clean_payload,
            timeout=30,
        )
        if response.status_code == 201:
            print(f"Created work package: {subject}")
        else:
            print(f"Failed to create {subject} - Status {response.status_code}: {response.text}")


# Built by @SOCKS for OpenProject ingestion
if __name__ == "__main__":
    main()