From d092f5dbeaf2c4723e2831601c57c88526212d6a Mon Sep 17 00:00:00 2001 From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Date: Thu, 25 Apr 2024 00:24:58 -0500 Subject: [PATCH 01/18] xseed.sh v2.0.0 --- xseed.sh | 193 +++++++++++++++++++++---------------------------------- 1 file changed, 72 insertions(+), 121 deletions(-) diff --git a/xseed.sh b/xseed.sh index fe4fe63..657962b 100755 --- a/xseed.sh +++ b/xseed.sh @@ -1,137 +1,88 @@ #!/bin/bash -# Configure variables to fit your setup -# Assumes download clients have the same name across all Starrs using this script. -# See https://www.cross-seed.org/docs/basics/faq-troubleshooting#searching-media-libraries-vs-torrent-data-data-based-searching -# For how to configure Cross Seed with Starr Data Matching -# Download Client Names in Starr -torrentclientname="Qbit" -usenetclientname="SABnzbd" -# Cross seed host (ip or container name) and port information -xseed_host="crossseed" -xseed_port="2468" -# Set a path to store this script's database of prior searched -# This is mounted to /config in the containers -log_file="/config/xseed_db.log" -# Optional; Set to "" to ignore -xseed_apikey="" +# Load environment variables +source ./.env -# Determine app and set variables -if [ -n "$radarr_eventtype" ]; then - app="radarr" - # shellcheck disable=SC2154 - clientID="$radarr_download_client" - # shellcheck disable=SC2154 - downloadID="$radarr_download_id" - # shellcheck disable=SC2154 - filePath="$radarr_moviefile_path" - # shellcheck disable=SC2154 - eventType="$radarr_eventtype" -elif [ -n "$sonarr_eventtype" ]; then - app="sonarr" - # shellcheck disable=SC2154 - clientID="$sonarr_download_client" - # shellcheck disable=SC2154 - downloadID="$sonarr_download_id" - # shellcheck disable=SC2154 - filePath="$sonarr_episodefile_path" - # shellcheck disable=SC2154 - folderPath="$sonarr_episodefile_sourcefolder" - # shellcheck disable=SC2154 - eventType="$sonarr_eventtype" -elif [ -n "$Lidarr_EventType" ]; then - 
app="lidarr" - # shellcheck disable=SC2154 - clientID="$Lidarr_Download_Client" - # shellcheck disable=SC2154 - filePath="$Lidarr_Artist_Path" - # shellcheck disable=SC2154 - downloadID="$Lidarr_Download_Id" - # shellcheck disable=SC2154 - eventType="$Lidarr_EventType" -elif [ -n "$Readarr_EventType" ]; then - app="readarr" - # shellcheck disable=SC2154 - clientID="$Readarr_Download_Client" - # shellcheck disable=SC2154 - filePath="$Readarr_Author_Path" - # shellcheck disable=SC2154 - downloadID="$Readarr_Download_Id" - # shellcheck disable=SC2154 - eventType="$Readarr_EventType" -else - echo "|WARN| Unknown Event Type. Failing." - exit 1 -fi -echo "$app detected with event type $eventType" - -# Function to send request to cross-seed +# Function to send a request to Cross Seed API cross_seed_request() { local endpoint="$1" local data="$2" - if [ -n "$xseed_apikey" ]; then - curl --silent --output /dev/null --write-out "%{http_code}" -X POST "http://$xseed_host:$xseed_port/api/$endpoint" -H "X-Api-Key: $xseed_apikey" --data-urlencode "$data" - else - curl --silent --output /dev/null --write-out "%{http_code}" -X POST "http://$xseed_host:$xseed_port/api/$endpoint" --data-urlencode "$data" + local headers=(-X POST "http://$xseed_host:$xseed_port/api/$endpoint" --data-urlencode "$data") + if [ -n "$xseed_apikey" ]; then + headers+=(-H "X-Api-Key: $xseed_apikey") fi + response=$(curl --silent --output /dev/null --write-out "%{http_code}" "${headers[@]}") + echo $response } -# Create the log file if it doesn't exist -[ ! 
-f "$log_file" ] && touch "$log_file" +# Detect application and set environment +detect_application() { + app="unknown" + if [ -n "$radarr_eventtype" ]; then + app="radarr" + clientID="$radarr_download_client" + downloadID="$radarr_download_id" + filePath="$radarr_moviefile_path" + eventType="$radarr_eventtype" + elif [ -n "$sonarr_eventtype" ]; then + app="sonarr" + clientID="$sonarr_download_client" + downloadID="$sonarr_download_id" + filePath="$sonarr_episodefile_path" + folderPath="$sonarr_episodefile_sourcefolder" + eventType="$sonarr_eventtype" + elif [ -n "$lidarr_eventtype" ]; then + app="lidarr" + clientID="$lidarr_download_client" + filePath="$lidarr_artist_path" + downloadID="$lidarr_download_id" + eventType="$lidarr_eventtype" + elif [ -n "$readarr_eventtype" ]; then + app="readarr" + clientID="$readarr_download_client" + filePath="$readarr_author_path" + downloadID="$readarr_download_id" + eventType="$readarr_eventtype" + fi + [ "$app" == "unknown" ] && { echo "Unknown application type detected. Exiting."; exit 1; } +} -# Check if the downloadID exists in the log file -unique_id="${downloadID}-${clientID}" -# if id is blank (i.e. manual import skip) -if [ -z "$unique_id" ]; then - echo "UniqueDownloadID $unique_id is blanking. Ignoring." - exit 0 -fi -# If unique_id is not blank, then proceed with checking the id -grep -qF "$unique_id" "$log_file" && echo "UniqueDownloadID $unique_id has already been processed. Skipping..." && exit 0 +# Validate the process +validate_process() { + [ ! -f "$log_file" ] && touch "$log_file" + unique_id="${downloadID}-${clientID}" -# Handle Unknown Event Type -[ -z "$eventType" ] && echo "|WARN| Unknown Event Type. Failing." && exit 1 + [ -z "$unique_id" ] && return + grep -qF "$unique_id" "$log_file" && { echo "Download ID $unique_id already processed. Exiting."; exit 0; } -# Handle Test Event -[ "$eventType" == "Test" ] && echo "Test passed for $app. 
DownloadClient: $clientID, DownloadId: $downloadID and FilePath: $filePath" && exit 0 + [ -z "$eventType" ] && { echo "No event type specified. Exiting."; exit 1; } + [ "$eventType" == "test" ] && { echo "Test event detected. Exiting."; exit 0; } + [ -z "$downloadID" ] || [ -z "$filePath" ] && { echo "Essential parameters missing. Exiting."; exit 1; } +} -# Ensure we have necessary details -[ -z "$downloadID" ] && echo "DownloadID is empty from $app. Skipping cross-seed search. DownloadClient: $clientID and DownloadId: $downloadID" && exit 0 -[ -z "$filePath" ] && echo "FilePath is empty from $app. Skipping cross-seed search. DownloadClient: $clientID and FilePath: $filePath" && exit 0 +# Main logic for handling operations +handle_operations() { + detect_application + validate_process -# Handle client based operations -case "$clientID" in - "$torrentclientname") - echo "Client $torrentclientname triggered id search for DownloadId $downloadID with FilePath $filePath and FolderPath $folderPath" - xseed_resp=$(cross_seed_request "webhook" "infoHash=$downloadID") - - if [ "$xseed_resp" != "204" ]; then - echo "Client $torrentclientname triggered data search for DownloadId $downloadID using FilePath $filePath with FolderPath $folderPath" - sleep 15 - xseed_resp=$(cross_seed_request "webhook" "path=$filePath") - fi - ;; - "$usenetclientname") - if [[ "$folderPath" =~ S[0-9]{1,2}(?!\.E[0-9]{1,2}) ]]; then - echo "Client $usenetclientname skipped search for FolderPath $folderPath due to being a SeasonPack for Usenet" - exit 0 - else - echo "Client $usenetclientname triggered data search for DownloadId $downloadID using FilePath $filePath with FolderPath $folderPath" + case "$clientID" in + "$torrentclientname") + echo "Processing torrent client operations..." 
+ xseed_resp=$(cross_seed_request "webhook" "infoHash=$downloadID") + [ "$xseed_resp" != "204" ] && sleep 15 && xseed_resp=$(cross_seed_request "webhook" "path=$filePath") + ;; + "$usenetclientname") + [[ "$folderPath" =~ S[0-9]{1,2}(?!\.E[0-9]{1,2}) ]] && { echo "Skipping season pack search."; exit 0; } + echo "Processing Usenet client operations..." xseed_resp=$(cross_seed_request "webhook" "path=$filePath") - fi - ;; - *) - echo "|WARN| Client $clientID does not match configured Clients of $torrentclientname or $usenetclientname. Skipping..." - exit 0 - ;; -esac + ;; + *) + echo "Unrecognized client $clientID. Exiting." + exit 1 + ;; + esac + echo "Cross-seed API response: $xseed_resp" + [ "$xseed_resp" == "204" ] && { echo "$unique_id" >> "$log_file"; echo "Process completed successfully."; } || { echo "Process failed with API response: $xseed_resp"; exit 1; } +} -# Handle Cross Seed Response -if [ "$xseed_resp" == "204" ]; then - echo "Success. Cross-seed search triggered by $app for DownloadClient: $clientID, DownloadId: $downloadID and FilePath: $filePath with FolderPath $folderPath" - echo "$unique_id" >> "$log_file" - exit 0 -else - echo "|WARN| Cross-seed webhook failed - HTTP Code $xseed_resp from $app for DownloadClient: $clientID, DownloadId: $downloadID and FilePath: $filePath with FolderPath $folderPath" - exit 1 -fi +handle_operations From 1633d1f44c3177a38c3905efcc377525499071ca Mon Sep 17 00:00:00 2001 From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Date: Thu, 25 Apr 2024 00:26:31 -0500 Subject: [PATCH 02/18] add .env.file --- .env.sample | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .env.sample diff --git a/.env.sample b/.env.sample new file mode 100644 index 0000000..8a4c1db --- /dev/null +++ b/.env.sample @@ -0,0 +1,16 @@ +# Environment Configuration Sample +# Rename this file to .env and fill in the values accordingly. 
+
+# Download Client Names
+torrentclientname="" # Example: "Qbit"
+usenetclientname="" # Example: "SABnzbd"
+
+# Cross Seed API configuration
+xseed_host="" # Example: "crossseed-server"
+xseed_port="" # Example: "2468"
+
+# API Key for Cross Seed, if applicable
+xseed_apikey="" # Example: "your-api-key"
+
+# Path to store the script's database of prior searches
+log_file="" # Example: "/config/xseed_db.log"

From 95f9f6a86d74fc74655b372129c54f2f720cb247 Mon Sep 17 00:00:00 2001
From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com>
Date: Thu, 25 Apr 2024 00:30:53 -0500
Subject: [PATCH 03/18] Update zfsburn.sh to use env

---
 zfsburn.sh | 11 ++++-------
 1 file changed, 4 insertions(+), 7 deletions(-)

diff --git a/zfsburn.sh b/zfsburn.sh
index 1e4718e..a89de8e 100755
--- a/zfsburn.sh
+++ b/zfsburn.sh
@@ -1,12 +1,9 @@
 #!/bin/bash

-# Constants
-VERBOSE=0 # Set this to 1 for trace-level logging, 0 for informational logging
-MAX_FREQ=2
-MAX_HOURLY=2
-MAX_DAILY=1
-MAX_WEEKLY=0
-MAX_MONTHLY=0
+# Load .env file
+set -o allexport
+source ./.env
+set +o allexport

 # Logging function based on verbosity level
 log() {

From 21d96248e527eca4d690d0a4787fdcc33c744e78 Mon Sep 17 00:00:00 2001
From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com>
Date: Thu, 25 Apr 2024 00:32:46 -0500
Subject: [PATCH 04/18] Update .env.sample for zfs and refactor

---
 .env.sample | 24 +++++++++++++++---------
 1 file changed, 15 insertions(+), 9 deletions(-)

diff --git a/.env.sample b/.env.sample
index 8a4c1db..178e4c6 100644
--- a/.env.sample
+++ b/.env.sample
@@ -1,16 +1,22 @@
-# Environment Configuration Sample
+# .env
+# Environment Configuration StarrScripts Sample 2024-04
 # Rename this file to .env and fill in the values accordingly.
- -# Download Client Names +# Xseed +## Download Client Names torrentclientname="" # Example: "Qbit" usenetclientname="" # Example: "SABnzbd" - -# Cross Seed API configuration +## Cross Seed API configuration xseed_host="" # Example: "crossseed-server" xseed_port="" # Example: "2468" - -# API Key for Cross Seed, if applicable +## API Key for Cross Seed, if applicable xseed_apikey="" # Example: "your-api-key" - -# Path to store the script's database of prior searches +## Path to store the script's database of prior searches log_file="" # Example: "/config/xseed_db.log" + +# ZFS Destory +VERBOSE=0 +MAX_FREQ=2 +MAX_HOURLY=2 +MAX_DAILY=1 +MAX_WEEKLY=0 +MAX_MONTHLY=0 From 27f325d4803b2230e2dffa5b463c86bf89383da3 Mon Sep 17 00:00:00 2001 From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Date: Thu, 25 Apr 2024 00:35:03 -0500 Subject: [PATCH 05/18] Update .env.sample add jdupes --- .env.sample | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.env.sample b/.env.sample index 178e4c6..7a15b67 100644 --- a/.env.sample +++ b/.env.sample @@ -12,7 +12,6 @@ xseed_port="" # Example: "2468" xseed_apikey="" # Example: "your-api-key" ## Path to store the script's database of prior searches log_file="" # Example: "/config/xseed_db.log" - # ZFS Destory VERBOSE=0 MAX_FREQ=2 @@ -20,3 +19,8 @@ MAX_HOURLY=2 MAX_DAILY=1 MAX_WEEKLY=0 MAX_MONTHLY=0 +# Jdupes +JDUPES_OUTPUT_LOG=/.config/jdupes.log +JDUPES_SOURCE_DIR=/mnt/data/media/ +JDUPES_DESTINATION_DIR=/mnt/data/torrents/ +JDUPES_HASH_DB=/.config/jdupes_hashdb From a753cbdd31548443f01a5a6a02a898519304e52c Mon Sep 17 00:00:00 2001 From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Date: Thu, 25 Apr 2024 00:35:21 -0500 Subject: [PATCH 06/18] Update dupe.sh v2 --- dupe.sh | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/dupe.sh b/dupe.sh index 8aabb03..a652537 100755 --- a/dupe.sh +++ b/dupe.sh @@ -1,15 +1,22 @@ #!/bin/bash +# Load environment 
variables from .env file +set -a # automatically export all variables +source .env +set +a + +# Command and options jdupes_command="/usr/bin/jdupes" exclude_dirs="-X nostr:.RecycleBin -X nostr:.trash" include_ext="-X onlyext:mp4,mkv,avi" -output_log="/.config/jdupes.log" -source_dir="/mnt/data/media/" -destination_dir="/mnt/data/torrents/" -hash_db="/.config/jdupes_hashdb" +# Logging the start of the operation timestamp=$(date +"%Y-%m-%d %H:%M:%S") -echo "[$timestamp] Duplicate search started for $source_dir and $destination_dir." >> "$output_log" -$jdupes_command $exclude_dirs $include_ext -L -r -Z -y "$hash_db" "$source_dir" "$destination_dir" >> "$output_log" +echo "[$timestamp] Duplicate search started for $JDUPES_SOURCE_DIR and $JDUPES_DESTINATION_DIR." >> "$JDUPES_OUTPUT_LOG" + +# Running jdupes with the loaded environment variables +$jdupes_command $exclude_dirs $include_ext -L -r -Z -y "$JDUPES_HASH_DB" "$JDUPES_SOURCE_DIR" "$JDUPES_DESTINATION_DIR" >> "$JDUPES_OUTPUT_LOG" + +# Logging the completion of the operation timestamp=$(date +"%Y-%m-%d %H:%M:%S") -echo "[$timestamp] Duplicate search completed for $source_dir and $destination_dir." >> "$output_log" +echo "[$timestamp] Duplicate search completed for $JDUPES_SOURCE_DIR and $JDUPES_DESTINATION_DIR." 
>> "$JDUPES_OUTPUT_LOG" From 2e0b231e00b17256ebc323ba897b5448270aaf1a Mon Sep 17 00:00:00 2001 From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Date: Thu, 25 Apr 2024 00:39:54 -0500 Subject: [PATCH 07/18] Update notifiarr-branch-builder.sh v3 --- notifiarr-branch-builder.sh | 130 +++++++++++++----------------------- 1 file changed, 46 insertions(+), 84 deletions(-) diff --git a/notifiarr-branch-builder.sh b/notifiarr-branch-builder.sh index 114e6a1..3d80da5 100755 --- a/notifiarr-branch-builder.sh +++ b/notifiarr-branch-builder.sh @@ -1,61 +1,43 @@ #!/bin/bash + +# Extend the PATH to include the go binary directory export PATH=$PATH:/usr/local/go/bin -# Function to display an error message and exit +# Function to display error messages and exit with status 1 handle_error() { - echo "Error: $1" + echo "Error: $1" >&2 exit 1 } -# Display usage information +# Function to display usage information display_help() { echo "Usage: $0 [options]" echo "Options:" - echo " -h Display this help message" - echo " --repo-url URL Set the repository URL (default: https://github.com/Notifiarr/notifiarr.git)" - echo " --repo-dir DIR Set the repository directory (default: /home/bakerboy448/notifiarr)" - echo " --bin-path PATH Set the binary path (default: /usr/bin/notifiarr)" - echo " --branch BRANCH Set the branch (default: master)" - echo " --reinstall-apt Reinstall Notifiarr using apt without prompting." + echo " -h, --help Display this help message" + echo " --repo-url URL Set the repository URL (default: https://github.com/Notifiarr/notifiarr.git)" + echo " --repo-dir DIR Set the repository directory (default: /opt/notifiarr-repo)" + echo " --bin-path PATH Set the binary path (default: /usr/bin/notifiarr)" + echo " --branch BRANCH Set the branch (default: master)" + echo " --reinstall-apt Reinstall Notifiarr using apt without prompting." exit 0 } -#TODO Fix this later -# Check if Golang is installed, install if not -#if ! 
command -v go &>/dev/null; then -# read -p "Golang is not installed. Do you want to install it? [Y/n] " go_install_choice -# if [[ "$go_install_choice" == [Yy]* ]]; then -# # Download Go tarball to /tmp directory -# if curl -o /tmp/go1.21.3.linux-amd64.tar.gz https://go.dev/dl/go1.21.3.linux-amd64.tar.gz; then -# # Remove any existing Go installation, extract Go, update PATH, and check Go version -# sudo rm -rf /usr/local/go && \ -# sudo tar -C /usr/local -xzf /tmp/go1.21.3.linux-amd64.tar.gz && \ -# echo "export PATH=\$PATH:/usr/local/go/bin" >> ~/.bashrc && \ -# source ~/.bashrc && \ -# go version && \ -# rm /tmp/go1.21.3.linux-amd64.tar.gz -# else -# echo "Failed to download Golang." -# exit 1 -# fi -# else -# echo "Golang is required for this script. Exiting." -# exit 1 -# fi -#fi - -# Check if Make is installed, install if not -if ! command -v make &>/dev/null; then - read -p "Make is not installed. Do you want to install it? [Y/n] " make_install_choice - if [[ "$make_install_choice" == [Yy]* ]]; then - sudo apt update && sudo apt install -y make || handle_error "Failed to install Make." - else - echo "Make is required for this script. Exiting." - exit 1 +# Function to check and prompt for installation of a required tool +ensure_tool_installed() { + local tool=$1 + local install_cmd=$2 + if ! command -v "$tool" &>/dev/null; then + read -p "$tool is not installed. Do you want to install it? [Y/n] " response + if [[ "$response" =~ ^[Yy] ]]; then + eval "$install_cmd" || handle_error "Failed to install $tool." + else + echo "$tool is required for this script. Exiting." 
+ exit 1 + fi fi -fi +} -# Default parameter values +# Default parameters repo_url="https://github.com/Notifiarr/notifiarr.git" repo_dir="/opt/notifiarr-repo" bin_path="/usr/bin/notifiarr" @@ -65,7 +47,7 @@ apt_reinstall=false # Parse command line options while [[ $# -gt 0 ]]; do case "$1" in - -h | --help) + -h|--help) display_help ;; --repo-url) @@ -86,8 +68,7 @@ while [[ $# -gt 0 ]]; do ;; --reinstall-apt) apt_reinstall=true - ;; - + ;; *) echo "Invalid option: $1. Use -h for help." exit 1 @@ -96,81 +77,62 @@ while [[ $# -gt 0 ]]; do shift done -# Check if user wants to reinstall using apt -if [[ $apt_reinstall == true || ( $apt_reinstall == false && $(read -p "Do you want to reinstall Notifiarr using apt? [Y/n] " apt_choice; echo "$apt_choice") == [Yy]* ) ]]; then +# Ensure required tools are installed +ensure_tool_installed "make" "sudo apt update && sudo apt install -y make" +# Reinstallation condition handling +reinstall_notifiarr() { sudo apt update && sudo apt install --reinstall notifiarr || handle_error "Failed to reinstall Notifiarr using apt." - exit 0 -fi +} + +[[ $apt_reinstall == true ]] && reinstall_notifiarr -# Clone the repo if it doesn't exist, else fetch the latest +# Repository management if [[ ! -d "$repo_dir" ]]; then git clone "$repo_url" "$repo_dir" || handle_error "Failed to clone repository." else git -C "$repo_dir" fetch --all --prune || handle_error "Failed to fetch updates from remote." fi -# Get the current branch +# Branch handling and updating current_branch=$(git -C "$repo_dir" rev-parse --abbrev-ref HEAD) -echo "Current branch is: $current_branch" -read -p "Do you want to use the current branch? [Y/n] " choice - -if [[ "$choice" != [Yy]* ]]; then - # List all available branches - branches=$(git -C "$repo_dir" branch -r | sed 's/origin\///' | sed 's/* //') +read -p "Do you want to use the current branch ($current_branch)? 
[Y/n] " choice +if [[ "$choice" =~ ^[Nn] ]]; then + branches=$(git -C "$repo_dir" branch -r | sed 's/origin\///;s/* //') echo "Available branches:" echo "$branches" - while true; do read -p "Enter the branch name you want to use: " branch if [[ $branches =~ $branch ]]; then + git -C "$repo_dir" checkout "$branch" || handle_error "Failed to checkout branch $branch." break else echo "Invalid choice. Please select a valid branch." fi done - - # Checkout the selected branch - git -C "$repo_dir" checkout "$branch" || handle_error "Failed to checkout branch $branch." -else - branch=$current_branch fi -# Pull latest changes from the selected branch git -C "$repo_dir" pull || handle_error "Failed to pull latest changes." - -# Compile the code (assuming the repository requires a 'make' step) make --directory="$repo_dir" || handle_error "Failed to compile." +# Service management echo "Stopping notifiarr..." sudo systemctl stop notifiarr -# Move the binaries if [[ -f "$bin_path" ]]; then sudo mv "$bin_path" "$repo_dir".old && echo "Old binary moved to $repo_dir.old" fi sudo mv "$repo_dir/notifiarr" "$bin_path" && echo "New binary moved to $bin_path" -# Change owner of the compiled binary -sudo chown "root:root" "$bin_path" +sudo chown root:root "$bin_path" -# Start the service again +echo "Starting Notifiarr..." sudo systemctl start notifiarr -# Check if the service started successfully -if [[ $? -eq 0 ]]; then - echo "Notifiarr service started successfully" - - # Check the status of the service - sudo systemctl is-active --quiet notifiarr - if [[ $? 
-eq 0 ]]; then
-        echo "Notifiarr service is currently running"
-    else
-        echo "Notifiarr service is not running"
-    fi
+if sudo systemctl is-active --quiet notifiarr; then
+echo "Notifiarr service started and is currently running"
 else
-    echo "Failed to start Notifiarr service"
+handle_error "Failed to start Notifiarr service"
 fi

-# Exit the script
 exit 0

From adc93297fd2cf037a8d0d9a5b10960aac4608373 Mon Sep 17 00:00:00 2001
From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com>
Date: Thu, 25 Apr 2024 00:42:16 -0500
Subject: [PATCH 08/18] Update omegabrr_upgrade.sh v2

---
 omegabrr_upgrade.sh | 47 +++++++++++++++++++++++++++++++++++-----------------
 1 file changed, 29 insertions(+), 18 deletions(-)

diff --git a/omegabrr_upgrade.sh b/omegabrr_upgrade.sh
index 0881239..87f9827 100755
--- a/omegabrr_upgrade.sh
+++ b/omegabrr_upgrade.sh
@@ -1,29 +1,40 @@
 #!/bin/bash

+# Define service name as a variable
+service_name="omegabrr@bakerboy448"
+
+# Function to handle errors and exit
+handle_error() {
+    echo "Error: $1" >&2
+    exit 1
+}
+
 # Get the old version of omegabrr
 old_version=$(omegabrr version)

 # Fetch the URL of the latest release for linux_x86_64
-dlurl=$(curl -s https://api.github.com/repos/autobrr/omegabrr/releases/latest | grep -E 'browser_download_url.*linux_x86_64' | cut -d\" -f4)
+dlurl=$(curl -s https://api.github.com/repos/autobrr/omegabrr/releases/latest | \
+    grep -E 'browser_download_url.*linux_x86_64' | cut -d\" -f4)

-# Download the latest release
-if [ -n "$dlurl" ]; then
-    wget "$dlurl"
-    # Extract the downloaded archive
-    sudo tar -xzf omegabrr*.tar.gz
-    # Move omegabrr to /usr/bin
-    sudo mv omegabrr /usr/bin/omegabrr
-    # Clean up downloaded files
-    rm omegabrr*.tar.gz
-    echo "Omegabrr Updated"
-else
-    echo "Failed to fetch download URL. Exiting..."
-    exit 1
+# Validate the download URL
+if [ -z "$dlurl" ]; then
+    handle_error "Failed to fetch download URL."
fi +# Download the latest release +wget "$dlurl" -O omegabrr_latest.tar.gz || handle_error "Failed to download the latest version." + +# Extract the downloaded archive +sudo tar -xzf omegabrr_latest.tar.gz -C /usr/bin/ || handle_error "Failed to extract files." + +# Clean up downloaded files +rm omegabrr_latest.tar.gz + # Display old and new versions -echo "Old Version: $old_version" -echo "New Version: $(omegabrr version)" +new_version=$(omegabrr version) +echo "Omegabrr updated from $old_version to $new_version" + +# Restart the specified service +sudo systemctl restart $service_name || handle_error "Failed to restart the service $service_name." -# Restart the omegabrr service (assuming sysrestart command exists) -sysrestart omegabrr@bakerboy448 +echo "Update and restart successful!" From 11e1c5bf06c34b9bcb46e474152073f4ae75a174 Mon Sep 17 00:00:00 2001 From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Date: Thu, 25 Apr 2024 00:46:14 -0500 Subject: [PATCH 09/18] Update qbm-qbit.sh v2 --- qbm-qbit.sh | 37 ++++++++++++++++++++++++++----------- 1 file changed, 26 insertions(+), 11 deletions(-) diff --git a/qbm-qbit.sh b/qbm-qbit.sh index 5ac6e98..fb3574d 100755 --- a/qbm-qbit.sh +++ b/qbm-qbit.sh @@ -1,21 +1,36 @@ #!/bin/bash -LOCK=/var/lock/qbm-qbit.lock -PATH_QBM=/opt/QbitManage + +# Load environment variables from .env file if it exists +if [ -f ".env" ]; then + source ".env" +fi + +# Use environment variables with descriptive default values +LOCK=${QBIT_MANAGE_LOCK_FILE_PATH:-/var/lock/qbm-qbit.lock} +PATH_QBM=${QBIT_MANAGE_PATH:-/opt/qbit-manage} +VENV_PATH=${QBIT_MANAGE_VENV_PATH:-/opt/qbit-manage/.venv} +CONFIG_PATH=${QBIT_MANAGE_CONFIG_PATH:-/opt/qbit-manage/config.yml} +QBIT_OPTIONS=${QBIT_MANAGE_OPTIONS:-"-cs -re -cu -tu -ru -sl -r"} + +# Function to remove the lock file remove_lock() { rm -f "$LOCK" } + +# Function to handle detection of another running instance another_instance() { - echo "There is another instance running, exiting" + 
echo "There is another instance running, exiting." exit 1 } + +# Acquire a lock to prevent concurrent execution, with a timeout and lease time lockfile -r 0 -l 3600 "$LOCK" || another_instance + +# Ensure the lock is removed when the script exits trap remove_lock EXIT + +# Pause the script to wait for any pending operations (demonstrative purpose) sleep 600 -# -cs = cross-seed -# -re = recheck -# -cu = cat-update -# -tu = tag-update -# -ru = remove unregistered -# Do not remove orphaned torrents as imports may be in-progress -# -sl = share limits -/opt/.venv/qbm-venv/bin/python "$PATH_QBM"/qbit_manage.py -cs -re -cu -tu -ru -sl -r --config-file /.config/QbitMngr/config.yml + +# Execute qbit_manage with configurable options +"$VENV_PATH"/bin/python "$PATH_QBM"/qbit_manage.py $QBIT_OPTIONS --config-file "$CONFIG_PATH" From 261574f9e8235b51a488de1f35ad287911de2349 Mon Sep 17 00:00:00 2001 From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Date: Thu, 25 Apr 2024 00:47:03 -0500 Subject: [PATCH 10/18] Update .env.sample qbm --- .env.sample | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.env.sample b/.env.sample index 7a15b67..9975656 100644 --- a/.env.sample +++ b/.env.sample @@ -24,3 +24,10 @@ JDUPES_OUTPUT_LOG=/.config/jdupes.log JDUPES_SOURCE_DIR=/mnt/data/media/ JDUPES_DESTINATION_DIR=/mnt/data/torrents/ JDUPES_HASH_DB=/.config/jdupes_hashdb +# Qbittorrent Manage Trigger +# .env file - Set environment variables for the qbit_manage script +QBIT_MANAGE_LOCK_FILE_PATH=/var/lock/qbm-qbit.lock +QBIT_MANAGE_PATH=/opt/qbit-manage +QBIT_MANAGE_VENV_PATH=/opt/qbit-manage/.venv +QBIT_MANAGE_CONFIG_PATH=/opt/qbit-manage/config.yml +QBIT_MANAGE_OPTIONS="-cs -re -cu -tu -ru -sl -r" From 04dde3de0b19ee72b72cfb2833d156d365691bfd Mon Sep 17 00:00:00 2001 From: zakary Date: Fri, 26 Apr 2024 08:45:53 -0500 Subject: [PATCH 11/18] fix(xseed/non-gpt): fix gpt errors and fallback to data in torrent (#10) --- xseed.sh | 16 +++++++++++++--- 1 file changed, 13 
insertions(+), 3 deletions(-) diff --git a/xseed.sh b/xseed.sh index 657962b..5456699 100755 --- a/xseed.sh +++ b/xseed.sh @@ -56,8 +56,18 @@ validate_process() { grep -qF "$unique_id" "$log_file" && { echo "Download ID $unique_id already processed. Exiting."; exit 0; } [ -z "$eventType" ] && { echo "No event type specified. Exiting."; exit 1; } - [ "$eventType" == "test" ] && { echo "Test event detected. Exiting."; exit 0; } - [ -z "$downloadID" ] || [ -z "$filePath" ] && { echo "Essential parameters missing. Exiting."; exit 1; } + [ "$eventType" == "Test" ] && { echo "Test event detected. Exiting."; exit 0; } + [ -z "$filePath" ] && [ -z "$downloadID" ] && { echo "Essential parameters missing. Exiting."; exit 1; } + + if [ -z "$downloadID" ] || [ -z "$filePath" ]; then + echo "Download ID is missing. Checking if file path works for data/path based cross-seeding." + if [ -z "$filePath" ]; then + echo "File path is missing. Exiting." + exit 1 + fi + fi + + [ -z "$filePath" ] && [ -z "$downloadID" ] && { echo "Essential parameters missing. Exiting."; exit 1; } } # Main logic for handling operations @@ -68,7 +78,7 @@ handle_operations() { case "$clientID" in "$torrentclientname") echo "Processing torrent client operations..." 
- xseed_resp=$(cross_seed_request "webhook" "infoHash=$downloadID") + [ -n "$downloadID" ] && { xseed_resp=$(cross_seed_request "webhook" "infoHash=$downloadID"); } [ "$xseed_resp" != "204" ] && sleep 15 && xseed_resp=$(cross_seed_request "webhook" "path=$filePath") ;; "$usenetclientname") From d93c77a04e5eb03bea22f44b8d290e42cbe31754 Mon Sep 17 00:00:00 2001 From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Date: Fri, 26 Apr 2024 09:22:24 -0500 Subject: [PATCH 12/18] misc fixes --- .env.sample | 12 +++---- .gitignore | 3 ++ dupe.sh | 26 +++++++++------ qbm-qbit.sh | 2 +- xseed.sh | 91 +++++++++++++++++++++++++++++++++++++---------------- zfsburn.sh | 23 +++++++++----- 6 files changed, 107 insertions(+), 50 deletions(-) diff --git a/.env.sample b/.env.sample index 9975656..d6ae8d1 100644 --- a/.env.sample +++ b/.env.sample @@ -3,15 +3,15 @@ # Rename this file to .env and fill in the values accordingly. # Xseed ## Download Client Names -torrentclientname="" # Example: "Qbit" -usenetclientname="" # Example: "SABnzbd" +torrentclientname="" # Example: "Qbit" +usenetclientname="" # Example: "SABnzbd" ## Cross Seed API configuration -xseed_host="" # Example: "crossseed-server" -xseed_port="" # Example: "2468" +xseed_host="" # Example: "crossseed" +xseed_port="" # Example: "2468" ## API Key for Cross Seed, if applicable -xseed_apikey="" # Example: "your-api-key" +xseed_apikey="" # Example: "your-api-key" ## Path to store the script's database of prior searches -log_file="" # Example: "/config/xseed_db.log" +log_file="" # Example: "/config/xseed_db.log" # ZFS Destory VERBOSE=0 MAX_FREQ=2 diff --git a/.gitignore b/.gitignore index dfcfd56..5f74ef5 100644 --- a/.gitignore +++ b/.gitignore @@ -348,3 +348,6 @@ MigrationBackup/ # Ionide (cross platform F# VS Code tools) working folder .ionide/ + +# Ignore .env +.env diff --git a/dupe.sh b/dupe.sh index a652537..3f7d803 100755 --- a/dupe.sh +++ b/dupe.sh @@ -1,22 +1,30 @@ #!/bin/bash # Load environment variables 
from .env file -set -a # automatically export all variables -source .env +set -a # automatically export all variables +# Load environment variables from .env file if it exists +if [ -f ".env" ]; then + # shellcheck source=.env + source ".env" +fi +# Use environment variables with descriptive default values set +a -# Command and options -jdupes_command="/usr/bin/jdupes" -exclude_dirs="-X nostr:.RecycleBin -X nostr:.trash" -include_ext="-X onlyext:mp4,mkv,avi" +JDUPES_OUTPUT_LOG=${JDUPES_OUTPUT_LOG:-/var/log/jdupes.log} +JDUPES_SOURCE_DIR=${JDUPES_SOURCE_DIR:-/mnt/data/media/} +JDUPES_DESTINATION_DIR=${JDUPES_DESTINATION_DIR:-/mnt/data/torrents/} +JDUPES_HASH_DB=${JDUPES_HASH_DB:-/var/lib/jdupes_hashdb} +JDUPES_COMMAND=${JDUPES_COMMAND:-/usr/bin/jdupes} +EXCLUDE_DIRS=${EXCLUDE_DIRS:-"-X nostr:.RecycleBin -X nostr:.trash"} +INCLUDE_EXT=${INCLUDE_EXT:-"-X onlyext:mp4,mkv,avi"} # Logging the start of the operation timestamp=$(date +"%Y-%m-%d %H:%M:%S") -echo "[$timestamp] Duplicate search started for $JDUPES_SOURCE_DIR and $JDUPES_DESTINATION_DIR." >> "$JDUPES_OUTPUT_LOG" +echo "[$timestamp] Duplicate search started for $JDUPES_SOURCE_DIR and $JDUPES_DESTINATION_DIR." >>"$JDUPES_OUTPUT_LOG" # Running jdupes with the loaded environment variables -$jdupes_command $exclude_dirs $include_ext -L -r -Z -y "$JDUPES_HASH_DB" "$JDUPES_SOURCE_DIR" "$JDUPES_DESTINATION_DIR" >> "$JDUPES_OUTPUT_LOG" +$JDUPES_COMMAND "$EXCLUDE_DIRS" "$INCLUDE_EXT" -L -r -Z -y "$JDUPES_HASH_DB" "$JDUPES_SOURCE_DIR" "$JDUPES_DESTINATION_DIR" >>"$JDUPES_OUTPUT_LOG" # Logging the completion of the operation timestamp=$(date +"%Y-%m-%d %H:%M:%S") -echo "[$timestamp] Duplicate search completed for $JDUPES_SOURCE_DIR and $JDUPES_DESTINATION_DIR." >> "$JDUPES_OUTPUT_LOG" +echo "[$timestamp] Duplicate search completed for $JDUPES_SOURCE_DIR and $JDUPES_DESTINATION_DIR." 
>>"$JDUPES_OUTPUT_LOG" diff --git a/qbm-qbit.sh b/qbm-qbit.sh index fb3574d..4a233c3 100755 --- a/qbm-qbit.sh +++ b/qbm-qbit.sh @@ -33,4 +33,4 @@ trap remove_lock EXIT sleep 600 # Execute qbit_manage with configurable options -"$VENV_PATH"/bin/python "$PATH_QBM"/qbit_manage.py $QBIT_OPTIONS --config-file "$CONFIG_PATH" +"$VENV_PATH"/bin/python "$PATH_QBM"/qbit_manage.py "$QBIT_OPTIONS" --config-file "$CONFIG_PATH" diff --git a/xseed.sh b/xseed.sh index 5456699..848ced8 100755 --- a/xseed.sh +++ b/xseed.sh @@ -1,18 +1,28 @@ #!/bin/bash -# Load environment variables -source ./.env +# Load environment variables from .env file if it exists +if [ -f ".env" ]; then + # shellcheck source=.env + source ".env" +fi + +# Use environment variables with descriptive default values +TORRENT_CLIENT_NAME=${TORRENT_CLIENT_NAME:-Qbit} +USENET_CLIENT_NAME=${USENET_CLIENT_NAME:-SABnzbd} +XSEED_HOST=${XSEED_HOST:-crossseed} +XSEED_PORT=${XSEED_PORT:-8080} +LOG_FILE=${LOG_FILE:-/var/log/xseed.log} # Function to send a request to Cross Seed API cross_seed_request() { local endpoint="$1" local data="$2" - local headers=(-X POST "http://$xseed_host:$xseed_port/api/$endpoint" --data-urlencode "$data") + local headers=(-X POST "http://$XSEED_HOST:$XSEED_PORT/api/$endpoint" --data-urlencode "$data") if [ -n "$xseed_apikey" ]; then headers+=(-H "X-Api-Key: $xseed_apikey") fi response=$(curl --silent --output /dev/null --write-out "%{http_code}" "${headers[@]}") - echo $response + echo "$response" } # Detect application and set environment @@ -44,30 +54,48 @@ detect_application() { downloadID="$readarr_download_id" eventType="$readarr_eventtype" fi - [ "$app" == "unknown" ] && { echo "Unknown application type detected. Exiting."; exit 1; } + [ "$app" == "unknown" ] && { + echo "Unknown application type detected. Exiting." + exit 1 + } } # Validate the process validate_process() { - [ ! -f "$log_file" ] && touch "$log_file" + [ ! 
-f "$LOG_FILE" ] && touch "$LOG_FILE" unique_id="${downloadID}-${clientID}" [ -z "$unique_id" ] && return - grep -qF "$unique_id" "$log_file" && { echo "Download ID $unique_id already processed. Exiting."; exit 0; } + grep -qF "$unique_id" "$LOG_FILE" && { + echo "Download ID $unique_id already processed. Exiting." + exit 0 + } - [ -z "$eventType" ] && { echo "No event type specified. Exiting."; exit 1; } - [ "$eventType" == "Test" ] && { echo "Test event detected. Exiting."; exit 0; } - [ -z "$filePath" ] && [ -z "$downloadID" ] && { echo "Essential parameters missing. Exiting."; exit 1; } + [ -z "$eventType" ] && { + echo "No event type specified. Exiting." + exit 1 + } + [ "$eventType" == "Test" ] && { + echo "Test event detected. Exiting." + exit 0 + } + [ -z "$filePath" ] && [ -z "$downloadID" ] && { + echo "Essential parameters missing. Exiting." + exit 1 + } if [ -z "$downloadID" ] || [ -z "$filePath" ]; then echo "Download ID is missing. Checking if file path works for data/path based cross-seeding." if [ -z "$filePath" ]; then echo "File path is missing. Exiting." exit 1 - fi + fi fi - [ -z "$filePath" ] && [ -z "$downloadID" ] && { echo "Essential parameters missing. Exiting."; exit 1; } + [ -z "$filePath" ] && [ -z "$downloadID" ] && { + echo "Essential parameters missing. Exiting." + exit 1 + } } # Main logic for handling operations @@ -76,23 +104,32 @@ handle_operations() { validate_process case "$clientID" in - "$torrentclientname") - echo "Processing torrent client operations..." - [ -n "$downloadID" ] && { xseed_resp=$(cross_seed_request "webhook" "infoHash=$downloadID"); } - [ "$xseed_resp" != "204" ] && sleep 15 && xseed_resp=$(cross_seed_request "webhook" "path=$filePath") - ;; - "$usenetclientname") - [[ "$folderPath" =~ S[0-9]{1,2}(?!\.E[0-9]{1,2}) ]] && { echo "Skipping season pack search."; exit 0; } - echo "Processing Usenet client operations..." 
- xseed_resp=$(cross_seed_request "webhook" "path=$filePath") - ;; - *) - echo "Unrecognized client $clientID. Exiting." - exit 1 - ;; + "$TORRENT_CLIENT_NAME") + echo "Processing torrent client operations..." + [ -n "$downloadID" ] && { xseed_resp=$(cross_seed_request "webhook" "infoHash=$downloadID"); } + [ "$xseed_resp" != "204" ] && sleep 15 && xseed_resp=$(cross_seed_request "webhook" "path=$filePath") + ;; + "$USENET_CLIENT_NAME") + [[ "$folderPath" =~ S[0-9]{1,2}(?!\.E[0-9]{1,2}) ]] && { + echo "Skipping season pack search." + exit 0 + } + echo "Processing Usenet client operations..." + xseed_resp=$(cross_seed_request "webhook" "path=$filePath") + ;; + *) + echo "Unrecognized client $clientID. Exiting." + exit 1 + ;; esac echo "Cross-seed API response: $xseed_resp" - [ "$xseed_resp" == "204" ] && { echo "$unique_id" >> "$log_file"; echo "Process completed successfully."; } || { echo "Process failed with API response: $xseed_resp"; exit 1; } + if [ "$xseed_resp" == "204" ]; then + echo "$unique_id" >>"$LOG_FILE" + echo "Process completed successfully." 
+ else + echo "Process failed with API response: $xseed_resp" + exit 1 + fi } handle_operations diff --git a/zfsburn.sh b/zfsburn.sh index a89de8e..807e3c4 100755 --- a/zfsburn.sh +++ b/zfsburn.sh @@ -2,9 +2,19 @@ # Load .env file set -o allexport -source ./.env +if [ -f ".env" ]; then + # shellcheck source=.env + source ".env" +fi set +o allexport0 +VERBOSE=${VERBOSE:-1} +MAX_FREQ=${MAX_FREQ:-4} +MAX_HOURLY=${MAX_HOURLY:-2} +MAX_DAILY=${MAX_DAILY:-7} +MAX_WEEKLY=${MAX_WEEKLY:-4} +MAX_MONTHLY=${MAX_MONTHLY:-3} + # Logging function based on verbosity level log() { local level="$1" @@ -19,16 +29,15 @@ bytes_to_human_readable() { local bytes=$1 local units=('B' 'KB' 'MB' 'GB' 'TB' 'PB' 'EB' 'ZB' 'YB') local unit=0 - - while (( bytes > 1024 )); do - (( bytes /= 1024 )) - (( unit++ )) + + while ((bytes > 1024)); do + ((bytes /= 1024)) + ((unit++)) done - + echo "${bytes} ${units[unit]}" } - # Function to retrieve snapshot counts for a specific snapshot type get_snapshot_count() { local snapshot_type="$1" From 0780d26179888a294eeffda620c3a521dc992d29 Mon Sep 17 00:00:00 2001 From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Date: Fri, 26 Apr 2024 14:28:06 -0500 Subject: [PATCH 13/18] .ENV SAMPLE FIXES --- .env.sample | 33 ++++++++++++++++----------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/.env.sample b/.env.sample index d6ae8d1..709cf84 100644 --- a/.env.sample +++ b/.env.sample @@ -3,15 +3,15 @@ # Rename this file to .env and fill in the values accordingly. 
# Xseed ## Download Client Names -torrentclientname="" # Example: "Qbit" -usenetclientname="" # Example: "SABnzbd" +TORRENT_CLIENT_NAME="" # Example: "Qbit" +USENET_CLIENT_NAME="" # Example: "SABnzbd" ## Cross Seed API configuration -xseed_host="" # Example: "crossseed" -xseed_port="" # Example: "2468" +XSEED_HOST="" # Example: "crossseed" +XSEED_PORT="" # Example: "2468" ## API Key for Cross Seed, if applicable -xseed_apikey="" # Example: "your-api-key" +XSEED_APIKEY="" # Example: "your-api-key" ## Path to store the script's database of prior searches -log_file="" # Example: "/config/xseed_db.log" +LOG_FILE="" # Example: "/config/xseed_db.log" # ZFS Destory VERBOSE=0 MAX_FREQ=2 @@ -20,14 +20,13 @@ MAX_DAILY=1 MAX_WEEKLY=0 MAX_MONTHLY=0 # Jdupes -JDUPES_OUTPUT_LOG=/.config/jdupes.log -JDUPES_SOURCE_DIR=/mnt/data/media/ -JDUPES_DESTINATION_DIR=/mnt/data/torrents/ -JDUPES_HASH_DB=/.config/jdupes_hashdb -# Qbittorrent Manage Trigger -# .env file - Set environment variables for the qbit_manage script -QBIT_MANAGE_LOCK_FILE_PATH=/var/lock/qbm-qbit.lock -QBIT_MANAGE_PATH=/opt/qbit-manage -QBIT_MANAGE_VENV_PATH=/opt/qbit-manage/.venv -QBIT_MANAGE_CONFIG_PATH=/opt/qbit-manage/config.yml -QBIT_MANAGE_OPTIONS="-cs -re -cu -tu -ru -sl -r" +JDUPES_OUTPUT_LOG="" # Example: "/.config/jdupes.log" +JDUPES_SOURCE_DIR="" # Example: "/mnt/data/media/" +JDUPES_DESTINATION_DIR="" # Example: "/mnt/data/torrents/" +JDUPES_HASH_DB="" # Example: "/.config/jdupes_hashdb" +# Qbittorrent Manage +QBIT_MANAGE_LOCK_FILE_PATH="" # Example: "/var/lock/qbm-qbit.lock" +QBIT_MANAGE_PATH="" # Example: "/opt/qbit-manage" +QBIT_MANAGE_VENV_PATH="" # Example: "/opt/qbit-manage/.venv" +QBIT_MANAGE_CONFIG_PATH="" # Example: "/opt/qbit-manage/config.yml" +QBIT_MANAGE_OPTIONS="" # Example: "-cs -re -cu -tu -ru -sl -r" From d67122bf65c20d477b68559fde2a93698f668cd9 Mon Sep 17 00:00:00 2001 From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Date: Fri, 26 Apr 2024 14:38:27 -0500 Subject: [PATCH 
14/18] fix dupe.sh --- dupe.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dupe.sh b/dupe.sh index 3f7d803..0848c20 100755 --- a/dupe.sh +++ b/dupe.sh @@ -21,8 +21,9 @@ INCLUDE_EXT=${INCLUDE_EXT:-"-X onlyext:mp4,mkv,avi"} # Logging the start of the operation timestamp=$(date +"%Y-%m-%d %H:%M:%S") echo "[$timestamp] Duplicate search started for $JDUPES_SOURCE_DIR and $JDUPES_DESTINATION_DIR." >>"$JDUPES_OUTPUT_LOG" - +echo "command is" # Running jdupes with the loaded environment variables +echo $JDUPES_COMMAND "$EXCLUDE_DIRS" "$INCLUDE_EXT" -L -r -Z -y "$JDUPES_HASH_DB" "$JDUPES_SOURCE_DIR" "$JDUPES_DESTINATION_DIR" $JDUPES_COMMAND "$EXCLUDE_DIRS" "$INCLUDE_EXT" -L -r -Z -y "$JDUPES_HASH_DB" "$JDUPES_SOURCE_DIR" "$JDUPES_DESTINATION_DIR" >>"$JDUPES_OUTPUT_LOG" # Logging the completion of the operation From d08f0f4e5b65c12d3e83e63a5fe7b21d1f8334aa Mon Sep 17 00:00:00 2001 From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Date: Fri, 26 Apr 2024 14:45:32 -0500 Subject: [PATCH 15/18] fix dupe.sh --- dupe.sh | 3 --- 1 file changed, 3 deletions(-) diff --git a/dupe.sh b/dupe.sh index 0848c20..c74d8f4 100755 --- a/dupe.sh +++ b/dupe.sh @@ -1,14 +1,11 @@ #!/bin/bash # Load environment variables from .env file -set -a # automatically export all variables # Load environment variables from .env file if it exists if [ -f ".env" ]; then # shellcheck source=.env source ".env" fi -# Use environment variables with descriptive default values -set +a JDUPES_OUTPUT_LOG=${JDUPES_OUTPUT_LOG:-/var/log/jdupes.log} JDUPES_SOURCE_DIR=${JDUPES_SOURCE_DIR:-/mnt/data/media/} From 3958d11e6d881a6b4409cf02e2bba43194c5010d Mon Sep 17 00:00:00 2001 From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Date: Fri, 26 Apr 2024 14:56:46 -0500 Subject: [PATCH 16/18] dupe fixes --- dupe.sh | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/dupe.sh b/dupe.sh index c74d8f4..6213409 100755 --- a/dupe.sh +++ 
b/dupe.sh @@ -6,22 +6,23 @@ if [ -f ".env" ]; then # shellcheck source=.env source ".env" fi - -JDUPES_OUTPUT_LOG=${JDUPES_OUTPUT_LOG:-/var/log/jdupes.log} -JDUPES_SOURCE_DIR=${JDUPES_SOURCE_DIR:-/mnt/data/media/} -JDUPES_DESTINATION_DIR=${JDUPES_DESTINATION_DIR:-/mnt/data/torrents/} -JDUPES_HASH_DB=${JDUPES_HASH_DB:-/var/lib/jdupes_hashdb} -JDUPES_COMMAND=${JDUPES_COMMAND:-/usr/bin/jdupes} -EXCLUDE_DIRS=${EXCLUDE_DIRS:-"-X nostr:.RecycleBin -X nostr:.trash"} -INCLUDE_EXT=${INCLUDE_EXT:-"-X onlyext:mp4,mkv,avi"} +# Variables +JDUPES_OUTPUT_LOG=${JDUPES_OUTPUT_LOG:-"/var/log/jdupes.log"} +JDUPES_SOURCE_DIR=${JDUPES_SOURCE_DIR:-"/mnt/data/media/"} +JDUPES_DESTINATION_DIR=${JDUPES_DESTINATION_DIR:-"/mnt/data/torrents/"} +JDUPES_HASH_DB=${JDUPES_HASH_DB:-"/var/lib/jdupes_hashdb"} +## Secret Variables +JDUPES_COMMAND=${JDUPES_COMMAND:-"/usr/bin/jdupes"} +JDUPES_EXCLUDE_DIRS=${JDUPES_EXCLUDE_DIRS:-"-X nostr:.RecycleBin -X nostr:.trash"} +JDUPES_INCLUDE_EXT=${JDUPES_INCLUDE_EXT:-"mp4,mkv,avi"} # Logging the start of the operation timestamp=$(date +"%Y-%m-%d %H:%M:%S") echo "[$timestamp] Duplicate search started for $JDUPES_SOURCE_DIR and $JDUPES_DESTINATION_DIR." 
>>"$JDUPES_OUTPUT_LOG" echo "command is" # Running jdupes with the loaded environment variables -echo $JDUPES_COMMAND "$EXCLUDE_DIRS" "$INCLUDE_EXT" -L -r -Z -y "$JDUPES_HASH_DB" "$JDUPES_SOURCE_DIR" "$JDUPES_DESTINATION_DIR" -$JDUPES_COMMAND "$EXCLUDE_DIRS" "$INCLUDE_EXT" -L -r -Z -y "$JDUPES_HASH_DB" "$JDUPES_SOURCE_DIR" "$JDUPES_DESTINATION_DIR" >>"$JDUPES_OUTPUT_LOG" +echo $JDUPES_COMMAND "$JDUPES_EXCLUDE_DIRS" "$JDUPES_INCLUDE_EXT" -L -r -Z -y "$JDUPES_HASH_DB" "$JDUPES_SOURCE_DIR" "$JDUPES_DESTINATION_DIR" +$JDUPES_COMMAND $JDUPES_EXCLUDE_DIRS -X onlyext:$JDUPES_INCLUDE_EXT -L -r -Z -y "$JDUPES_HASH_DB" "$JDUPES_SOURCE_DIR" "$JDUPES_DESTINATION_DIR" >>"$JDUPES_OUTPUT_LOG" # Logging the completion of the operation timestamp=$(date +"%Y-%m-%d %H:%M:%S") From cb277bb31a6843c855b79742d1de52b8ae61609e Mon Sep 17 00:00:00 2001 From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Date: Fri, 26 Apr 2024 15:03:34 -0500 Subject: [PATCH 17/18] Update qbm-qbit.sh --- qbm-qbit.sh | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/qbm-qbit.sh b/qbm-qbit.sh index 4a233c3..4436ad8 100755 --- a/qbm-qbit.sh +++ b/qbm-qbit.sh @@ -1,5 +1,11 @@ #!/bin/bash +# Check if lockfile command exists +if ! command -v lockfile &>/dev/null; then + echo "Error: lockfile command not found. Please install the procmail package." 
>&2 + exit 1 +fi + # Load environment variables from .env file if it exists if [ -f ".env" ]; then source ".env" From 43aba9f247a14d8c231bb38ed709e046d5c23b4f Mon Sep 17 00:00:00 2001 From: bakerboy448 <55419169+bakerboy448@users.noreply.github.com> Date: Fri, 26 Apr 2024 15:09:31 -0500 Subject: [PATCH 18/18] better qbm-qbit logging --- qbm-qbit.sh | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/qbm-qbit.sh b/qbm-qbit.sh index 4436ad8..951ffb6 100755 --- a/qbm-qbit.sh +++ b/qbm-qbit.sh @@ -12,11 +12,13 @@ if [ -f ".env" ]; then fi # Use environment variables with descriptive default values -LOCK=${QBIT_MANAGE_LOCK_FILE_PATH:-/var/lock/qbm-qbit.lock} -PATH_QBM=${QBIT_MANAGE_PATH:-/opt/qbit-manage} -VENV_PATH=${QBIT_MANAGE_VENV_PATH:-/opt/qbit-manage/.venv} -CONFIG_PATH=${QBIT_MANAGE_CONFIG_PATH:-/opt/qbit-manage/config.yml} -QBIT_OPTIONS=${QBIT_MANAGE_OPTIONS:-"-cs -re -cu -tu -ru -sl -r"} +QBQBM_LOCK=${QBIT_MANAGE_LOCK_FILE_PATH:-/var/lock/qbm-qbit.lock} +QBQBM_PATH_QBM=${QBIT_MANAGE_PATH:-/opt/qbit-manage} +QBQBM_VENV_PATH=${QBIT_MANAGE_VENV_PATH:-/opt/qbit-manage/.venv} +QBQBM_CONFIG_PATH=${QBIT_MANAGE_CONFIG_PATH:-/opt/qbit-manage/config.yml} +QBQBM_QBIT_OPTIONS=${QBIT_MANAGE_OPTIONS:-"-cs -re -cu -tu -ru -sl -r"} +QBQBM_SLEEP_TIME=600 +QBQBM_LOCK_TIME=3600 # Function to remove the lock file remove_lock() { @@ -29,14 +31,18 @@ another_instance() { exit 1 } +echo "Acquiring Lock" # Acquire a lock to prevent concurrent execution, with a timeout and lease time -lockfile -r 0 -l 3600 "$LOCK" || another_instance +lockfile -r 0 -l "$QBQBM_SLEEP_TIME" "$QBQBM_LOCK" || another_instance # Ensure the lock is removed when the script exits trap remove_lock EXIT -# Pause the script to wait for any pending operations (demonstrative purpose) -sleep 600 +echo "sleeping for $QBQBM_SLEEP_TIME" +# Pause the script to wait for any pending operations (i.e. 
Starr Imports) + +sleep $QBQBM_SLEEP_TIME # Execute qbit_manage with configurable options +echo "Executing Command" "$VENV_PATH"/bin/python "$PATH_QBM"/qbit_manage.py "$QBIT_OPTIONS" --config-file "$CONFIG_PATH"