From b89f78bbdaf0eb9b0b968529717a03a4788a107f Mon Sep 17 00:00:00 2001 From: Rohit Thakur Date: Sun, 1 Feb 2026 06:27:43 +0100 Subject: [PATCH 1/2] E1e demo 001 br (#52) * Updates made by ansible with play: Demo - Network Backup and Restore Workflow * Updates made by ansible with play: Demo - Network Backup and Restore Workflow * Updates made by ansible with play: Demo - Network Backup and Restore Workflow * Updates made by ansible with play: Demo - Network Backup and Restore Workflow * Updates made by ansible with play: Demo - Network Backup and Restore Workflow * update tasks Signed-off-by: rohitthakur2590 * update tasks Signed-off-by: rohitthakur2590 --------- Signed-off-by: rohitthakur2590 --- logs.log | 12 +++ roles/backup/README.md | 31 +++++- roles/backup/meta/argument_specs.yml | 7 ++ roles/backup/tasks/backup.yaml | 39 +++++++- roles/backup/tasks/cli_backup.yaml | 1 + roles/backup/tasks/differential_scm.yaml | 99 +++++++++++++++++++ .../tasks/differential_scm_read_previous.yaml | 55 +++++++++++ roles/backup/tasks/main.yaml | 2 + roles/backup/tasks/network.yaml | 1 + roles/backup/tasks/publish.yaml | 10 +- roles/backup/tasks/retrieve.yaml | 45 ++++++++- roles/backup/tasks/validation.yaml | 2 + roles/restore/tasks/cli_restore.yaml | 15 ++- roles/restore/tasks/cli_restore_config.yaml | 2 + roles/restore/tasks/common/validation.yaml | 2 + roles/restore/tasks/main.yaml | 2 + roles/restore/tasks/network.yaml | 3 + roles/restore/tasks/prepare/ios.yaml | 4 + roles/restore/tasks/restore.yaml | 9 +- roles/restore/tasks/retrieve.yaml | 6 +- .../tmp_backup/network_automation_tools | 1 + 21 files changed, 337 insertions(+), 11 deletions(-) create mode 100644 logs.log create mode 100644 roles/backup/tasks/differential_scm.yaml create mode 100644 roles/backup/tasks/differential_scm_read_previous.yaml create mode 160000 roles/restore/tmp_backup/network_automation_tools diff --git a/logs.log b/logs.log new file mode 100644 index 0000000..a17a801 --- /dev/null +++ b/logs.log @@ -0,0 +1,12 @@ +2026-01-25 22:12:41,674 p=57299 u=rohit n=ansible INFO| ansible-playbook [core 2.19.4] + config file = None + configured module search path = ['/Users/rohit/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] + ansible python module location = /Users/rohit/venvs/ansible312/lib/python3.12/site-packages/ansible + ansible collection location = /Users/rohit/.ansible/collections:/usr/share/ansible/collections + executable location = /Users/rohit/venvs/ansible312/bin/ansible-playbook + python version = 3.12.12 (main, Oct 9 2025, 11:07:00) [Clang 17.0.0 (clang-1700.0.13.3)] (/Users/rohit/venvs/ansible312/bin/python3.12) + jinja version = 3.1.6 + pyyaml version = 6.0.3 (with libyaml v0.2.5) +2026-01-25 22:12:41,674 p=57299 u=rohit n=ansible INFO| No config file found; using defaults +2026-01-25 22:12:41,674 p=57299 u=rohit n=ansible ERROR| [ERROR]: the playbook: demo_backup_restore.yml could not be found + diff --git a/roles/backup/README.md b/roles/backup/README.md index 24fb5b7..e8a3355 100644 --- a/roles/backup/README.md +++ b/roles/backup/README.md @@ -17,6 +17,7 @@ This role supports full and differential backups, storing them locally or in a r - Backs up the configuration **only if** there are changes compared to the last saved version. - Works with both local and Git-based data stores. - Helps reduce storage and SCM noise by saving only when diff exists. +- **Ignores timestamps and metadata** - only detects actual configuration changes. 
--- @@ -34,8 +35,8 @@ This role supports full and differential backups, storing them locally or in a r | `data_store.scm.origin.path` | Directory path inside the repo to save backup | `str` | No | N/A | | `data_store.scm.origin.ssh_key_file` | Path to the SSH private key file for Git authentication | `str` | Yes (if using SCM SSH) | N/A | | `data_store.scm.origin.ssh_key_content` | The content of the SSH private key | `str` | Yes (if using SCM SSH) | N/A | +| `type` | Type of backup to perform. Options: `"full"`, `"incremental"`, or `"diff"` | `str` | No | `"full"` | -> Note: Either `data_store.local` or `data_store.scm` must be provided. --- @@ -136,6 +137,32 @@ This role supports full and differential backups, storing them locally or in a r path: "backups/{{ ansible_date_time.date }}/{{ inventory_hostname }}" ``` +### Create Differential Backup (Only Publish if Config Changed) + +```yaml +- name: Create Network Backup and Push to GitHub + hosts: network + gather_facts: false + tasks: + - name: Create Network Backup + ansible.builtin.include_role: + name: network.backup.backup + vars: + type: "diff" # Enable differential backup + data_store: + scm: + origin: + user: + name: "your_name" + email: "your_email@example.com" + url: "git@github.com:youruser/your-backup-repo.git" + ssh_key_file: "/path/to/ssh/key" + filename: "{{ ansible_date_time.date }}_{{ inventory_hostname }}.txt" + path: "backups/{{ ansible_date_time.date }}/{{ inventory_hostname }}" +``` + +> **Note**: With `type: "diff"`, the backup will only be published to SCM if actual configuration changes are detected. Timestamps and metadata differences are ignored. See [Differential Backup Documentation](Differential_Backup_Documentation.md) for more details. + ## License GNU General Public License v3.0 or later. @@ -144,4 +171,4 @@ See [LICENSE](https://www.gnu.org/licenses/gpl-3.0.txt) to see the full text. ## Author Information -- Ansible Network Content Team \ No newline at end of file +- Ansible Network Content Team diff --git a/roles/backup/meta/argument_specs.yml b/roles/backup/meta/argument_specs.yml index 04cee10..0b5d0d3 100644 --- a/roles/backup/meta/argument_specs.yml +++ b/roles/backup/meta/argument_specs.yml @@ -45,6 +45,13 @@ argument_specs: type: dict required: false options: + parent_directory: + type: str + required: false + description: + - Parent directory where the Git repository will be cloned (e.g., /tmp or role_path). + - If not specified, defaults to role_path. + - Best practice: Use temp directory (e.g., /tmp) for isolated operations. 
origin: type: dict required: true diff --git a/roles/backup/tasks/backup.yaml b/roles/backup/tasks/backup.yaml index 57cc04a..875fb3d 100644 --- a/roles/backup/tasks/backup.yaml +++ b/roles/backup/tasks/backup.yaml @@ -2,36 +2,69 @@ - name: Build Local Backup Dir Path ansible.builtin.include_tasks: path.yaml when: data_store.scm.origin is not defined + tags: always - name: Include retrieve tasks ansible.builtin.include_tasks: retrieve.yaml when: data_store['scm']['origin'] is defined run_once: true + tags: always -- name: Get scm url +- name: Get scm url - Use actual repo path from retrieve.yaml ansible.builtin.set_fact: - network_backup_path_root: "{{ role_path }}/{{ data_store.scm.origin.url.split('/')[-1] | regex_replace('\\.git$', '') }}" + network_backup_path_root: "{{ network_backup_path | default(role_path ~ '/' ~ (data_store.scm.origin.url.split('/')[-1] | regex_replace('\\.git$', ''))) }}" when: data_store['scm']['origin'] is defined + tags: always - name: Get file name ansible.builtin.set_fact: network_backup_path: "{{ network_backup_path_root }}/{{ data_store.scm.origin.path | default(role_path, true) }}" when: data_store['scm']['origin'] is defined + tags: always - name: Get timestamp ansible.builtin.set_fact: timestamp: "{{ lookup('pipe', 'date +%Y-%m-%d_%H-%M-%S') }}" + tags: always - name: Set default filename ansible.builtin.set_fact: network_backup_filename: >- {{ data_store.scm.origin.filename | default(inventory_hostname ~ '_' ~ timestamp ~ '.txt', true) }} when: network_backup_filename is undefined + tags: always + +- name: Read previous backup for differential comparison (before current backup is created) + ansible.builtin.include_tasks: differential_scm_read_previous.yaml + when: + - data_store['scm']['origin'] is defined + - type | default('full') == "diff" + tags: always - name: Include tasks ansible.builtin.include_tasks: network.yaml + tags: always + +- name: Check for differential backup (SCM only) + ansible.builtin.include_tasks: differential_scm.yaml + when: data_store['scm']['origin'] is defined + tags: always - name: Include build tasks ansible.builtin.include_tasks: publish.yaml - when: data_store.scm.origin is defined + when: + - data_store.scm.origin is defined + - backup_has_changes | default(true) + run_once: true + tags: always + +- name: Cleanup cloned repository when publish is skipped + ansible.builtin.file: + path: "{{ network_backup_path_root }}" + state: absent + when: + - data_store.scm.origin is defined + - not (backup_has_changes | default(true)) run_once: true + delegate_to: localhost + tags: always diff --git a/roles/backup/tasks/cli_backup.yaml b/roles/backup/tasks/cli_backup.yaml index 6f45e47..07651f3 100644 --- a/roles/backup/tasks/cli_backup.yaml +++ b/roles/backup/tasks/cli_backup.yaml @@ -4,3 +4,4 @@ dir_path: "{{ network_backup_path }}" filename: "{{ network_backup_filename | default(inventory_hostname) }}" register: network_backup + tags: always diff --git a/roles/backup/tasks/differential_scm.yaml b/roles/backup/tasks/differential_scm.yaml new file mode 100644 index 0000000..16f7afd --- /dev/null +++ b/roles/backup/tasks/differential_scm.yaml @@ -0,0 +1,99 @@ +--- +# Differential backup logic for SCM (Git) data stores +# Compares current backup with previous backup, normalizing timestamps/metadata +# Only publishes if there are actual configuration changes + +- name: Set default backup type to full if not specified + ansible.builtin.set_fact: + backup_type: "{{ type | default('full') }}" + tags: always + +# Previous backup was 
already read in differential_scm_read_previous.yaml +# before the current backup was created (to avoid overwriting the file) +# Check if previous backup exists (for conditional logic) +- name: Check if previous backup file exists + ansible.builtin.stat: + path: "{{ network_backup_path }}/{{ network_backup_filename }}" + register: previous_backup_stat + when: + - data_store['scm']['origin'] is defined + - backup_type == "diff" + tags: always + +- name: Debug normalized previous backup result (from pre-read) + ansible.builtin.debug: + msg: "Normalized previous backup result (read before current backup): {{ normalized_previous_backup | default('NOT SET - previous backup may not exist') }}" + when: + - data_store['scm']['origin'] is defined + - backup_type == "diff" + tags: always + +- name: Normalize current backup file (remove timestamps and metadata) + ansible.builtin.shell: | + sed -E \ + -e '/^!Command:/d' \ + -e '/^!Running configuration last done at:/d' \ + -e '/^! Last configuration change at /d' \ + -e '/^!Time:/d' \ + -e '/^!NVRAM config last updated at:/d' \ + -e '/^!No configuration change since last restart/d' \ + "{{ network_backup_path }}/{{ network_backup_filename }}" | grep -v '^[[:space:]]*$' + register: normalized_current_result + changed_when: false + when: + - data_store['scm']['origin'] is defined + - backup_type == "diff" + tags: always + +- name: Debug normalized current backup result + ansible.builtin.debug: + msg: "Normalized current backup result: {{ normalized_current_result.stdout }}" + when: + - data_store['scm']['origin'] is defined + - backup_type == "diff" + tags: always + +- name: Set normalized content facts (use pre-read previous backup) + ansible.builtin.set_fact: + normalized_previous: "{{ normalized_previous_backup | default('') }}" + normalized_current: "{{ normalized_current_result.stdout | default('') }}" + when: + - data_store['scm']['origin'] is defined + - backup_type == "diff" + tags: always + +- name: Compare normalized backups (using pre-read previous backup) + ansible.builtin.set_fact: + backup_has_changes: "{{ normalized_previous != normalized_current }}" + when: + - data_store['scm']['origin'] is defined + - backup_type == "diff" + - normalized_previous_backup is defined + - normalized_previous_backup != "" + tags: always + +- name: Set backup_has_changes to true if no previous backup exists + ansible.builtin.set_fact: + backup_has_changes: true + when: + - data_store['scm']['origin'] is defined + - backup_type == "diff" + - normalized_previous_backup is not defined or normalized_previous_backup == "" + tags: always + +- name: Set backup_has_changes to true for full backup type + ansible.builtin.set_fact: + backup_has_changes: true + when: + - data_store['scm']['origin'] is defined + - backup_type != "diff" + tags: always + +- name: Display differential backup result + ansible.builtin.debug: + msg: "Differential backup: {{ 'Changes detected - will publish' if backup_has_changes else 'No changes detected - skipping publish' }}" + when: + - data_store['scm']['origin'] is defined + - backup_type == "diff" + tags: always + diff --git a/roles/backup/tasks/differential_scm_read_previous.yaml b/roles/backup/tasks/differential_scm_read_previous.yaml new file mode 100644 index 0000000..c84cc5f --- /dev/null +++ b/roles/backup/tasks/differential_scm_read_previous.yaml @@ -0,0 +1,55 @@ +--- +# Read previous backup BEFORE current backup is created +# This prevents the current backup from overwriting the previous backup file +# before we can read it for 
comparison + +- name: Set default backup type to full if not specified + ansible.builtin.set_fact: + backup_type: "{{ type | default('full') }}" + tags: always + +- name: Check if previous backup file exists + ansible.builtin.stat: + path: "{{ network_backup_path }}/{{ network_backup_filename }}" + register: previous_backup_stat + when: + - data_store['scm']['origin'] is defined + - backup_type == "diff" + tags: always + +- name: Normalize previous backup file (remove timestamps and metadata) + ansible.builtin.shell: | + sed -E \ + -e '/^!Command:/d' \ + -e '/^!Running configuration last done at:/d' \ + -e '/^! Last configuration change at /d' \ + -e '/^!Time:/d' \ + -e '/^!NVRAM config last updated at:/d' \ + -e '/^!No configuration change since last restart/d' \ + "{{ network_backup_path }}/{{ network_backup_filename }}" | grep -v '^[[:space:]]*$' + register: normalized_previous_result + changed_when: false + failed_when: false + when: + - data_store['scm']['origin'] is defined + - backup_type == "diff" + - previous_backup_stat.stat.exists | default(false) + tags: always + +- name: Store normalized previous backup for later comparison + ansible.builtin.set_fact: + normalized_previous_backup: "{{ normalized_previous_result.stdout | default('') }}" + when: + - data_store['scm']['origin'] is defined + - backup_type == "diff" + - previous_backup_stat.stat.exists | default(false) + tags: always + +- name: Set normalized_previous to empty if no previous backup exists + ansible.builtin.set_fact: + normalized_previous_backup: "" + when: + - data_store['scm']['origin'] is defined + - backup_type == "diff" + - not (previous_backup_stat.stat.exists | default(false)) + tags: always diff --git a/roles/backup/tasks/main.yaml b/roles/backup/tasks/main.yaml index c2514ed..aef7987 100644 --- a/roles/backup/tasks/main.yaml +++ b/roles/backup/tasks/main.yaml @@ -1,6 +1,8 @@ --- - name: Include tasks ansible.builtin.include_tasks: validation.yaml + tags: always - name: Run the platform specific tasks ansible.builtin.include_tasks: "backup.yaml" + tags: always diff --git a/roles/backup/tasks/network.yaml b/roles/backup/tasks/network.yaml index 7464287..08434db 100644 --- a/roles/backup/tasks/network.yaml +++ b/roles/backup/tasks/network.yaml @@ -1,3 +1,4 @@ --- - name: Invoke backup task ansible.builtin.include_tasks: cli_backup.yaml + tags: always diff --git a/roles/backup/tasks/publish.yaml b/roles/backup/tasks/publish.yaml index 6fbc755..e2968d9 100644 --- a/roles/backup/tasks/publish.yaml +++ b/roles/backup/tasks/publish.yaml @@ -3,12 +3,14 @@ ansible.builtin.set_fact: time: "{{ lookup('pipe', 'date \"+%Y-%m-%d-%H-%M\"') }}" run_once: true + tags: always - name: Create default tag ansible.builtin.set_fact: default_tag: annotation: "{{ time }}" message: "backup_on: {{ time }}" + tags: always - name: Set default tag ansible.builtin.set_fact: @@ -16,10 +18,11 @@ when: - tag is defined - tag == "default" + tags: always - name: Publish the changes with tag ansible.scm.git_publish: - path: "{{ network_backup_path }}" + path: "{{ network_backup_path_root }}" token: "{{ data_store.scm.origin.get('token') if data_store.scm.origin.get('token') else omit }}" user: "{{ data_store['scm']['origin']['user'] | d({}) }}" tag: "{{ tag }}" @@ -27,18 +30,21 @@ ssh_key_file: "{{ data_store.scm.origin.get('ssh_key_file') if data_store.scm.origin.get('ssh_key_file') else omit }}" ssh_key_content: "{{ data_store.scm.origin.get('ssh_key_content') if data_store.scm.origin.get('ssh_key_content') else omit }}" when: tag is defined 
+ tags: always - name: Publish the changes ansible.scm.git_publish: - path: "{{ network_backup_path }}" + path: "{{ network_backup_path_root }}" token: "{{ data_store.scm.origin.get('token') if data_store.scm.origin.get('token') else omit }}" user: "{{ data_store['scm']['origin']['user'] | d({}) }}" timeout: 120 ssh_key_file: "{{ data_store.scm.origin.get('ssh_key_file') if data_store.scm.origin.get('ssh_key_file') else omit }}" ssh_key_content: "{{ data_store.scm.origin.get('ssh_key_content') if data_store.scm.origin.get('ssh_key_content') else omit }}" when: tag is not defined + tags: always - name: Remove cloned repository directory ansible.builtin.file: path: "{{ network_backup_path_root }}" state: absent + tags: always diff --git a/roles/backup/tasks/retrieve.yaml b/roles/backup/tasks/retrieve.yaml index 4c02e0c..3e5a426 100644 --- a/roles/backup/tasks/retrieve.yaml +++ b/roles/backup/tasks/retrieve.yaml @@ -2,9 +2,11 @@ - name: Set Default Path ansible.builtin.set_fact: default_path: "{{ role_path }}" + tags: always - name: Perform retrieve with provided credentials when: data_store.scm.origin.token is defined + tags: always block: - name: Retrieve the host vars with provided token access ansible.scm.git_retrieve: @@ -13,14 +15,37 @@ token: "{{ data_store['scm']['origin']['token'] }}" parent_directory: "{{ data_store.scm.parent_directory | default(default_path) }}" register: resource_manager_result + tags: always - name: Update data store path ansible.builtin.set_fact: network_backup_repository: "{{ resource_manager_result }}" + tags: always - name: Perform retrieve with default settings or SSH key when: data_store.scm.origin.token is undefined + tags: always block: + - name: Set repository path + ansible.builtin.set_fact: + repo_local_path: "{{ data_store.scm.parent_directory | default(default_path) }}/{{ data_store['scm']['origin']['url'].split('/')[-1] | regex_replace('\\.git$', '') }}" + tags: always + + - name: Check if repository directory already exists + ansible.builtin.stat: + path: "{{ repo_local_path }}" + register: repo_dir_stat + tags: always + + - name: Pull latest changes if repository exists + ansible.builtin.command: + cmd: git -C {{ repo_local_path }} pull origin main + register: git_pull_result + changed_when: "'Already up to date' not in git_pull_result.stdout" + failed_when: false + when: repo_dir_stat.stat.exists | default(false) + tags: always + - name: Retrieve host vars with default access or SSH key ansible.scm.git_retrieve: origin: @@ -29,11 +54,29 @@ ssh_key_content: "{{ data_store['scm']['origin'].ssh_key_content | default(omit) }}" parent_directory: "{{ data_store.scm.parent_directory | default(default_path) }}" register: resource_manager_result + failed_when: false + when: not (repo_dir_stat.stat.exists | default(false)) + tags: always - - name: Update data store path + - name: Set repository path when directory already exists + ansible.builtin.set_fact: + network_backup_repository: + path: "{{ repo_local_path }}" + when: repo_dir_stat.stat.exists | default(false) + tags: always + + - name: Update data store path from git_retrieve result ansible.builtin.set_fact: network_backup_repository: "{{ resource_manager_result }}" + when: + - resource_manager_result is defined + - resource_manager_result.path is defined + - not (repo_dir_stat.stat.exists | default(false)) + tags: always - name: Update Inventory Path ansible.builtin.set_fact: + # Use the actual repository path from git_retrieve or existing repo + # This path is used by backup.yaml to set 
network_backup_path_root network_backup_path: "{{ network_backup_repository['path'] }}" + tags: always diff --git a/roles/backup/tasks/validation.yaml b/roles/backup/tasks/validation.yaml index 4598f4d..b05a844 100644 --- a/roles/backup/tasks/validation.yaml +++ b/roles/backup/tasks/validation.yaml @@ -8,7 +8,9 @@ - "nxos" - "iosxr" - "vyos" + tags: always - name: Conditional test ansible.builtin.include_tasks: "unsupported_platform.yaml" when: ansible_network_os.split('.')[-1] not in supported_platforms + tags: always diff --git a/roles/restore/tasks/cli_restore.yaml b/roles/restore/tasks/cli_restore.yaml index 6d2f280..fd0f080 100644 --- a/roles/restore/tasks/cli_restore.yaml +++ b/roles/restore/tasks/cli_restore.yaml @@ -2,6 +2,7 @@ - name: Get current timestamp ansible.builtin.set_fact: timestamp: "{{ lookup('pipe', 'date +%Y%m%d%H%M%S') }}" + tags: always - name: Set content specific facts ansible.builtin.set_fact: @@ -11,16 +12,18 @@ delete_path: "{{ role_path }}/tasks/delete" health_check_path: "{{ role_path }}/tasks/health_checks" delete_backup_from_dest: true + tags: always - name: Check if file copy is possible ansible.builtin.include_tasks: "{{ health_check_path }}/{{ network_os }}.yaml" + tags: always - name: Copy file from src to a network device ansible.netcommon.net_put: src: "{{ network_restore_backup_path }}/{{ network_backup_restore_filename }}" dest: "{{ file_name }}.txt" when: network_os in ['iosxr', 'nxos', 'eos', 'junos'] - + tags: always - name: Copy file using system SCP CLI as a workaround for ios quirk and libssh SCP ansible.builtin.command: > @@ -32,21 +35,31 @@ delegate_to: localhost when: network_os == "ios" changed_when: true + tags: always - name: Prepare appliance for a restore operation ansible.builtin.include_tasks: "{{ prepare_path }}/{{ network_os }}.yaml" + tags: always + +- name: Set default appliance_dir per network OS if not provided + ansible.builtin.set_fact: + appliance_dir: "{{ appliance_dir | default('flash://' if network_os == 'ios' else 'flash:' if network_os == 'iosxr' else '/var/tmp' if network_os == 'junos' else 'flash:' if network_os == 'nxos' else 'flash:' if network_os == 'eos' else 'flash:') }}" + tags: always - name: Restore operation for {{ network_os }} ansible.netcommon.cli_restore: filename: "{{ file_name }}.txt" path: "{{ appliance_dir }}" + tags: always - name: Delete backup from appliance ansible.builtin.include_tasks: "{{ delete_path }}/{{ network_os }}.yaml" when: delete_backup_from_dest + tags: always - name: Remove locally cloned repo ansible.builtin.file: path: "{{ network_restore_backup_repo }}" state: absent when: data_store.scm.origin.path is defined + tags: always \ No newline at end of file diff --git a/roles/restore/tasks/cli_restore_config.yaml b/roles/restore/tasks/cli_restore_config.yaml index 57f6824..432595f 100644 --- a/roles/restore/tasks/cli_restore_config.yaml +++ b/roles/restore/tasks/cli_restore_config.yaml @@ -2,6 +2,8 @@ - name: Set content specific facts ansible.builtin.set_fact: cli_restore_path: "{{ role_path }}/cli_restore" + tags: always - name: Invoke restore operation ansible.builtin.include_tasks: "{{ cli_restore_path }}/{{ network_os }}.yaml" + tags: always \ No newline at end of file diff --git a/roles/restore/tasks/common/validation.yaml b/roles/restore/tasks/common/validation.yaml index cd0bd2c..ac4ed7f 100644 --- a/roles/restore/tasks/common/validation.yaml +++ b/roles/restore/tasks/common/validation.yaml @@ -7,7 +7,9 @@ - "eos" - "nxos" - "iosxr" + tags: always - name: Conditional test 
ansible.builtin.include_tasks: "common/unsupported_platform.yaml" when: ansible_network_os.split('.')[-1] not in supported_platforms + tags: always \ No newline at end of file diff --git a/roles/restore/tasks/main.yaml b/roles/restore/tasks/main.yaml index f45aa21..720dd51 100644 --- a/roles/restore/tasks/main.yaml +++ b/roles/restore/tasks/main.yaml @@ -1,6 +1,8 @@ --- - name: Include tasks ansible.builtin.include_tasks: common/validation.yaml + tags: always - name: Run the platform specific tasks ansible.builtin.include_tasks: "restore.yaml" + tags: always \ No newline at end of file diff --git a/roles/restore/tasks/network.yaml b/roles/restore/tasks/network.yaml index cc87c5d..db03e3c 100644 --- a/roles/restore/tasks/network.yaml +++ b/roles/restore/tasks/network.yaml @@ -9,11 +9,14 @@ - "ios" - "junos" network_os: "{{ ansible_network_os.split('.')[2] }}" + tags: always - name: Invoke restore ansible.builtin.include_tasks: "cli_restore.yaml" when: network_os not in supported_cli_restore + tags: always - name: Invoke cli specific backup task ansible.builtin.include_tasks: "cli_restore_config.yaml" when: network_os not in supported_plugin_restore + tags: always \ No newline at end of file diff --git a/roles/restore/tasks/prepare/ios.yaml b/roles/restore/tasks/prepare/ios.yaml index be86349..75162e4 100644 --- a/roles/restore/tasks/prepare/ios.yaml +++ b/roles/restore/tasks/prepare/ios.yaml @@ -4,6 +4,10 @@ msg: - "Task to prepare appliance for restore operation" +- name: Set IOS directory for restore operation + ansible.builtin.set_fact: + appliance_dir: "flash://" + - name: Overwrite startup config - archive cisco.ios.ios_config: lines: diff --git a/roles/restore/tasks/restore.yaml b/roles/restore/tasks/restore.yaml index 5441e1d..12b57bd 100644 --- a/roles/restore/tasks/restore.yaml +++ b/roles/restore/tasks/restore.yaml @@ -3,30 +3,37 @@ ansible.builtin.set_fact: network_restore_backup_path: "./tmp_backup" when: data_store.local is not defined + tags: always - name: Set default host vars path ansible.builtin.set_fact: network_restore_backup_path: "{{ data_store['local']['path'] }}" when: data_store['local'] is defined + tags: always - name: Set default host vars filename ansible.builtin.set_fact: network_backup_restore_filename: "{{ data_store['local']['filename'] }}" when: data_store['local'] is defined + tags: always - name: Set default host vars path ansible.builtin.set_fact: - network_backup_restore_relative_path: "{{ data_store['scm']['origin']['path'] }}" + network_restore_backup_relative_path: "{{ data_store['scm']['origin']['path'] }}" when: data_store['scm'] is defined + tags: always - name: Set default host vars filename ansible.builtin.set_fact: network_backup_restore_filename: "{{ data_store['scm']['origin']['filename'] }}" when: data_store['scm'] is defined + tags: always - name: Retrieve a repository from a distant location and make it available locally ansible.builtin.include_tasks: retrieve.yaml when: data_store.scm is defined + tags: always - name: Include tasks ansible.builtin.include_tasks: network.yaml + tags: always \ No newline at end of file diff --git a/roles/restore/tasks/retrieve.yaml b/roles/restore/tasks/retrieve.yaml index 5c39f3b..92aa2fb 100644 --- a/roles/restore/tasks/retrieve.yaml +++ b/roles/restore/tasks/retrieve.yaml @@ -6,14 +6,18 @@ token: "{{ data_store.scm.origin.get('token') if data_store.scm.origin.get('token') else omit }}" ssh_key_file: "{{ data_store.scm.origin.get('ssh_key_file') if data_store.scm.origin.get('ssh_key_file') else omit }}" 
ssh_key_content: "{{ data_store.scm.origin.get('ssh_key_content') if data_store.scm.origin.get('ssh_key_content') else omit }}" - parent_directory: "{{ role_path }}/{{ network_restore_backup_path }}" + # Use parent_directory from data_store.scm if provided, otherwise default to role_path + parent_directory: "{{ data_store.scm.parent_directory | default(role_path ~ '/' ~ network_restore_backup_path) }}" changed_when: false register: resource_manager_result + tags: always - name: Update Inventory Path ansible.builtin.set_fact: network_restore_backup_repo: "{{ resource_manager_result['path'] }}" + tags: always - name: Update Inventory Path ansible.builtin.set_fact: network_restore_backup_path: "{{ resource_manager_result['path'] }}/{{ data_store['scm']['origin']['path'] }}" + tags: always \ No newline at end of file diff --git a/roles/restore/tmp_backup/network_automation_tools b/roles/restore/tmp_backup/network_automation_tools new file mode 160000 index 0000000..cce4a3d --- /dev/null +++ b/roles/restore/tmp_backup/network_automation_tools @@ -0,0 +1 @@ +Subproject commit cce4a3d4ff5f9fbc801aa8d747e2713f90e04cde From 5c754722a123a9dfc004fa178c47b368f5912c7b Mon Sep 17 00:00:00 2001 From: rohitthakur2590 Date: Tue, 3 Feb 2026 11:58:51 +0100 Subject: [PATCH 2/2] ai_demo_work Signed-off-by: rohitthakur2590 --- README.md | 11 +- roles/backup/README.md | 35 ++ roles/backup/parsers/README.md | 265 +++++++++++++++ roles/backup/parsers/acl_parser.yaml | 19 ++ roles/backup/parsers/bgp_parser.yaml | 17 + roles/backup/parsers/description_parser.yaml | 19 ++ roles/backup/parsers/interface_parser.yaml | 20 ++ roles/backup/parsers/routing_parser.yaml | 19 ++ roles/backup/parsers/security_parser.yaml | 25 ++ roles/backup/parsers/vlan_parser.yaml | 17 + roles/backup/tasks/backup.yaml | 102 +++++- roles/backup/tasks/diff_severity_scoring.yaml | 309 ++++++++++++++++++ roles/backup/tasks/differential_scm.yaml | 2 + .../tasks/differential_scm_read_previous.yaml | 2 + roles/backup/tasks/load_parsers.yaml | 107 ++++++ roles/backup/tasks/publish.yaml | 121 ++++++- roles/backup/tasks/publish_bef_ai.yaml | 75 +++++ roles/restore/README.md | 63 ++++ roles/restore/tasks/hash_verification.yaml | 66 ++++ roles/restore/tasks/restore.yaml | 4 + .../tmp_backup/network_automation_tools | 1 - 21 files changed, 1289 insertions(+), 10 deletions(-) create mode 100644 roles/backup/parsers/README.md create mode 100644 roles/backup/parsers/acl_parser.yaml create mode 100644 roles/backup/parsers/bgp_parser.yaml create mode 100644 roles/backup/parsers/description_parser.yaml create mode 100644 roles/backup/parsers/interface_parser.yaml create mode 100644 roles/backup/parsers/routing_parser.yaml create mode 100644 roles/backup/parsers/security_parser.yaml create mode 100644 roles/backup/parsers/vlan_parser.yaml create mode 100644 roles/backup/tasks/diff_severity_scoring.yaml create mode 100644 roles/backup/tasks/load_parsers.yaml create mode 100644 roles/backup/tasks/publish_bef_ai.yaml create mode 100644 roles/restore/tasks/hash_verification.yaml delete mode 160000 roles/restore/tmp_backup/network_automation_tools diff --git a/README.md b/README.md index 4d00062..cb0802b 100644 --- a/README.md +++ b/README.md @@ -7,9 +7,9 @@ ## About - The Ansible Network Backup Validated Content provides a comprehensive solution for managing network backups and restores across supported network platforms. This validated content offers two key functionalities: `backup` and `restore`, each designed to be platform-agnostic and user-friendly. 
-- The `backup` role allows users to create, compare, and tag backups, supporting both local and remote data stores. This ensures that network configurations are regularly and securely backed up, providing a reliable method to safeguard network infrastructure. +- The `backup` role allows users to create, compare, and tag backups, supporting both local and remote data stores. This ensures that network configurations are regularly and securely backed up, providing a reliable method to safeguard network infrastructure. The role includes SHA-256 hash verification to ensure backup file integrity and detect tampering or corruption. -- The `restore` role enables users to fetch backups from local or remote data stores and perform configuration restores. This functionality ensures that network configurations can be swiftly and accurately restored when needed, minimizing downtime and maintaining network stability. +- The `restore` role enables users to fetch backups from local or remote data stores and perform configuration restores. This functionality ensures that network configurations can be swiftly and accurately restored when needed, minimizing downtime and maintaining network stability. The role automatically verifies backup file integrity using SHA-256 hashes before restoring, preventing corrupted or tampered configurations from being applied. - The Network Backup Content is ideal for system administrators and IT professionals who need to manage and maintain network infrastructure, automate the backup and restore process, and ensure data is regularly and securely backed up and available for restoration as required. @@ -71,6 +71,13 @@ ansible-galaxy collection install network.backup **Restore Configuration**: - Allows users to restore a previously backed-up configuration. - Users can compare the running configuration with the backup to identify differences and restore the configuration only if differences are found. +- Automatically verifies backup file integrity using SHA-256 hashes before restoring, ensuring corrupted or tampered files are not applied. + +**Hash Verification**: +- Calculates SHA-256 hashes for all backup files to ensure data integrity. +- Stores hash files alongside backup files for easy verification. +- Automatically verifies backup file integrity during restore operations. +- Prevents restoring corrupted or tampered configurations. ## Testing diff --git a/roles/backup/README.md b/roles/backup/README.md index e8a3355..a6f0f12 100644 --- a/roles/backup/README.md +++ b/roles/backup/README.md @@ -19,6 +19,13 @@ This role supports full and differential backups, storing them locally or in a r - Helps reduce storage and SCM noise by saving only when diff exists. - **Ignores timestamps and metadata** - only detects actual configuration changes. +### SHA-256 Hash Verification +- Calculates SHA-256 hash for every backup file to ensure data integrity. +- Stores hash in a separate `.sha256` file alongside the backup file. +- Provides cryptographic proof of backup file integrity. +- Enables detection of file corruption or tampering. +- Hash files are automatically created and stored with backups. 
+ --- ## Role Variables @@ -36,7 +43,9 @@ This role supports full and differential backups, storing them locally or in a r | `data_store.scm.origin.ssh_key_file` | Path to the SSH private key file for Git authentication | `str` | Yes (if using SCM SSH) | N/A | | `data_store.scm.origin.ssh_key_content` | The content of the SSH private key | `str` | Yes (if using SCM SSH) | N/A | | `type` | Type of backup to perform. Options: `"full"`, `"incremental"`, or `"diff"` | `str` | No | `"full"` | +| `enable_hash_file` | Enable SHA-256 hash file creation. When `true`, creates a `.sha256` file alongside the backup file | `bool` | No | `true` | +> **Note**: When `enable_hash_file` is enabled (default), the role creates a hash file with the same name as the backup file but with a `.sha256` extension. For example, if the backup file is `ios_device_backup.txt`, the hash file will be `ios_device_backup.txt.sha256`. The hash file contains the SHA-256 hash of the backup file and can be used to verify backup integrity during restore operations. --- @@ -163,6 +172,32 @@ This role supports full and differential backups, storing them locally or in a r > **Note**: With `type: "diff"`, the backup will only be published to SCM if actual configuration changes are detected. Timestamps and metadata differences are ignored. See [Differential Backup Documentation](Differential_Backup_Documentation.md) for more details. +### Create Backup with Hash Verification + +```yaml +- name: Create Network Backup with Hash Verification + hosts: network + gather_facts: false + tasks: + - name: Create Network Backup + ansible.builtin.include_role: + name: network.backup.backup + vars: + enable_hash_file: true # Enable hash file creation (default) + data_store: + scm: + origin: + user: + name: "your_name" + email: "your_email@example.com" + url: "git@github.com:youruser/your-backup-repo.git" + ssh_key_file: "/path/to/ssh/key" + filename: "{{ ansible_date_time.date }}_{{ inventory_hostname }}.txt" + path: "backups/{{ ansible_date_time.date }}/{{ inventory_hostname }}" +``` + +> **Note**: When `enable_hash_file: true` (default), the role creates a `.sha256` file alongside the backup file. This hash file contains the SHA-256 hash of the backup file and is used by the restore role to verify backup integrity before restoring. The hash file is automatically stored in the same location as the backup file. + ## License GNU General Public License v3.0 or later. diff --git a/roles/backup/parsers/README.md b/roles/backup/parsers/README.md new file mode 100644 index 0000000..6e70639 --- /dev/null +++ b/roles/backup/parsers/README.md @@ -0,0 +1,265 @@ +# Parser-Based Feature Extraction + +This directory contains parser definitions for extracting configuration change features from diff output. The parser-based approach makes it easy to customize and extend feature detection. + +## Overview + +Instead of hardcoding regex patterns in the main task file, parsers are defined as separate YAML files. 
Each parser: +- Defines patterns to detect specific configuration changes +- Has a configurable weight for severity scoring +- Can be easily added, modified, or disabled + +## Parser Structure + +Each parser file (`*_parser.yaml`) follows this structure: + +```yaml +--- +parser_name: parser_name # Unique identifier +feature_name: feature_changes # Key in diff_features dictionary +weight: 10 # Points per change (for scoring) +description: "Human readable description" + +patterns: + add: # Patterns for added lines + - '^\\+pattern1' + - '^\\+pattern2' + remove: # Patterns for removed lines + - '^-pattern1' + - '^-pattern2' + +# Optional: Exclude patterns +exclude_patterns: + - 'pattern_to_exclude' +``` + +## Built-in Parsers + +### 1. `bgp_parser.yaml` +- **Feature**: `bgp_changes` +- **Weight**: 10 points +- **Detects**: BGP router configuration changes + +### 2. `acl_parser.yaml` +- **Feature**: `acl_changes` +- **Weight**: 10 points +- **Detects**: Access Control List configuration changes + +### 3. `security_parser.yaml` +- **Feature**: `security_changes` +- **Weight**: 10 points +- **Detects**: Security-related changes (AAA, TACACS, RADIUS, usernames, crypto keys) + +### 4. `routing_parser.yaml` +- **Feature**: `routing_changes` +- **Weight**: 5 points +- **Detects**: Routing configuration changes (static routes, routing protocols) + +### 5. `vlan_parser.yaml` +- **Feature**: `vlan_changes` +- **Weight**: 5 points +- **Detects**: VLAN configuration changes + +### 6. `interface_parser.yaml` +- **Feature**: `interface_changes` +- **Weight**: 2 points +- **Detects**: Interface configuration changes (excluding descriptions) + +### 7. `description_parser.yaml` +- **Feature**: `description_changes` +- **Weight**: 1 point +- **Detects**: Interface description changes (lowest risk) + +## Creating Custom Parsers + +### Step 1: Create Parser File + +Create a new file in the `parsers/` directory following the naming convention: `{feature}_parser.yaml` + +Example: `ospf_parser.yaml` + +```yaml +--- +parser_name: ospf_parser +feature_name: ospf_changes +weight: 8 +description: "Detects OSPF routing protocol configuration changes" + +patterns: + add: + - '^\\+router\\s+ospf' + - '^\\+router\\s+ospfv3' + - '^\\+ip\\s+ospf' + remove: + - '^-router\\s+ospf' + - '^-router\\s+ospfv3' + - '^-ip\\s+ospf' +``` + +### Step 2: Parser is Automatically Loaded + +The `load_parsers.yaml` task automatically discovers and loads all `*_parser.yaml` files in this directory. No additional configuration needed! + +### Step 3: Use in Scoring (Optional) + +If you want to use the parser's weight dynamically in scoring, enable dynamic scoring: + +```yaml +vars: + use_dynamic_scoring: true +``` + +Otherwise, the weight is used for reference and you can manually add it to the scoring formula in `diff_severity_scoring.yaml`. + +## Pattern Syntax + +Patterns use Python regex syntax (as used by Ansible's `regex_findall` filter): + +- `^` - Start of line +- `\\+` - Literal `+` (for added lines in diff) +- `\\-` - Literal `-` (for removed lines in diff) +- `\\s+` - One or more whitespace characters +- `\\d+` - One or more digits +- `|` - OR operator (when joining patterns) + +### Common Patterns + +```yaml +# Match interface configuration +'^\\+interface\\s+' + +# Match with specific value +'^\\+vlan\\s+\\d+' + +# Match multiple options +'^\\+(ip|ipv6)\\s+access-list' + +# Match with word boundary +'^\\+router\\s+bgp\\b' +``` + +## Advanced: Exclude Patterns + +Some parsers may need to exclude certain patterns. 
For example, interface changes shouldn't count descriptions: + +```yaml +exclude_patterns: + - 'description' +``` + +Note: Currently, exclusions are handled in the scoring formula. Future enhancements may add automatic exclusion logic. + +## Testing Parsers + +### Enable Debug Mode + +To see which parsers are loaded and what features are detected: + +```yaml +vars: + debug_parsers: true +``` + +### Manual Testing + +You can test parser patterns manually: + +```bash +# Create a test diff +echo "+router bgp 65000" > test_diff.txt +echo "-interface GigabitEthernet1" >> test_diff.txt + +# Test with ansible +ansible localhost -m debug -a "msg={{ lookup('file', 'test_diff.txt') | regex_findall('^\\+router\\s+bgp', multiline=True) }}" +``` + +## Disabling Parsers + +To temporarily disable a parser, rename it (remove `_parser.yaml` suffix) or move it outside the `parsers/` directory. + +Example: +```bash +mv bgp_parser.yaml bgp_parser.yaml.disabled +``` + +## Parser Priority + +Parsers are loaded in alphabetical order. If multiple parsers match the same line, all matches are counted. This is usually desired behavior (e.g., a BGP change might also match a routing change). + +## Best Practices + +1. **Use Descriptive Names**: `ospf_parser.yaml` is better than `parser1.yaml` +2. **Document Patterns**: Add comments explaining complex patterns +3. **Test Thoroughly**: Test with real diff output from your devices +4. **Keep Patterns Specific**: Avoid overly broad patterns that match unintended changes +5. **Consider Weights**: Higher weights for more critical changes +6. **Version Control**: Track parser changes in git for auditability + +## Example: Adding OSPF Parser + +1. Create `parsers/ospf_parser.yaml`: +```yaml +--- +parser_name: ospf_parser +feature_name: ospf_changes +weight: 8 +description: "Detects OSPF routing protocol configuration changes" + +patterns: + add: + - '^\\+router\\s+ospf' + - '^\\+router\\s+ospfv3' + remove: + - '^-router\\s+ospf' + - '^-router\\s+ospfv3' +``` + +2. Update scoring formula in `diff_severity_scoring.yaml` (if not using dynamic scoring): +```yaml +severity_score: >- + {{ + (diff_features.bgp_changes | default(0) | int) * 10 + + (diff_features.ospf_changes | default(0) | int) * 8 + # New parser + # ... rest of formula + }} +``` + +3. That's it! The parser is automatically loaded and used. + +## Troubleshooting + +### Parser Not Detected +- Check file naming: Must end with `_parser.yaml` +- Check file location: Must be in `parsers/` directory +- Check YAML syntax: Use `ansible-lint` or `yamllint` + +### Patterns Not Matching +- Test patterns manually with `regex_findall` +- Check for escaped characters (`\\+` not `+`) +- Verify multiline mode is enabled +- Check diff format (unified diff with `+`/`-` prefixes) + +### Wrong Feature Counts +- Check for overlapping patterns between parsers +- Verify exclude patterns are working +- Enable `debug_parsers: true` to see execution summary + +## Migration from Hardcoded Patterns + +The old hardcoded approach: +```yaml +bgp_changes: "{{ (diff_output.stdout | regex_findall('^\\+router\\s+bgp', multiline=True) | length) + ... 
}}" +``` + +New parser-based approach: +```yaml +# Defined in bgp_parser.yaml +# Automatically loaded and executed +``` + +Benefits: +- ✅ Easier to maintain +- ✅ Easier to extend +- ✅ Better organization +- ✅ Reusable patterns +- ✅ Self-documenting diff --git a/roles/backup/parsers/acl_parser.yaml b/roles/backup/parsers/acl_parser.yaml new file mode 100644 index 0000000..87bd9b5 --- /dev/null +++ b/roles/backup/parsers/acl_parser.yaml @@ -0,0 +1,19 @@ +--- +# ACL Configuration Parser +# Detects Access Control List configuration changes +# Weight: 10 points per change + +parser_name: acl_parser +feature_name: acl_changes +weight: 10 +description: "Detects ACL (Access Control List) configuration changes" + +patterns: + add: + - '^\\+(ip|ipv6)\\s+access-list' + - '^\\+ip\\s+access-group' + - '^\\+ipv6\\s+access-group' + remove: + - '^-(ip|ipv6)\\s+access-list' + - '^-ip\\s+access-group' + - '^-ipv6\\s+access-group' diff --git a/roles/backup/parsers/bgp_parser.yaml b/roles/backup/parsers/bgp_parser.yaml new file mode 100644 index 0000000..0d6baeb --- /dev/null +++ b/roles/backup/parsers/bgp_parser.yaml @@ -0,0 +1,17 @@ +--- +# BGP Configuration Parser +# Detects BGP-related configuration changes +# Weight: 10 points per change + +parser_name: bgp_parser +feature_name: bgp_changes +weight: 10 +description: "Detects BGP router configuration changes" + +patterns: + add: + - '^\\+router\\s+bgp' + - '^\\+router\\s+bgp\\s+\\d+' + remove: + - '^-router\\s+bgp' + - '^-router\\s+bgp\\s+\\d+' diff --git a/roles/backup/parsers/description_parser.yaml b/roles/backup/parsers/description_parser.yaml new file mode 100644 index 0000000..0932ffe --- /dev/null +++ b/roles/backup/parsers/description_parser.yaml @@ -0,0 +1,19 @@ +--- +# Description Configuration Parser +# Detects interface description changes (lowest risk) +# Weight: 1 point per change + +parser_name: description_parser +feature_name: description_changes +weight: 1 +description: "Detects interface description changes (lowest risk changes)" + +patterns: + add: + # Match: + description or +description + # Pattern: + (optional space) description (space or end of line) + - "^\\+\\s*description(\\s|$)" + remove: + # Match: - description or -description + # Pattern: - (optional space) description (space or end of line) + - "^-\\s*description(\\s|$)" diff --git a/roles/backup/parsers/interface_parser.yaml b/roles/backup/parsers/interface_parser.yaml new file mode 100644 index 0000000..04ef887 --- /dev/null +++ b/roles/backup/parsers/interface_parser.yaml @@ -0,0 +1,20 @@ +--- +# Interface Configuration Parser +# Detects interface-related configuration changes +# Weight: 2 points per change (excluding descriptions) +# Note: Description changes are handled separately + +parser_name: interface_parser +feature_name: interface_changes +weight: 2 +description: "Detects interface configuration changes (excluding descriptions)" + +patterns: + add: + - '^\\+interface\\s+' + remove: + - '^-interface\\s+' + +# Exclude patterns (these should not count as interface changes) +exclude_patterns: + - 'description' diff --git a/roles/backup/parsers/routing_parser.yaml b/roles/backup/parsers/routing_parser.yaml new file mode 100644 index 0000000..3c6b64c --- /dev/null +++ b/roles/backup/parsers/routing_parser.yaml @@ -0,0 +1,19 @@ +--- +# Routing Configuration Parser +# Detects routing-related configuration changes +# Weight: 5 points per change + +parser_name: routing_parser +feature_name: routing_changes +weight: 5 +description: "Detects routing configuration changes 
(static routes, routing protocols)" + +patterns: + add: + - '^\\+ip\\s+route' + - '^\\+router\\s+' + - '^\\+ipv6\\s+route' + remove: + - '^-ip\\s+route' + - '^-router\\s+' + - '^-ipv6\\s+route' diff --git a/roles/backup/parsers/security_parser.yaml b/roles/backup/parsers/security_parser.yaml new file mode 100644 index 0000000..7e494d5 --- /dev/null +++ b/roles/backup/parsers/security_parser.yaml @@ -0,0 +1,25 @@ +--- +# Security Configuration Parser +# Detects security-related configuration changes (AAA, TACACS, RADIUS, usernames) +# Weight: 10 points per change + +parser_name: security_parser +feature_name: security_changes +weight: 10 +description: "Detects security-related configuration changes (AAA, TACACS, RADIUS, usernames)" + +patterns: + add: + - '^\\+aaa\\s+' + - '^\\+tacacs\\s+' + - '^\\+radius\\s+' + - '^\\+username\\s+' + - '^\\+crypto\\s+key' + - '^\\+enable\\s+secret' + remove: + - '^-aaa\\s+' + - '^-tacacs\\s+' + - '^-radius\\s+' + - '^-username\\s+' + - '^-crypto\\s+key' + - '^-enable\\s+secret' diff --git a/roles/backup/parsers/vlan_parser.yaml b/roles/backup/parsers/vlan_parser.yaml new file mode 100644 index 0000000..33f702a --- /dev/null +++ b/roles/backup/parsers/vlan_parser.yaml @@ -0,0 +1,17 @@ +--- +# VLAN Configuration Parser +# Detects VLAN-related configuration changes +# Weight: 5 points per change + +parser_name: vlan_parser +feature_name: vlan_changes +weight: 5 +description: "Detects VLAN configuration changes" + +patterns: + add: + - '^\\+vlan\\s+\\d+' + - '^\\+vlan\\s+\\d+\\s+name' + remove: + - '^-vlan\\s+\\d+' + - '^-vlan\\s+\\d+\\s+name' diff --git a/roles/backup/tasks/backup.yaml b/roles/backup/tasks/backup.yaml index 875fb3d..3ad9fda 100644 --- a/roles/backup/tasks/backup.yaml +++ b/roles/backup/tasks/backup.yaml @@ -10,18 +10,99 @@ run_once: true tags: always -- name: Get scm url - Use actual repo path from retrieve.yaml +- name: Propagate network_backup_path to all hosts (from first host that ran retrieve) ansible.builtin.set_fact: - network_backup_path_root: "{{ network_backup_path | default(role_path ~ '/' ~ (data_store.scm.origin.url.split('/')[-1] | regex_replace('\\.git$', ''))) }}" - when: data_store['scm']['origin'] is defined + network_backup_path: "{{ hostvars[groups['all'][0]]['network_backup_path'] | default(network_backup_path) }}" + when: + - data_store['scm']['origin'] is defined + - network_backup_path is not defined or network_backup_path == "" tags: always -- name: Get file name +- name: Get scm url - Use actual repo path from retrieve.yaml or calculate from parent_directory ansible.builtin.set_fact: - network_backup_path: "{{ network_backup_path_root }}/{{ data_store.scm.origin.path | default(role_path, true) }}" + network_backup_path_root: >- + {{ + network_backup_path | default( + (data_store.scm.parent_directory | default(role_path)) ~ '/' ~ + (data_store.scm.origin.url.split('/')[-1] | regex_replace('\\.git$', '')) + ) + }} when: data_store['scm']['origin'] is defined tags: always +- name: Ensure network_backup_path_root is valid (not root or empty) + ansible.builtin.set_fact: + network_backup_path_root: "{{ role_path ~ '/' ~ (data_store.scm.origin.url.split('/')[-1] | regex_replace('\\.git$', '')) }}" + when: + - data_store['scm']['origin'] is defined + - network_backup_path_root is not defined or network_backup_path_root == "" or network_backup_path_root == "/" + tags: always + +- name: Debug path construction + ansible.builtin.debug: + msg: | + Path Debug Info: + - network_backup_path_root: {{ network_backup_path_root | 
default('NOT SET') }} + - data_store.scm.origin.path: {{ data_store.scm.origin.path | default('NOT SET') }} + - role_path: {{ role_path | default('NOT SET') }} + when: + - data_store['scm']['origin'] is defined + - debug_paths | default(false) + tags: always + +- name: Get file name - construct full backup path + ansible.builtin.set_fact: + network_backup_path: "{{ network_backup_path_root }}/{{ data_store.scm.origin.path | default('', true) }}" + when: + - data_store['scm']['origin'] is defined + - data_store.scm.origin.path is defined + - data_store.scm.origin.path != "" + tags: always + +- name: Set network_backup_path to root if path not provided + ansible.builtin.set_fact: + network_backup_path: "{{ network_backup_path_root }}" + when: + - data_store['scm']['origin'] is defined + - data_store.scm.origin.path is not defined or data_store.scm.origin.path == "" + tags: always + +- name: Ensure network_backup_path is absolute and expand user home + ansible.builtin.set_fact: + network_backup_path: "{{ network_backup_path | expanduser }}" + when: + - data_store['scm']['origin'] is defined + - network_backup_path is defined + tags: always + +- name: Validate network_backup_path is not root directory + ansible.builtin.assert: + that: + - network_backup_path != "/" + - network_backup_path != "" + - "'/backups' not in network_backup_path or network_backup_path != '/backups'" + fail_msg: | + Invalid backup path detected: {{ network_backup_path }} + Path cannot be root directory (/). + Check network_backup_path_root and data_store.scm.origin.path configuration. + success_msg: "Backup path validated: {{ network_backup_path }}" + when: + - data_store['scm']['origin'] is defined + - network_backup_path is defined + tags: always + +- name: Debug final backup path (enable with -e debug_paths=true) + ansible.builtin.debug: + msg: | + Final Backup Path Debug: + - network_backup_path_root: {{ network_backup_path_root | default('NOT SET') }} + - network_backup_path: {{ network_backup_path | default('NOT SET') }} + - data_store.scm.origin.path: {{ data_store.scm.origin.path | default('NOT SET') }} + when: + - data_store['scm']['origin'] is defined + - debug_paths | default(false) + tags: always + - name: Get timestamp ansible.builtin.set_fact: timestamp: "{{ lookup('pipe', 'date +%Y-%m-%d_%H-%M-%S') }}" @@ -45,11 +126,22 @@ ansible.builtin.include_tasks: network.yaml tags: always +- name: Calculate SHA-256 hash of backup file + ansible.builtin.include_tasks: hash_verification.yaml + tags: always + - name: Check for differential backup (SCM only) ansible.builtin.include_tasks: differential_scm.yaml when: data_store['scm']['origin'] is defined tags: always +- name: Calculate diff severity score + ansible.builtin.include_tasks: diff_severity_scoring.yaml + when: + - data_store['scm']['origin'] is defined + - backup_has_changes | default(true) + tags: always + - name: Include build tasks ansible.builtin.include_tasks: publish.yaml when: diff --git a/roles/backup/tasks/diff_severity_scoring.yaml b/roles/backup/tasks/diff_severity_scoring.yaml new file mode 100644 index 0000000..9a67d7c --- /dev/null +++ b/roles/backup/tasks/diff_severity_scoring.yaml @@ -0,0 +1,309 @@ +--- +# Diff severity scoring for configuration changes +# Analyzes configuration diffs and assigns severity scores +# Uses rules-based approach with optional ML enhancement +# +# To enable ML: Set enable_ml_scoring=true and provide ml_model_path +# ML requires: scikit-learn, joblib (install via pip) + +- name: Set default backup type + 
ansible.builtin.set_fact: + backup_type: "{{ type | default('full') }}" + tags: always + +- name: Check if severity scoring is enabled + ansible.builtin.set_fact: + severity_scoring_enabled: "{{ enable_severity_scoring | default(false) }}" + tags: always + +- name: Skip severity scoring if not enabled or no previous backup + ansible.builtin.set_fact: + skip_severity_scoring: "{{ not severity_scoring_enabled or backup_type != 'diff' or normalized_previous_backup is not defined or normalized_previous_backup == '' }}" + tags: always + +- name: Debug severity scoring skip condition + ansible.builtin.debug: + msg: | + Severity Scoring Debug: + - severity_scoring_enabled: {{ severity_scoring_enabled | default('NOT SET') }} + - backup_type: {{ backup_type | default('NOT SET') }} + - normalized_previous_backup is defined: {{ normalized_previous_backup is defined }} + - normalized_previous_backup length: {{ normalized_previous_backup | default('') | length }} + - skip_severity_scoring: {{ skip_severity_scoring | default('NOT SET') }} + when: + - data_store['scm']['origin'] is defined + - debug_severity | default(false) | bool + tags: always + +- name: Write normalized previous backup to temp file + ansible.builtin.copy: + content: "{{ normalized_previous_backup }}" + dest: "/tmp/prev_backup_{{ inventory_hostname }}.txt" + mode: '0600' + delegate_to: localhost + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + tags: always + +- name: Write normalized current backup to temp file + ansible.builtin.copy: + content: "{{ normalized_current }}" + dest: "/tmp/curr_backup_{{ inventory_hostname }}.txt" + mode: '0600' + delegate_to: localhost + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + tags: always + +- name: Extract diff lines between previous and current backup + ansible.builtin.shell: | + diff -u /tmp/prev_backup_{{ inventory_hostname }}.txt /tmp/curr_backup_{{ inventory_hostname }}.txt || true + register: diff_output + changed_when: false + delegate_to: localhost + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + tags: always + +- name: Save diff output to file for ML processing + ansible.builtin.copy: + content: "{{ diff_output.stdout | default('') }}" + dest: "/tmp/diff_output_{{ inventory_hostname }}.txt" + mode: '0600' + delegate_to: localhost + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + - enable_ml_scoring | default(false) | bool + tags: always + +- name: Cleanup temp backup files (keep diff file for ML) + ansible.builtin.file: + path: "{{ item }}" + state: absent + delegate_to: localhost + loop: + - "/tmp/prev_backup_{{ inventory_hostname }}.txt" + - "/tmp/curr_backup_{{ inventory_hostname }}.txt" + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + tags: always + +- name: Load and execute parsers for feature extraction + ansible.builtin.include_tasks: load_parsers.yaml + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + tags: alwayss + +- name: Calculate severity score (Rules-based scoring) + ansible.builtin.set_fact: + severity_score: >- + {{ + (diff_features.bgp_changes | default(0) | int) * 10 + + (diff_features.acl_changes | default(0) | int) * 10 + + (diff_features.security_changes | default(0) | int) * 10 + + (diff_features.routing_changes | default(0) | int) * 5 + + (diff_features.vlan_changes | 
default(0) | int) * 5 + + (diff_features.description_changes | default(0) | int) * 1 + + ([0, ((diff_features.interface_changes | default(0) | int) - (diff_features.description_changes | default(0) | int))] | max) * 2 + }} + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + - diff_features is defined + tags: always + +- name: Determine severity level based on score + ansible.builtin.set_fact: + severity_level: >- + {{ + 'CRITICAL' if (severity_score | default(0) | int) >= 20 + else ('HIGH' if (severity_score | default(0) | int) >= 10 + else ('MEDIUM' if (severity_score | default(0) | int) >= 5 + else 'LOW')) + }} + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + - severity_score is defined + tags: always + +- name: Check if ML scoring is enabled + ansible.builtin.set_fact: + ml_scoring_enabled: "{{ enable_ml_scoring | default(false) | bool }}" + ml_model_path_clean: "{{ ml_model_path | default('') | string }}" + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + tags: always + +- name: Debug ML configuration + ansible.builtin.debug: + msg: | + ML Configuration Debug: + - enable_ml_scoring: {{ enable_ml_scoring | default('NOT SET') }} + - ml_scoring_enabled: {{ ml_scoring_enabled | default('NOT SET') }} + - ml_model_path: {{ ml_model_path | default('NOT SET') }} + - ml_model_path is defined: {{ ml_model_path is defined }} + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + - debug_ml | default(false) | bool + tags: always + +- name: Copy ML scoring script to temp location + ansible.builtin.copy: + src: "{{ role_path }}/files/diff_severity_ml.py" + dest: "/tmp/diff_severity_ml.py" + mode: '0755' + delegate_to: localhost + when: + - not skip_severity_scoring | default(true) + - ml_scoring_enabled | default(false) | bool + - data_store['scm']['origin'] is defined + tags: always + +- name: Check if ML model file exists + ansible.builtin.stat: + path: "{{ ml_model_path_clean }}" + register: ml_model_file + delegate_to: localhost + when: + - not skip_severity_scoring | default(true) + - ml_scoring_enabled | default(false) | bool + - data_store['scm']['origin'] is defined + - ml_model_path_clean is defined + - ml_model_path_clean != '' + tags: always + +- name: Run ML-based severity scoring (if enabled) + ansible.builtin.shell: | + python3 /tmp/diff_severity_ml.py /tmp/diff_output_{{ inventory_hostname }}.txt {{ ml_model_path_clean }} + register: ml_scoring_result + changed_when: false + delegate_to: localhost + when: + - not skip_severity_scoring | default(true) + - ml_scoring_enabled | default(false) | bool + - data_store['scm']['origin'] is defined + - ml_model_path_clean is defined + - ml_model_path_clean != '' + - ml_model_file.stat.exists | default(false) + failed_when: false + tags: always + +- name: Debug ML scoring result + ansible.builtin.debug: + msg: | + ML Scoring Result Debug: + - ml_scoring_result.rc: {{ ml_scoring_result.rc | default('NOT SET') }} + - ml_scoring_result.stdout: {{ ml_scoring_result.stdout | default('NOT SET') | truncate(200) }} + - ml_scoring_result.stderr: {{ ml_scoring_result.stderr | default('NOT SET') | truncate(200) }} + when: + - not skip_severity_scoring | default(true) + - ml_scoring_enabled | default(false) | bool + - ml_scoring_result is defined + - debug_ml | default(false) | bool + tags: always + +- name: Cleanup ML temp files (after ML processing) + 
ansible.builtin.file: + path: "{{ item }}" + state: absent + delegate_to: localhost + loop: + - "/tmp/diff_output_{{ inventory_hostname }}.txt" + - "/tmp/diff_severity_ml.py" + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + tags: always + +- name: Parse ML scoring results + ansible.builtin.set_fact: + ml_result_json: "{{ ml_scoring_result.stdout | from_json }}" + when: + - not skip_severity_scoring | default(true) + - ml_scoring_enabled | default(false) | bool + - ml_scoring_result.stdout is defined + - ml_scoring_result.rc == 0 + tags: always + +- name: Extract ML scoring values + ansible.builtin.set_fact: + ml_severity_level: "{{ ml_result_json.final.level | default('N/A') }}" + ml_severity_confidence: "{{ ml_result_json.final.confidence | default(omit) }}" + ml_scoring_method: "{{ ml_result_json.final.method | default('ML') }}" + when: + - not skip_severity_scoring | default(true) + - ml_scoring_enabled | default(false) | bool + - ml_result_json is defined + - ml_result_json.final is defined + tags: always + +- name: Use ML results if available, otherwise use rules-based + ansible.builtin.set_fact: + final_severity_level: "{{ ml_severity_level | default(severity_level) }}" + final_severity_score: "{{ severity_score }}" + scoring_method: "{{ ml_scoring_method | default('Rules-based') }}" + ml_confidence: "{{ ml_severity_confidence | default(omit) }}" + when: + - not skip_severity_scoring | default(true) + - severity_level is defined + tags: always + +- name: Display severity scoring results + ansible.builtin.debug: + msg: | + ============================================ + DIFF SEVERITY SCORING RESULTS + ============================================ + Scoring Method: {{ scoring_method | default('Rules-based') }} + Severity Level: {{ final_severity_level | default('N/A') }} + Severity Score: {{ final_severity_score | default('N/A') }} + {% if ml_confidence is defined %} + ML Confidence: {{ ml_confidence | round(2) }} + {% endif %} + + Change Features Detected: + - BGP Changes: {{ diff_features.bgp_changes | default(0) }} + - ACL Changes: {{ diff_features.acl_changes | default(0) }} + - Security Changes: {{ diff_features.security_changes | default(0) }} + - Routing Changes: {{ diff_features.routing_changes | default(0) }} + - VLAN Changes: {{ diff_features.vlan_changes | default(0) }} + - Interface Changes: {{ diff_features.interface_changes | default(0) }} + - Description Changes: {{ diff_features.description_changes | default(0) }} + - Total Changes: {{ diff_features.total_changes | default(0) }} + + Recommended Action: + {% if final_severity_level | default('') == 'CRITICAL' %} + ⚠️ CRITICAL: Manual review required before merge + {% elif final_severity_level | default('') == 'HIGH' %} + ⚠️ HIGH: Review recommended + {% elif final_severity_level | default('') == 'MEDIUM' %} + ℹ️ MEDIUM: Team notification sent + {% else %} + ✓ LOW: Auto-approve safe + {% endif %} + ============================================ + when: + - not skip_severity_scoring | default(true) + - final_severity_level is defined + tags: always + +- name: Fail on critical severity if auto-rollback enabled + ansible.builtin.fail: + msg: | + CRITICAL severity change detected (Score: {{ severity_score }}). + Auto-rollback is enabled. Backup will not be published. + Please review changes manually before proceeding. 
+ when: + - not skip_severity_scoring | default(true) + - severity_level | default('') == 'CRITICAL' + - auto_rollback_on_critical | default(false) + tags: always diff --git a/roles/backup/tasks/differential_scm.yaml b/roles/backup/tasks/differential_scm.yaml index 16f7afd..8d445dc 100644 --- a/roles/backup/tasks/differential_scm.yaml +++ b/roles/backup/tasks/differential_scm.yaml @@ -37,6 +37,8 @@ -e '/^!Time:/d' \ -e '/^!NVRAM config last updated at:/d' \ -e '/^!No configuration change since last restart/d' \ + -e '/^Current configuration :/d' \ + -e '/^Building configuration/d' \ "{{ network_backup_path }}/{{ network_backup_filename }}" | grep -v '^[[:space:]]*$' register: normalized_current_result changed_when: false diff --git a/roles/backup/tasks/differential_scm_read_previous.yaml b/roles/backup/tasks/differential_scm_read_previous.yaml index c84cc5f..da324e1 100644 --- a/roles/backup/tasks/differential_scm_read_previous.yaml +++ b/roles/backup/tasks/differential_scm_read_previous.yaml @@ -26,6 +26,8 @@ -e '/^!Time:/d' \ -e '/^!NVRAM config last updated at:/d' \ -e '/^!No configuration change since last restart/d' \ + -e '/^Current configuration :/d' \ + -e '/^Building configuration/d' \ "{{ network_backup_path }}/{{ network_backup_filename }}" | grep -v '^[[:space:]]*$' register: normalized_previous_result changed_when: false diff --git a/roles/backup/tasks/load_parsers.yaml b/roles/backup/tasks/load_parsers.yaml new file mode 100644 index 0000000..a930dac --- /dev/null +++ b/roles/backup/tasks/load_parsers.yaml @@ -0,0 +1,107 @@ +--- +# Load and execute parsers for feature extraction +# This task loads all parser definitions and applies them to the diff output + +- name: Find all parser definition files + ansible.builtin.find: + paths: "{{ role_path }}/parsers" + patterns: "*_parser.yaml" + recurse: false + register: parser_files + delegate_to: localhost + changed_when: false + tags: always + +- name: Load parser definitions + ansible.builtin.set_fact: + loaded_parsers: "{{ loaded_parsers | default([]) + [lookup('file', item.path) | from_yaml] }}" + loop: "{{ parser_files.files | default([]) }}" + loop_control: + label: "{{ item.path | basename }}" + delegate_to: localhost + when: parser_files.files is defined + tags: always + +- name: Initialize diff_features dictionary + ansible.builtin.set_fact: + diff_features: {} + tags: always + +- name: Execute each parser to extract features + ansible.builtin.set_fact: + diff_features: "{{ diff_features | combine({item.feature_name: ((diff_output.stdout | default('') | regex_findall(item.patterns.add | join('|'), multiline=True) | length) + (diff_output.stdout | default('') | regex_findall(item.patterns.remove | join('|'), multiline=True) | length))}) }}" + loop: "{{ loaded_parsers | default([]) }}" + loop_control: + label: "{{ item.parser_name }}" + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + - loaded_parsers is defined + tags: always + +- name: Fix description_changes counting (count modifications as 1, not 2) + ansible.builtin.set_fact: + # For description changes: count as max(add, remove) to avoid double-counting modifications + # When a description is changed: 1 remove + 1 add = 1 change (not 2) + description_add_count: "{{ diff_output.stdout | default('') | regex_findall('^\\+\\s*description', multiline=True) | length }}" + description_remove_count: "{{ diff_output.stdout | default('') | regex_findall('^-\\s*description', multiline=True) | length }}" + when: + - not 
skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + - diff_features is defined + - diff_features.description_changes is defined + tags: always + +- name: Set description_changes to max of add/remove counts + ansible.builtin.set_fact: + diff_features: "{{ diff_features | combine({'description_changes': ( + [description_add_count | default(0) | int, description_remove_count | default(0) | int] | max + )}) }}" + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + - diff_features is defined + - description_add_count is defined + - description_remove_count is defined + tags: always + +- name: Calculate total changes count (sum of all detected features) + ansible.builtin.set_fact: + # Total changes = sum of all detected feature changes + # This ensures we only count actual config changes, not metadata + diff_features: "{{ diff_features | combine({'total_changes': (diff_features.bgp_changes | default(0) | int) + (diff_features.acl_changes | default(0) | int) + (diff_features.security_changes | default(0) | int) + (diff_features.routing_changes | default(0) | int) + (diff_features.vlan_changes | default(0) | int) + (diff_features.interface_changes | default(0) | int) + (diff_features.description_changes | default(0) | int)}) }}" + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + - diff_features is defined + tags: always + +- name: Display parser execution summary + ansible.builtin.debug: + msg: | + Parser Execution Summary: + Parsers Loaded: {{ loaded_parsers | default([]) | length }} + Features Detected: + {% for feature, count in diff_features.items() %} + - {{ feature }}: {{ count }} + {% endfor %} + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + - debug_parsers | default(false) + tags: always + +- name: Debug description parser pattern matching + ansible.builtin.debug: + msg: | + Description Parser Debug: + - Pattern (add): {{ loaded_parsers | selectattr('parser_name', 'equalto', 'description_parser') | map(attribute='patterns.add') | first | default('NOT FOUND') }} + - Pattern (remove): {{ loaded_parsers | selectattr('parser_name', 'equalto', 'description_parser') | map(attribute='patterns.remove') | first | default('NOT FOUND') }} + - Diff lines matching add pattern: {{ diff_output.stdout | default('') | regex_findall('^\\+\\s*description', multiline=True) | length }} + - Diff lines matching remove pattern: {{ diff_output.stdout | default('') | regex_findall('^-\\s*description', multiline=True) | length }} + - Sample diff lines with 'description': {{ diff_output.stdout | default('') | regex_findall('.*description.*', multiline=True) | list | first(5) | join('\n') }} + when: + - not skip_severity_scoring | default(true) + - data_store['scm']['origin'] is defined + - debug_parsers | default(false) + tags: always diff --git a/roles/backup/tasks/publish.yaml b/roles/backup/tasks/publish.yaml index e2968d9..d360c87 100644 --- a/roles/backup/tasks/publish.yaml +++ b/roles/backup/tasks/publish.yaml @@ -20,6 +20,111 @@ - tag == "default" tags: always +- name: Clean up any stale Git lock files + ansible.builtin.file: + path: "{{ network_backup_path_root }}/.git/{{ item }}" + state: absent + loop: + - "HEAD.lock" + - "index.lock" + - "refs/heads/main.lock" + - "refs/heads/master.lock" + failed_when: false + changed_when: false + tags: always + +- name: Find and remove all Git lock files + ansible.builtin.find: + paths: "{{ 
network_backup_path_root }}/.git" + patterns: "*.lock" + recurse: true + register: git_lock_files + failed_when: false + changed_when: false + tags: always + +- name: Remove found Git lock files + ansible.builtin.file: + path: "{{ item.path }}" + state: absent + loop: "{{ git_lock_files.files | default([]) }}" + failed_when: false + tags: always + +- name: Check if there are actual changes to commit + ansible.builtin.command: + cmd: git -C "{{ network_backup_path_root }}" status --porcelain + register: git_status_check + changed_when: false + failed_when: false + tags: always + +- name: Set fact if there are changes + ansible.builtin.set_fact: + git_has_changes: "{{ git_status_check.stdout | length > 0 }}" + tags: always + +- name: Final cleanup of Git lock files before publish + ansible.builtin.find: + paths: "{{ network_backup_path_root }}/.git" + patterns: "*.lock" + recurse: true + register: final_git_lock_files + failed_when: false + changed_when: false + tags: always + +- name: Remove any remaining Git lock files before publish + ansible.builtin.file: + path: "{{ item.path }}" + state: absent + loop: "{{ final_git_lock_files.files | default([]) }}" + failed_when: false + tags: always + +- name: Get current branch name + ansible.builtin.command: + cmd: git -C "{{ network_backup_path_root }}" rev-parse --abbrev-ref HEAD + register: current_branch + changed_when: false + failed_when: false + tags: always + +- name: Remove problematic HEAD reference file if it exists (should not exist in normal repos) + ansible.builtin.file: + path: "{{ network_backup_path_root }}/.git/refs/heads/HEAD" + state: absent + failed_when: false + changed_when: false + tags: always + +- name: Remove any invalid or problematic files in refs/heads + ansible.builtin.file: + path: "{{ network_backup_path_root }}/.git/refs/heads/{{ item }}" + state: absent + loop: + - ".invalid" + - "HEAD" + failed_when: false + changed_when: false + tags: always + +- name: Force cleanup of HEAD lock using shell (more aggressive) + ansible.builtin.shell: | + cd "{{ network_backup_path_root }}" + rm -f .git/HEAD.lock + rm -f .git/refs/heads/*.lock + rm -f .git/refs/heads/HEAD + rm -f .git/refs/heads/.invalid + # Ensure HEAD is properly set + if ! 
git rev-parse --verify HEAD >/dev/null 2>&1; then + git symbolic-ref HEAD refs/heads/{{ current_branch.stdout | default('main') }} + fi + delegate_to: localhost + failed_when: false + changed_when: false + tags: always + - name: Publish the changes with tag ansible.scm.git_publish: path: "{{ network_backup_path_root }}" @@ -29,7 +134,9 @@ timeout: 120 ssh_key_file: "{{ data_store.scm.origin.get('ssh_key_file') if data_store.scm.origin.get('ssh_key_file') else omit }}" ssh_key_content: "{{ data_store.scm.origin.get('ssh_key_content') if data_store.scm.origin.get('ssh_key_content') else omit }}" - when: tag is defined + when: + - tag is defined + - git_has_changes | default(true) tags: always - name: Publish the changes @@ -40,7 +147,17 @@ timeout: 120 ssh_key_file: "{{ data_store.scm.origin.get('ssh_key_file') if data_store.scm.origin.get('ssh_key_file') else omit }}" ssh_key_content: "{{ data_store.scm.origin.get('ssh_key_content') if data_store.scm.origin.get('ssh_key_content') else omit }}" - when: tag is not defined + when: + - tag is not defined + - git_has_changes | default(true) + tags: always + +- name: Display message when no changes to publish + ansible.builtin.debug: + msg: "No changes detected in Git repository - skipping publish (file content is identical to existing backup)" + when: + - git_has_changes is defined + - not (git_has_changes | default(true)) tags: always - name: Remove cloned repository directory diff --git a/roles/backup/tasks/publish_bef_ai.yaml b/roles/backup/tasks/publish_bef_ai.yaml new file mode 100644 index 0000000..a9c4a5f --- /dev/null +++ b/roles/backup/tasks/publish_bef_ai.yaml @@ -0,0 +1,75 @@ +--- +- name: Set Timestamp + ansible.builtin.set_fact: + time: "{{ lookup('pipe', 'date \"+%Y-%m-%d-%H-%M\"') }}" + run_once: true + tags: always + +- name: Create default tag + ansible.builtin.set_fact: + default_tag: + annotation: "{{ time }}" + message: "backup_on: {{ time }}" + tags: always + +- name: Set default tag + ansible.builtin.set_fact: + default_tag: "{}" + when: + - tag is defined + - tag == "default" + tags: always + +- name: Check if there are actual changes to commit + ansible.builtin.command: + cmd: git -C "{{ network_backup_path_root }}" status --porcelain + register: git_status_check + changed_when: false + failed_when: false + tags: always + +- name: Set fact if there are changes + ansible.builtin.set_fact: + git_has_changes: "{{ git_status_check.stdout | length > 0 }}" + tags: always + +- name: Publish the changes with tag + ansible.scm.git_publish: + path: "{{ network_backup_path_root }}" + token: "{{ data_store.scm.origin.get('token') if data_store.scm.origin.get('token') else omit }}" + user: "{{ data_store['scm']['origin']['user'] | d({}) }}" + tag: "{{ tag }}" + timeout: 120 + ssh_key_file: "{{ data_store.scm.origin.get('ssh_key_file') if data_store.scm.origin.get('ssh_key_file') else omit }}" + ssh_key_content: "{{ data_store.scm.origin.get('ssh_key_content') if data_store.scm.origin.get('ssh_key_content') else omit }}" + when: + - tag is defined + - git_has_changes | default(true) + tags: always + +- name: Publish the changes + ansible.scm.git_publish: + path: "{{ network_backup_path_root }}" + token: "{{ data_store.scm.origin.get('token') if data_store.scm.origin.get('token') else omit }}" + user: "{{ data_store['scm']['origin']['user'] | d({}) }}" + timeout: 120 + ssh_key_file: "{{ data_store.scm.origin.get('ssh_key_file') if data_store.scm.origin.get('ssh_key_file') else omit }}" + ssh_key_content: "{{ 
data_store.scm.origin.get('ssh_key_content') if data_store.scm.origin.get('ssh_key_content') else omit }}" + when: + - tag is not defined + - git_has_changes | default(true) + tags: always + +- name: Display message when no changes to publish + ansible.builtin.debug: + msg: "No changes detected in Git repository - skipping publish (file content is identical to existing backup)" + when: + - git_has_changes is defined + - not (git_has_changes | default(true)) + tags: always + +- name: Remove cloned repository directory + ansible.builtin.file: + path: "{{ network_backup_path_root }}" + state: absent + tags: always diff --git a/roles/restore/README.md b/roles/restore/README.md index 44e722a..e13bc53 100644 --- a/roles/restore/README.md +++ b/roles/restore/README.md @@ -13,6 +13,13 @@ This role supports restoring from both local and remote (e.g., Git-based) data s - Supports restoration from local file systems or remote Git repositories. - Optionally compares the current running configuration with the backup and only performs restore if differences are found. +### SHA-256 Hash Verification +- Automatically verifies backup file integrity before restoring using SHA-256 hashes. +- Reads expected hash from `.sha256` file (if available). +- Calculates actual hash of backup file and compares with expected hash. +- Aborts restore operation if hash mismatch is detected (prevents restoring corrupted or tampered files). +- Hash verification is enabled by default but can be disabled if needed. + --- ## Role Variables @@ -29,9 +36,12 @@ This role supports restoring from both local and remote (e.g., Git-based) data s | `data_store.scm.origin.path` | Path in the repo where the file is located | `str` | No | N?A | | `data_store.scm.origin.ssh_key_file` | Path to the SSH private key file for Git authentication | `str` | Yes (if using SCM SSH) | N/A | | `data_store.scm.origin.ssh_key_content` | The content of the SSH private key | `str` | Yes (if using SCM SSH) | N/A | +| `verify_backup_hash` | Enable hash verification before restore. When `true`, verifies backup file integrity using SHA-256 hash | `bool` | No | `true` | > Either `data_store.local` or `data_store.scm` must be provided. +> **Note**: When `verify_backup_hash` is enabled (default), the role automatically looks for a `.sha256` file with the same name as the backup file. For example, if the backup file is `ios_device_backup.txt`, it will look for `ios_device_backup.txt.sha256` in the same directory. If the hash file exists, the role verifies the backup file integrity before restoring. If the hashes don't match, the restore operation is aborted for safety. 
+ --- ## Usage Examples @@ -125,6 +135,59 @@ This role supports restoring from both local and remote (e.g., Git-based) data s filename: "{{ ansible_date_time.date }}_{{ inventory_hostname }}.txt" path: "backups/{{ ansible_date_time.date }}/{{ inventory_hostname }}" ``` + +### Restore with Hash Verification + +```yaml +- name: Restore Network Configuration with Hash Verification + hosts: network + gather_facts: false + tasks: + - name: Run restore network config + ansible.builtin.include_role: + name: network.backup.restore + vars: + verify_backup_hash: true # Enable hash verification (default) + data_store: + scm: + origin: + user: + name: "your_name" + email: "your_email@example.com" + url: "https://github.com/youruser/your-backup-repo" + token: "{{ gh_token }}" + filename: "{{ ansible_date_time.date }}_{{ inventory_hostname }}.txt" + path: "backups/{{ ansible_date_time.date }}/{{ inventory_hostname }}" +``` + +> **Note**: When `verify_backup_hash: true` (default), the restore role automatically verifies the backup file integrity before restoring. It looks for a `.sha256` file with the same name as the backup file and compares the expected hash with the actual hash of the backup file. If the hashes don't match, the restore operation is aborted to prevent restoring corrupted or tampered configurations. + +### Disable Hash Verification (Not Recommended) + +```yaml +- name: Restore Network Configuration without Hash Verification + hosts: network + gather_facts: false + tasks: + - name: Run restore network config + ansible.builtin.include_role: + name: network.backup.restore + vars: + verify_backup_hash: false # Disable hash verification + data_store: + scm: + origin: + user: + name: "your_name" + email: "your_email@example.com" + url: "https://github.com/youruser/your-backup-repo" + token: "{{ gh_token }}" + filename: "{{ ansible_date_time.date }}_{{ inventory_hostname }}.txt" + path: "backups/{{ ansible_date_time.date }}/{{ inventory_hostname }}" +``` + +> **Warning**: Disabling hash verification is not recommended for production environments. Hash verification ensures backup file integrity and prevents restoring corrupted or tampered configurations. + ## License GNU General Public License v3.0 or later. 
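The restore-side check added in the next file greps the companion file for a `SHA-256 Hash:` line and compares its third field against a fresh `sha256sum` of the backup. As a minimal sketch (not part of this patch), a companion file in that format could be produced on the controller like this; the `backup_file` path and the `<filename>.sha256` naming are illustrative assumptions, since the backup role's own hash task is not shown here:

```yaml
---
# Hedged sketch: write a companion hash file in the "SHA-256 Hash: <digest>" format
# that the restore role's grep/awk extraction expects. The path below is hypothetical.
- name: Generate a companion SHA-256 file for an existing backup
  hosts: localhost
  gather_facts: false
  vars:
    backup_file: "/tmp/backups/2026-01-25_rtr1.txt"  # hypothetical backup location
  tasks:
    - name: Compute the SHA-256 checksum of the backup file
      ansible.builtin.stat:
        path: "{{ backup_file }}"
        checksum_algorithm: sha256
      register: backup_stat

    - name: Write the hash file next to the backup
      ansible.builtin.copy:
        content: "SHA-256 Hash: {{ backup_stat.stat.checksum }}\n"
        dest: "{{ backup_file }}.sha256"
        mode: '0600'
```

Running `sha256sum` against the same backup file should then print the digest stored in the companion file, which is the comparison the restore tasks below perform before the restore proceeds.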
diff --git a/roles/restore/tasks/hash_verification.yaml b/roles/restore/tasks/hash_verification.yaml new file mode 100644 index 0000000..06b488d --- /dev/null +++ b/roles/restore/tasks/hash_verification.yaml @@ -0,0 +1,66 @@ +--- +# Hash verification for restore operations +# Verifies backup file integrity before restore + +- name: Check if hash verification is enabled + ansible.builtin.set_fact: + verify_hash: "{{ verify_backup_hash | default(true) }}" + tags: always + +- name: Read SHA-256 hash file if it exists + ansible.builtin.stat: + path: "{{ network_restore_backup_path }}/{{ network_backup_restore_filename | regex_replace('\\.txt$', '') }}.sha256" + register: hash_file_stat + delegate_to: localhost + when: verify_hash + tags: always + +- name: Extract expected hash from hash file + ansible.builtin.shell: | + grep "SHA-256 Hash:" "{{ network_restore_backup_path }}/{{ network_backup_restore_filename | regex_replace('\\.txt$', '') }}.sha256" | awk '{print $3}' + register: expected_hash_result + changed_when: false + delegate_to: localhost + when: + - verify_hash + - hash_file_stat.stat.exists | default(false) + tags: always + +- name: Calculate actual hash of backup file + ansible.builtin.shell: | + sha256sum "{{ network_restore_backup_path }}/{{ network_backup_restore_filename }}" | awk '{print $1}' + register: actual_hash_result + changed_when: false + delegate_to: localhost + when: verify_hash + tags: always + +- name: Verify backup file integrity + ansible.builtin.assert: + that: + - not verify_hash or hash_file_stat.stat.exists | default(false) == false or expected_hash_result.stdout == actual_hash_result.stdout + fail_msg: | + Backup file integrity check FAILED! + Expected Hash: {{ expected_hash_result.stdout | default('N/A') }} + Actual Hash: {{ actual_hash_result.stdout | default('N/A') }} + The backup file may be corrupted or tampered with. + Restore operation aborted for safety. + success_msg: | + Backup file integrity verified successfully. + SHA-256 Hash: {{ actual_hash_result.stdout }} + when: verify_hash + tags: always + +- name: Display hash verification result + ansible.builtin.debug: + msg: | + ============================================ + BACKUP FILE INTEGRITY VERIFICATION + ============================================ + Backup File: {{ network_backup_restore_filename }} + Expected Hash: {{ expected_hash_result.stdout | default('Hash file not found - skipping verification') }} + Actual Hash: {{ actual_hash_result.stdout | default('N/A') }} + Status: {{ '✓ VERIFIED' if (expected_hash_result.stdout | default('') == actual_hash_result.stdout | default('')) else '⚠️ HASH FILE NOT FOUND' }} + ============================================ + when: verify_hash + tags: always diff --git a/roles/restore/tasks/restore.yaml b/roles/restore/tasks/restore.yaml index 12b57bd..2bef0f7 100644 --- a/roles/restore/tasks/restore.yaml +++ b/roles/restore/tasks/restore.yaml @@ -34,6 +34,10 @@ when: data_store.scm is defined tags: always +- name: Verify backup file hash before restore + ansible.builtin.include_tasks: hash_verification.yaml + tags: always + - name: Include tasks ansible.builtin.include_tasks: network.yaml tags: always \ No newline at end of file diff --git a/roles/restore/tmp_backup/network_automation_tools b/roles/restore/tmp_backup/network_automation_tools deleted file mode 160000 index cce4a3d..0000000 --- a/roles/restore/tmp_backup/network_automation_tools +++ /dev/null @@ -1 +0,0 @@ -Subproject commit cce4a3d4ff5f9fbc801aa8d747e2713f90e04cde
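The `load_parsers.yaml` task file above discovers `*_parser.yaml` definitions under `roles/backup/parsers/`, then joins each definition's `patterns.add` and `patterns.remove` lists into alternation regexes that are counted against the unified diff output and stored under `feature_name` in `diff_features`. The parser files themselves are not part of this patch, so the following is only a hypothetical sketch of the shape such a definition would need; the exact patterns are assumptions for illustration:

```yaml
---
# Hypothetical roles/backup/parsers/bgp_parser.yaml, shaped to match the keys that
# load_parsers.yaml reads (parser_name, feature_name, patterns.add, patterns.remove).
# The regexes are illustrative assumptions, not the collection's actual rules.
parser_name: bgp_parser
feature_name: bgp_changes
patterns:
  add:                      # matched against lines added in the unified diff
    - '^\+\s*router bgp '
    - '^\+\s*neighbor '
  remove:                   # matched against lines removed in the unified diff
    - '^-\s*router bgp '
    - '^-\s*neighbor '
```

With a definition like this loaded, every added or removed `router bgp` or `neighbor` line in the diff increments `diff_features.bgp_changes`, which the rules-based scorer in `diff_severity_scoring.yaml` then weights by a factor of 10.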