-
Notifications
You must be signed in to change notification settings - Fork 0
143 lines (125 loc) · 5.38 KB
/
experiment_validation.yml
File metadata and controls
143 lines (125 loc) · 5.38 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
---
name: Experiment Validation

# NOTE: "on" is quoted because a bare `on` is a YAML 1.1 boolean key.
"on":
  issues:
    types: [opened, edited, reopened, labeled]
  workflow_dispatch:
    inputs:
      issue_number:
        description: "Monthly optimization task issue number"
        required: true

jobs:
  validate:
    # Run for issues carrying the monthly-optimization-task label, or for any
    # manual dispatch that supplies an issue number.
    if: contains(github.event.issue.labels.*.name, 'monthly-optimization-task') || inputs.issue_number != ''
    runs-on:
      - self-hosted
      - Linux
      - X64
    permissions:
      contents: read
      issues: write  # required to post the validation comment on the issue
    env:
      # Number of top-liquidity symbols to download; the repository variable
      # overrides the default of 90.
      DOWNLOAD_TOP_LIQUID: ${{ vars.DOWNLOAD_TOP_LIQUID || '90' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Setup Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.11"

      - name: Install dependencies
        run: |
          set -euo pipefail
          # Prefer the lock file for reproducible installs; fall back to
          # requirements.txt when no lock file is checked in.
          REQ_FILE="requirements-lock.txt"
          if [ ! -f "$REQ_FILE" ]; then REQ_FILE="requirements.txt"; fi
          python -m pip install --upgrade pip
          python -m pip install -r "$REQ_FILE"

      - name: Load issue context
        id: issue_context
        run: |
          python3 - <<'PY'
          """Fetch the triggering issue via the GitHub REST API and persist it
          to data/output/experiment_validation/issue_context.json for the
          downstream preparation step."""
          import json
          import os
          import urllib.request
          from pathlib import Path

          repo = os.environ["GITHUB_REPOSITORY"]
          issue_number = os.environ["ISSUE_NUMBER"]
          token = os.environ["GITHUB_TOKEN"]
          api_url = f"https://api.github.com/repos/{repo}/issues/{issue_number}"
          request = urllib.request.Request(
              api_url,
              headers={
                  "Accept": "application/vnd.github+json",
                  "Authorization": f"Bearer {token}",
                  "X-GitHub-Api-Version": "2022-11-28",
                  "User-Agent": "experiment-validation",
              },
          )
          with urllib.request.urlopen(request) as response:
              issue = json.load(response)
          issue_context = {
              "number": issue["number"],
              "title": issue["title"],
              # The API returns null (not a missing key) for an empty body, so
              # `.get("body", "")` alone would propagate None; coerce to "".
              "body": issue.get("body") or "",
          }
          output_dir = Path("data/output/experiment_validation")
          output_dir.mkdir(parents=True, exist_ok=True)
          (output_dir / "issue_context.json").write_text(
              json.dumps(issue_context, ensure_ascii=False, indent=2) + "\n",
              encoding="utf-8",
          )
          PY
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # Manual-dispatch input wins; otherwise the triggering issue's number.
          ISSUE_NUMBER: ${{ inputs.issue_number || github.event.issue.number }}

      - name: Prepare experiment validation payload
        id: experiment_payload
        # Emits step outputs (should_run, run_shadow_build,
        # run_walkforward_validation) consumed by the gated steps below.
        run: |
          python3 scripts/prepare_experiment_validation.py \
            --issue-context-file data/output/experiment_validation/issue_context.json \
            --output-dir data/output/experiment_validation >> "$GITHUB_OUTPUT"

      - name: Append task summary
        run: cat data/output/experiment_validation/task_summary.md >> "$GITHUB_STEP_SUMMARY"

      - name: Append skip reason
        if: steps.experiment_payload.outputs.should_run != 'true'
        run: |
          # The skip reason file is optional; append it only when present.
          if [ -f data/output/experiment_validation/skip_reason.txt ]; then
            cat data/output/experiment_validation/skip_reason.txt >> "$GITHUB_STEP_SUMMARY"
          fi

      - name: Download raw history for experiment validation
        if: steps.experiment_payload.outputs.should_run == 'true'
        run: |
          python3 scripts/download_history.py --top-liquid "${DOWNLOAD_TOP_LIQUID}" --force-exchange-info

      - name: Run monthly shadow build
        if: steps.experiment_payload.outputs.should_run == 'true' && steps.experiment_payload.outputs.run_shadow_build == 'true'
        run: |
          python3 scripts/run_monthly_shadow_build.py --skip-publish-dry-run

      - name: Run walk-forward validation
        if: steps.experiment_payload.outputs.should_run == 'true' && steps.experiment_payload.outputs.run_walkforward_validation == 'true'
        run: |
          python3 scripts/run_walkforward_validation.py

      - name: Render validation summary
        if: steps.experiment_payload.outputs.should_run == 'true'
        run: |
          python3 scripts/render_experiment_validation_summary.py \
            --payload-file data/output/experiment_validation/payload.json \
            --shadow-summary-file data/output/monthly_shadow_build_summary.json \
            --output-file data/output/experiment_validation/validation_summary.md

      - name: Append validation summary
        if: steps.experiment_payload.outputs.should_run == 'true'
        run: cat data/output/experiment_validation/validation_summary.md >> "$GITHUB_STEP_SUMMARY"

      - name: Post validation comment
        if: steps.experiment_payload.outputs.should_run == 'true'
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # Pass the issue number through env instead of interpolating
          # `${{ }}` directly into the shell script (script-injection
          # hardening, consistent with the "Load issue context" step).
          ISSUE_NUMBER: ${{ inputs.issue_number || github.event.issue.number }}
        run: |
          python3 scripts/post_experiment_validation_comment.py \
            --repo "${GITHUB_REPOSITORY}" \
            --issue-number "${ISSUE_NUMBER}" \
            --review-file data/output/experiment_validation/validation_summary.md \
            --run-url "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}"

      - name: Upload validation artifact
        # always() ensures partial outputs are captured even on failure.
        if: always()
        uses: actions/upload-artifact@v7
        with:
          name: experiment-validation-${{ inputs.issue_number || github.event.issue.number }}
          path: data/output/experiment_validation/