-
Notifications
You must be signed in to change notification settings - Fork 0
337 lines (276 loc) · 10.9 KB
/
weekly-data-refresh.yml
File metadata and controls
337 lines (276 loc) · 10.9 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
---
name: Weekly Data Refresh

# This workflow automates the weekly extraction and validation of alignment
# research data from the StampyAI Alignment Research Dataset on Hugging Face.
#
# Schedule: Every Monday at 2:00 AM UTC
# Manual trigger: Available via workflow_dispatch
#
# Process:
#   1. Extract delta (new records since last run)
#   2. Validate extracted data against schema
#   3. Commit and push updated data to repository
#
# Requirements:
#   - HF_TOKEN secret (optional but recommended for higher rate limits)
#   - Write permissions for repository

on:
  schedule:
    # Run every Monday at 2:00 AM UTC
    # Cron format: minute hour day-of-month month day-of-week
    - cron: '0 2 * * 1'
  workflow_dispatch:
    # Allow manual triggering from GitHub UI
    inputs:
      mode:
        description: 'Extraction mode'
        required: false
        default: 'delta'
        type: choice
        options:
          - delta
          - full
      limit:
        description: 'Record limit (optional, for testing)'
        required: false
        type: string

env:
  PYTHON_VERSION: '3.11'
  LOG_RETENTION_DAYS: 30

jobs:
  refresh-alignment-data:
    name: Extract and Validate Alignment Research Data
    runs-on: ubuntu-latest
    # Guard against a hung download/extraction consuming runner time forever.
    timeout-minutes: 60
    permissions:
      contents: write  # Required to commit and push changes
    steps:
      # ============================================================
      # Setup
      # ============================================================
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Full history for accurate delta detection

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: 'pip'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install datasets huggingface_hub jsonschema jsonlines
          echo "✓ Dependencies installed"

      - name: Display environment info
        run: |
          echo "Python version: $(python --version)"
          echo "Pip version: $(pip --version)"
          echo "Working directory: $(pwd)"
          echo "Disk space:"
          df -h

      # ============================================================
      # Data Extraction
      # ============================================================
      - name: Extract alignment research data
        env:
          HF_TOKEN: ${{ secrets.HF_TOKEN }}
          # `inputs` is empty on schedule triggers, so fall back to delta mode.
          MODE: ${{ inputs.mode || 'delta' }}
          LIMIT: ${{ inputs.limit || '' }}
        run: |
          echo "=================================================="
          echo "Starting Alignment Research Data Extraction"
          echo "=================================================="
          echo "Mode: ${MODE}"
          echo "Limit: ${LIMIT:-unlimited}"
          echo "Authentication: $([[ -n "$HF_TOKEN" ]] && echo "enabled (HF_TOKEN)" || echo "disabled (anonymous)")"
          echo ""

          # Build the command as a bash array so arguments containing
          # whitespace survive intact (a flat string would word-split).
          CMD=(python data/raw/alignment_research/extraction_script.py --mode "${MODE}")

          # Add limit if specified
          if [[ -n "${LIMIT}" ]]; then
            CMD+=(--limit "${LIMIT}")
          fi

          # Fall back to anonymous access when no token is configured
          if [[ -z "$HF_TOKEN" ]]; then
            CMD+=(--no-auth)
          fi

          echo "Executing: ${CMD[*]}"
          echo ""

          # GitHub runs this script with `bash -e`, so a plain invocation
          # followed by `EXIT_CODE=$?` would abort before the capture on
          # failure. Capture the status inline instead.
          EXIT_CODE=0
          "${CMD[@]}" || EXIT_CODE=$?

          echo ""
          echo "=================================================="
          if [ "$EXIT_CODE" -eq 0 ]; then
            echo "✓ Extraction completed successfully"
          else
            echo "✗ Extraction failed with exit code ${EXIT_CODE}"
          fi
          echo "=================================================="
          exit "$EXIT_CODE"

      # ============================================================
      # Data Validation
      # ============================================================
      - name: Validate extracted data
        run: |
          echo "=================================================="
          echo "Validating Extracted Data"
          echo "=================================================="

          # Find the most recent dump directory (trailing slash retained)
          LATEST_DUMP=$(ls -td data/raw/alignment_research/dumps/*/ 2>/dev/null | head -n 1)
          if [[ -z "$LATEST_DUMP" ]]; then
            echo "✗ No dump directory found"
            exit 1
          fi

          echo "Validating: ${LATEST_DUMP}"
          echo ""

          # Run validation (skip ASCII check for efficiency). Capture the
          # status inline — `bash -e` would otherwise abort before `$?`.
          EXIT_CODE=0
          python scripts/validation/validate_alignment_research.py \
            "${LATEST_DUMP}" \
            --no-ascii-check || EXIT_CODE=$?

          echo ""
          echo "=================================================="
          if [ "$EXIT_CODE" -eq 0 ]; then
            echo "✓ Validation passed"
          else
            echo "✗ Validation failed"
          fi
          echo "=================================================="
          exit "$EXIT_CODE"

      # ============================================================
      # Statistics and Reporting
      # ============================================================
      - name: Generate extraction report
        if: success()
        run: |
          echo "=================================================="
          echo "Extraction Statistics"
          echo "=================================================="

          # Find latest dump
          LATEST_DUMP=$(ls -td data/raw/alignment_research/dumps/*/ 2>/dev/null | head -n 1)
          if [[ -z "$LATEST_DUMP" ]]; then
            echo "No dump directory found — nothing to report"
            echo "=================================================="
            exit 0
          fi

          echo "Dump directory: ${LATEST_DUMP}"
          echo ""

          # LATEST_DUMP ends in '/', so this resolves to <dump>/_metadata.json
          METADATA_FILE="${LATEST_DUMP}_metadata.json"
          if [[ -f "$METADATA_FILE" ]]; then
            echo "Metadata:"
            # Pass the path via argv (never splice shell values into Python
            # source). Heredoc body sits at the scalar's base indent so the
            # terminator is recognized and Python sees column-0 statements.
            python - "$METADATA_FILE" <<'PY'
          import json
          import sys

          with open(sys.argv[1]) as f:
              meta = json.load(f)

          print(f"  Source: {meta.get('source_name')}")
          print(f"  Extraction date: {meta.get('extraction_date')}")
          print(f"  Extraction type: {meta.get('extraction_type')}")
          print(f"  Record count: {meta.get('record_count')}")
          print(f"  Status: {meta.get('extraction_status')}")

          stats = meta.get('extraction_statistics', {})
          if stats:
              print(f"  Records fetched: {stats.get('records_fetched')}")
              print(f"  Records filtered: {stats.get('records_filtered')}")
              print(f"  Records written: {stats.get('records_written')}")
              print(f"  Errors: {stats.get('errors_encountered')}")
              print(f"  Duration: {stats.get('duration_seconds', 0):.1f}s")
          PY
          fi

          echo ""
          echo "Files:"
          ls -lh "${LATEST_DUMP}"
          echo ""
          echo "Total dump size:"
          du -sh "${LATEST_DUMP}"
          echo "=================================================="

      # ============================================================
      # Upload Logs as Artifacts
      # ============================================================
      - name: Upload extraction logs
        if: always()  # Upload logs even if workflow fails
        uses: actions/upload-artifact@v4
        with:
          name: alignment-extraction-logs-${{ github.run_number }}
          path: |
            logs/alignment_extraction/
            logs/alignment_validation/
          retention-days: ${{ env.LOG_RETENTION_DAYS }}
          if-no-files-found: warn

      # ============================================================
      # Git Commit and Push
      # ============================================================
      - name: Check for changes
        id: check_changes
        run: |
          # Stage data; logs/ may not exist, and `git add` on a missing
          # pathspec would fail the step under `bash -e`.
          git add data/
          if [ -d logs ]; then
            git add logs/
          fi

          if git diff --staged --quiet; then
            echo "has_changes=false" >> "$GITHUB_OUTPUT"
            echo "No changes to commit"
          else
            echo "has_changes=true" >> "$GITHUB_OUTPUT"
            echo "Changes detected"
            # Show what changed
            echo ""
            echo "Changed files:"
            git diff --staged --name-only
          fi

      - name: Commit and push changes
        if: steps.check_changes.outputs.has_changes == 'true'
        run: |
          # Configure git identity for the automated commit
          git config user.name "GitHub Actions Bot"
          git config user.email "actions@github.com"

          # Get timestamp for commit message
          TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M:%S UTC")

          # Get record count from latest metadata; pass the path via argv so
          # shell values never get spliced into Python source.
          LATEST_DUMP=$(ls -td data/raw/alignment_research/dumps/*/ 2>/dev/null | head -n 1)
          RECORD_COUNT="unknown"
          if [[ -f "${LATEST_DUMP}_metadata.json" ]]; then
            RECORD_COUNT=$(python -c "import json, sys; print(json.load(open(sys.argv[1])).get('record_count', 'unknown'))" "${LATEST_DUMP}_metadata.json")
          fi

          # Build the commit message outside the workspace so it is never
          # accidentally committed. Unquoted EOF so ${TIMESTAMP} etc. expand.
          MSG_FILE="${RUNNER_TEMP}/commit_message.txt"
          cat > "$MSG_FILE" << EOF
          data: weekly alignment research refresh

          Automated data refresh from StampyAI Alignment Research Dataset

          - Timestamp: ${TIMESTAMP}
          - Records: ${RECORD_COUNT}
          - Workflow run: ${{ github.run_number }}
          - Triggered by: ${{ github.event_name }}

          🤖 Generated with [Claude Code](https://claude.com/claude-code)

          Co-Authored-By: GitHub Actions <actions@github.com>
          EOF

          # Commit with detailed message
          git commit -F "$MSG_FILE"

          # Push changes
          git push
          echo "✓ Changes committed and pushed successfully"

      - name: No changes to commit
        if: steps.check_changes.outputs.has_changes == 'false'
        run: |
          echo "ℹ No new data extracted - repository is up to date"
          echo "This is expected for delta mode when there are no new records"

      # ============================================================
      # Failure Notification
      # ============================================================
      - name: Workflow failure summary
        if: failure()
        run: |
          echo "=================================================="
          echo "⚠️ WORKFLOW FAILED"
          echo "=================================================="
          echo ""
          echo "Please check:"
          echo "  1. Logs uploaded as artifacts"
          echo "  2. HuggingFace dataset availability"
          echo "  3. Rate limiting (add HF_TOKEN secret if needed)"
          echo "  4. Schema validation errors"
          echo ""
          echo "Logs available in workflow artifacts for 30 days"
          echo "=================================================="

# Workflow summary:
# - Runs weekly on Monday at 2am UTC
# - Can be triggered manually with custom parameters
# - Extracts delta (new records) by default
# - Validates all extracted data
# - Commits and pushes if changes detected
# - Uploads logs as artifacts for debugging
# - Handles failures gracefully with informative messages