-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathdocker-compose.production.yml
More file actions
185 lines (175 loc) · 6.49 KB
/
docker-compose.production.yml
File metadata and controls
185 lines (175 loc) · 6.49 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
services:
  # Elasticsearch must be deployed as a separate Coolify Service
  # (use "Elasticsearch" in the Coolify services menu)
  # After deploying it, copy its internal Docker network URL and set
  # ELASTICSEARCH_URL in the environment variables below.

  # Scraper API - serves match data to prostaff-api
  scraper-api:
    build:
      context: .
      dockerfile: Dockerfile.production
    restart: unless-stopped
    networks:
      - coolify
      - vcgokoow00g0ggs0wwg4so4o
    expose:
      - "8000"
    environment:
      - RIOT_API_KEY=${RIOT_API_KEY}
      - ESPORTS_API_KEY=${ESPORTS_API_KEY}
      - ELASTICSEARCH_URL=${ELASTICSEARCH_URL}
      - SCRAPER_API_KEY=${SCRAPER_API_KEY}
      - CORS_ALLOWED_ORIGINS=${CORS_ALLOWED_ORIGINS:-https://api.prostaff.gg,https://prostaff.gg,https://www.prostaff.gg}
      - DEFAULT_PLATFORM_REGION=${DEFAULT_PLATFORM_REGION:-BR1}
      - API_PORT=8000
      - PYTHONUNBUFFERED=1
    healthcheck:
      test: ["CMD-SHELL", "curl -f http://localhost:8000/health || exit 1"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    labels:
      # Coolify Meta
      - coolify.managed=true
      - coolify.applicationId=2
      - coolify.type=application
      # Traefik - HTTPS router
      - traefik.enable=true
      - traefik.http.routers.prostaff-scraper.rule=Host(`scraper.prostaff.gg`)
      - traefik.http.routers.prostaff-scraper.entrypoints=https
      - traefik.http.routers.prostaff-scraper.tls=true
      - traefik.http.routers.prostaff-scraper.tls.certresolver=letsencrypt
      - traefik.http.routers.prostaff-scraper.middlewares=scraper-cors
      # Service
      - traefik.http.services.prostaff-scraper.loadbalancer.server.port=8000
      - traefik.http.services.prostaff-scraper.loadbalancer.server.scheme=http
      # HTTP to HTTPS redirect
      - traefik.http.routers.prostaff-scraper-http.rule=Host(`scraper.prostaff.gg`)
      - traefik.http.routers.prostaff-scraper-http.entrypoints=http
      - traefik.http.routers.prostaff-scraper-http.middlewares=scraper-redirect-https
      - traefik.http.middlewares.scraper-redirect-https.redirectscheme.scheme=https
      - traefik.http.middlewares.scraper-redirect-https.redirectscheme.permanent=true
      # CORS middleware
      - traefik.http.middlewares.scraper-cors.headers.accesscontrolallowmethods=GET,POST,OPTIONS
      - traefik.http.middlewares.scraper-cors.headers.accesscontrolalloworiginlist=https://api.prostaff.gg,https://prostaff.gg,https://www.prostaff.gg
      - traefik.http.middlewares.scraper-cors.headers.accesscontrolallowcredentials=true
      - traefik.http.middlewares.scraper-cors.headers.accesscontrolallowheaders=Content-Type,Authorization,X-API-Key
      - traefik.http.middlewares.scraper-cors.headers.accesscontrolmaxage=86400
      # Network
      - traefik.docker.network=coolify

  # Enrichment daemon - enriches games with items/runes/KDA
  # Queries Leaguepedia (1 req/9s) then Riot Match-V5 for each unenriched game.
  # Batch of 50 games = ~7.5min. Checks every 30min for new games to enrich.
  enrichment-daemon:
    build:
      context: .
      dockerfile: Dockerfile.production
    restart: unless-stopped
    networks:
      - coolify
      - vcgokoow00g0ggs0wwg4so4o
    environment:
      - RIOT_API_KEY=${RIOT_API_KEY}
      - ELASTICSEARCH_URL=${ELASTICSEARCH_URL}
      - DEFAULT_PLATFORM_REGION=${DEFAULT_PLATFORM_REGION:-BR1}
      - PYTHONUNBUFFERED=1
    # No HTTP server in this service — disable inherited Dockerfile healthcheck
    healthcheck:
      disable: true
    command:
      - "python"
      - "etl/enrichment_pipeline.py"
      - "--daemon"
      - "--production"
      - "--interval"
      - "30"
      - "--batch"
      - "50"
    labels:
      - coolify.managed=true
      - coolify.applicationId=2
      - coolify.type=application

  # Scraper Cron - periodic sync using the production ETL pipeline daemon
  scraper-cron:
    build:
      context: .
      dockerfile: Dockerfile.production
    restart: unless-stopped
    networks:
      - coolify
      - vcgokoow00g0ggs0wwg4so4o
    environment:
      - RIOT_API_KEY=${RIOT_API_KEY}
      - ESPORTS_API_KEY=${ESPORTS_API_KEY}
      - ELASTICSEARCH_URL=${ELASTICSEARCH_URL}
      - DEFAULT_PLATFORM_REGION=${DEFAULT_PLATFORM_REGION:-BR1}
      - PYTHONUNBUFFERED=1
      # Leagues to sync (space-separated). Example: "CBLOL LCS LEC LCK"
      - SYNC_LEAGUES=${SYNC_LEAGUES:-CBLOL}
      # Sync interval in hours
      - SYNC_INTERVAL_HOURS=${SYNC_INTERVAL_HOURS:-1}
      # Match limit per league per run
      - SYNC_LIMIT=${SYNC_LIMIT:-100}
    # No HTTP server in this service — disable inherited Dockerfile healthcheck
    healthcheck:
      disable: true
    # NOTE(review): with list-form command, Compose passes ${SYNC_LEAGUES} as a
    # SINGLE argv token (no shell word-splitting) — "CBLOL LCS" arrives as one
    # argument to --leagues. Confirm the pipeline splits on whitespace itself.
    command:
      - "python"
      - "etl/competitive_pipeline.py"
      - "--daemon"
      - "--production"
      - "--interval"
      - "${SYNC_INTERVAL_HOURS:-1}"
      - "--leagues"
      - "${SYNC_LEAGUES:-CBLOL}"
      - "--limit"
      - "${SYNC_LIMIT:-100}"
    labels:
      - coolify.managed=true
      - coolify.applicationId=2
      - coolify.type=application

  # Historical Backfill — imports ALL games for every CBLOL/LTA Sul edition
  # from Leaguepedia into Elasticsearch. Fully resumable via progress file.
  #
  # Runs once per deploy, processes pending/failed tournaments, then exits.
  # On crash or rate-limit, restarts automatically and resumes from last checkpoint.
  #
  # After the backfill populates ES, the Rails API's scheduled
  # HistoricalBackfillJob pulls the data into the Rails DB.
  #
  # Estimated time for a full CBLOL history (~50 tournaments): 8-12 hours.
  # Subsequent runs only process new/failed tournaments: minutes.
  backfill:
    build:
      context: .
      dockerfile: Dockerfile.production
    restart: on-failure
    networks:
      - coolify
      - vcgokoow00g0ggs0wwg4so4o
    environment:
      - ELASTICSEARCH_URL=${ELASTICSEARCH_URL}
      - PYTHONUNBUFFERED=1
      # Leagues to backfill (space-separated). Aliases (LTA Sul) are resolved automatically.
      - BACKFILL_LEAGUE=${BACKFILL_LEAGUE:-CBLOL}
      - BACKFILL_MIN_YEAR=${BACKFILL_MIN_YEAR:-2013}
    command:
      - "python"
      - "etl/historical_backfill.py"
      - "--league"
      - "${BACKFILL_LEAGUE:-CBLOL}"
      - "--min-year"
      - "${BACKFILL_MIN_YEAR:-2013}"
    # Disable the Dockerfile healthcheck (no HTTP server in this service)
    healthcheck:
      disable: true
    labels:
      - coolify.managed=true
      - coolify.applicationId=2
      - coolify.type=application

networks:
  coolify:
    external: true
  vcgokoow00g0ggs0wwg4so4o:
    external: true