# Arkose gym occupation scraper: fetches each gym's public page and
# exports the parsed gauge percentage as a Prometheus metric.
import asyncio
import datetime
import logging
import re

import aiohttp

from prometheus_client import Gauge
# Module-level logger; handlers and level are configured by the application.
logger = logging.getLogger(__name__)
# Gym identifier -> public web page URL for each Arkose bouldering gym
# whose occupation gauge we scrape.
ARKOSES = dict(
    genevois="https://genevois.arkose.com/",
    massy="https://massy.arkose.com/",
    montreuil="https://montreuil.arkose.com/",
    nation="https://nation.arkose.com/",
)
|
|
|
|
# Prometheus gauge: last scraped occupation value per gym, labelled by
# the gym identifier (a key of ARKOSES).
BLOCKPARK_OCCUPATION = Gauge(
    "blockpark_occupation", "Blockpark occupation", ["blockpark"]
)
|
|
|
|
# Extracts the occupation percentage from the page's gauge widget.
# `[0-9]+` (not `*`): an empty width value must fail the match rather
# than yield an empty capture that would crash int(match[1]) downstream.
GAUGE_RE = re.compile(r'<div class="jauge-bar"><div style="width: ([0-9]+)%"></div>')
|
|
|
|
|
|
class FailedScrape(Exception):
    """Signals that an Arkose page could not be fetched or parsed."""
|
|
|
|
|
|
async def _scrape_arkose(arkose, session):
    """Fetch one Arkose gym page and return its occupation percentage.

    Args:
        arkose: key into ARKOSES identifying which gym page to fetch.
        session: an open aiohttp.ClientSession to perform the request with.

    Returns:
        The gauge value parsed from the page, as an int.

    Raises:
        FailedScrape: on a non-200 response, or when the gauge markup
            cannot be found or its value cannot be parsed.
    """
    async with session.get(ARKOSES[arkose]) as resp:
        if resp.status != 200:
            # Include the actual status code so failures are diagnosable
            # from the warning log alone.
            raise FailedScrape("Non-200 error ({})".format(resp.status))
        content = await resp.text()

    # Keep a copy of the fetched page for offline debugging of GAUGE_RE.
    with open("/tmp/arkose_{}.html".format(arkose), "w") as handle:
        handle.write(content)

    match = GAUGE_RE.search(content)
    if not match:
        raise FailedScrape("Could not extract gauge")
    try:
        return int(match[1])
    except ValueError as err:
        # Defensive: if the capture is ever non-numeric/empty, report a
        # scrape failure instead of leaking a bare ValueError to gather().
        raise FailedScrape("Could not parse gauge value") from err
|
|
|
|
|
|
async def scrape_metric_forever(interval: int):
    """Poll all Arkose pages forever, updating BLOCKPARK_OCCUPATION.

    Scrapes every gym concurrently, records each successful gauge value,
    logs (and skips) per-gym failures, then sleeps so that rounds start
    roughly every `interval` seconds.

    Args:
        interval: target number of seconds between scrape rounds; the
            sleep is shortened by the time the round itself took.
    """
    # Event-loop clock is monotonic: wall-clock adjustments (NTP, DST)
    # cannot skew the elapsed-time measurement the way datetime.now() can.
    loop = asyncio.get_event_loop()
    async with aiohttp.ClientSession() as session:
        while True:
            start_time = loop.time()
            occupation_data = await asyncio.gather(
                *[_scrape_arkose(arkose, session) for arkose in ARKOSES],
                return_exceptions=True
            )

            for arkose, occup in zip(ARKOSES, occupation_data):
                if isinstance(occup, Exception):
                    # Best-effort: one failing gym must not stop the loop
                    # or the other gyms' updates.
                    logger.warning("%s failed: %s", arkose, occup)
                    continue
                BLOCKPARK_OCCUPATION.labels(blockpark=arkose).set(occup)

            elapsed = loop.time() - start_time
            # Clamp at 0: if a round overran the interval, start the next
            # one immediately rather than passing a negative delay.
            await asyncio.sleep(max(0.0, interval - elapsed))
|