Commit e13b29db authored by Christoph Alt

job infos from all pages are now collected

parent 8edc4284
Pipeline #42924 passed
@@ -26,11 +26,18 @@ def get_api_url_pipelines(base_url: str, project_id: int, pipeline_id: int):

 def get_job_info(url: str):
-    jobs = requests.get(url)
-    if jobs.status_code != 200:
-        jobs.raise_for_status()
-    for job in jobs.json():
-        yield job
+    next_url = url
+    while True:
+        logger.info(f"requesting {next_url}")
+        jobs = requests.get(next_url)
+        if jobs.status_code != 200:
+            jobs.raise_for_status()
+        for job in jobs.json():
+            yield job
+        if (next_page := jobs.headers.get('x-next-page')):  # empty on the last page
+            next_url = f"{url}?page={next_page}"
+        else:
+            break


 def create_job_datapoint(job: dict) -> DataPoint:
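For orientation, a hedged sketch of how the paginated generator above might be driven. The hostname, project ID, and pipeline ID are placeholders, and it assumes get_api_url_pipelines returns the pipeline's jobs endpoint; none of this comes from the commit itself:

# Hypothetical usage; IDs and hostname are invented for illustration.
url = get_api_url_pipelines("https://gitlab.example.com", project_id=123, pipeline_id=456)
for job in get_job_info(url):
    print(job["name"], job["status"])

Because get_job_info follows the x-next-page response header until it is empty, the caller never needs to know how many pages the jobs API returns.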
import os
import logging
import pprint
from dataclasses import dataclass

import dotenv
from influxdb import InfluxDBClient
logger = logging.getLogger(__file__)
MISSING_DB_PW = """
@@ -52,14 +53,14 @@ class Uploader:
             database=config.database,
         )

-    def upload(self, points, dry_run=False, *args, **kwargs):
+    def upload(self, points, dry_run=False, *, time_precision='s', **kwargs):
         logger.info(f"Uploading: {pprint.pformat(points)}")
         if (common_tags := kwargs.get("tags")):
             logger.info(f"with common tags: {pprint.pformat(common_tags)}")
         if not dry_run:
             success = self.client.write_points(points,
-                                               *args,
+                                               time_precision=time_precision,
                                                **kwargs)
             if success:
                 logger.info(f"Uploaded {len(points)} items")
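The signature change makes time_precision keyword-only with a seconds default instead of smuggling it through *args into write_points, so a stray positional argument can no longer land in the wrong parameter. A sketch of a call site, with the measurement name, tag, and field values invented for illustration (the dict shape plus the tags and time_precision keywords are real parts of influxdb's write_points API):

# Hypothetical call; only the point format and keywords are real,
# the values are made up.
points = [{
    "measurement": "gitlab_jobs",
    "time": 1700000000,              # epoch seconds, matching time_precision='s'
    "fields": {"duration": 42.0},
}]
uploader.upload(points, dry_run=True, tags={"runner": "shared"})  # tags are applied to every point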
@@ -56,7 +56,7 @@ def file_time_to_sec(file_path) -> int:
     return int(os.path.getmtime(file_path))


-def time_conversion(time_stamp, *, pattern="%Y-%m-%d %H:%M:%S%z"):
+def time_conversion(time_stamp, *, pattern="%Y-%m-%d %H:%M:%S"):
     try:
         return int(time_stamp)
     except ValueError as e:
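Dropping %z means the pattern now expects naive timestamps without a UTC offset. The except branch is truncated in this view; a minimal sketch of what such a fallback could look like, assuming the function is meant to return epoch seconds (this is not the committed code):

from datetime import datetime

def time_conversion_sketch(time_stamp, *, pattern="%Y-%m-%d %H:%M:%S"):
    # Integer-like input passes straight through as epoch seconds.
    try:
        return int(time_stamp)
    except ValueError:
        # Otherwise parse a naive string such as "2021-06-01 12:00:00".
        return int(datetime.strptime(time_stamp, pattern).timestamp())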