From 9e8882de996bc6f09d5b34255fa358006f79a877 Mon Sep 17 00:00:00 2001
From: Maarten Sijm <9739541+mpsijm@users.noreply.github.com>
Date: Wed, 30 Oct 2024 22:49:03 +0100
Subject: [PATCH] [contest] Extract commonly used "get from API and read JSON"
 function

---
 bin/contest.py              | 13 ++++++++++---
 bin/download_submissions.py | 19 +++++--------------
 bin/export.py               | 14 ++++----------
 bin/solve_stats.py          | 21 ++++++---------------
 bin/tools.py                |  8 ++------
 5 files changed, 27 insertions(+), 48 deletions(-)

diff --git a/bin/contest.py b/bin/contest.py
index d35e9f41..9798d916 100644
--- a/bin/contest.py
+++ b/bin/contest.py
@@ -80,9 +80,7 @@ def get_contest_id():
 def get_contests():
     url = f'{get_api()}/contests'
     verbose(f'query {url}')
-    r = call_api('GET', '/contests')
-    r.raise_for_status()
-    contests = json.loads(r.text)
+    contests = call_api_get_json('/contests')
     assert isinstance(contests, list)
     return contests
 
@@ -102,3 +100,12 @@ def call_api(method, endpoint, **kwargs):
     if not r.ok:
         error(r.text)
     return r
+
+
+def call_api_get_json(url):
+    r = call_api('GET', url)
+    r.raise_for_status()
+    try:
+        return r.json()
+    except Exception as e:
+        print(f'\nError in decoding JSON:\n{e}\n{r.text}')
diff --git a/bin/download_submissions.py b/bin/download_submissions.py
index 91d6e1cd..8738c98d 100644
--- a/bin/download_submissions.py
+++ b/bin/download_submissions.py
@@ -5,7 +5,7 @@
 
 import config
 import parallel
-from contest import call_api, get_contest_id
+from contest import call_api_get_json, get_contest_id
 from util import ProgressBar, fatal
 from verdicts import Verdict, from_string
@@ -14,15 +14,6 @@
 # bt download_submissions [--user <user>] [--password <password>] [--contest <contest_id>] [--api <api>]
 
 
-def req(url: str):
-    r = call_api('GET', url)
-    r.raise_for_status()
-    try:
-        return r.json()
-    except Exception as e:
-        fatal(f'\nError in decoding JSON:\n{e}\n{r.text()}')
-
-
 def download_submissions():
     contest_id = get_contest_id()
     if contest_id is None:
@@ -30,7 +21,7 @@ def download_submissions():
 
     bar = ProgressBar('Downloading metadata', count=4, max_len=len('submissions'))
     bar.start('submissions')
-    submissions = {s["id"]: s for s in req(f"/contests/{contest_id}/submissions")}
+    submissions = {s["id"]: s for s in call_api_get_json(f"/contests/{contest_id}/submissions")}
     bar.done()
 
     submission_digits = max(len(s['id']) for s in submissions.values())
@@ -45,11 +36,11 @@ def download_submissions():
 
     # Fetch account info so we can filter for team submissions
     bar.start('accounts')
-    accounts = {a['team_id']: a for a in req(f"/contests/{contest_id}/accounts")}
+    accounts = {a['team_id']: a for a in call_api_get_json(f"/contests/{contest_id}/accounts")}
     bar.done()
 
     bar.start('judgements')
-    for j in req(f"/contests/{contest_id}/judgements"):
+    for j in call_api_get_json(f"/contests/{contest_id}/judgements"):
         # Note that the submissions list only contains submissions that were submitted on time,
         # while the judgements list contains all judgements, therefore the submission might not exist.
         if j["submission_id"] in submissions:
@@ -80,7 +71,7 @@ def download_submission(s):
             Verdict.COMPILER_ERROR: 'compiler_error',
         }[verdict]
 
-        source_code = req(f"/contests/{contest_id}/submissions/{i}/source-code")
+        source_code = call_api_get_json(f"/contests/{contest_id}/submissions/{i}/source-code")
         if len(source_code) != 1:
             bar.warn(
                 f"\nSkipping submission {i}: has {len(source_code)} source files instead of 1."
diff --git a/bin/export.py b/bin/export.py
index 3be1d2c3..a7ad1658 100644
--- a/bin/export.py
+++ b/bin/export.py
@@ -510,17 +510,12 @@ def export_contest_and_problems(problems, statement_language):
         r.raise_for_status()
         log('Uploaded contest.pdf.')
 
-    def get_problems():
-        r = call_api('GET', f'/contests/{cid}/problems')
-        r.raise_for_status()
-        return yaml.load(r.text, Loader=yaml.SafeLoader)
-
     # Query the internal DOMjudge problem IDs.
-    ccs_problems = get_problems()
+    ccs_problems = call_api_get_json(f'/contests/{cid}/problems')
     if not ccs_problems:
         export_problems(problems, cid)
         # Need to query the API again, because `/problems/add-data` returns a list of IDs, not the full problem objects.
-        ccs_problems = get_problems()
+        ccs_problems = call_api_get_json(f'/contests/{cid}/problems')
 
     check_if_user_has_team()
 
@@ -537,9 +532,8 @@ def get_problem_id(problem):
 
 def check_if_user_has_team():
     # Not using the /users/{uid} route, because {uid} is either numeric or a string depending on the DOMjudge config.
-    r = call_api('GET', f'/users')
-    r.raise_for_status()
-    if not any(user['username'] == config.args.username and user['team'] for user in r.json()):
+    users = call_api_get_json(f'/users')
+    if not any(user['username'] == config.args.username and user['team'] for user in users):
         warn(f'User "{config.args.username}" is not associated with a team.')
         warn('Therefore, the jury submissions will not be run by the judgehosts.')
         log('Continue export to DOMjudge? [N/y]')
diff --git a/bin/solve_stats.py b/bin/solve_stats.py
index 38622b8f..d6957a11 100644
--- a/bin/solve_stats.py
+++ b/bin/solve_stats.py
@@ -3,7 +3,7 @@
 from pathlib import Path
 
 import config
-from contest import call_api, get_contest_id
+from contest import get_contest_id, call_api_get_json
 from util import ProgressBar
 
 # Note on multiprocessing:
@@ -18,18 +18,9 @@
 judgement_colors = {'AC': 'lime', 'WA': 'red', 'TLE': '#c0f', 'RTE': 'orange', '': 'skyblue'}
 
 
-def req(url: str):
-    r = call_api('GET', url)
-    r.raise_for_status()
-    try:
-        return r.json()
-    except Exception as e:
-        print(f'\nError in decoding JSON:\n{e}\n{r.text()}')
-
-
 # Turns an endpoint list result into an object, mapped by 'id'
-def req_assoc(url: str) -> dict[str, dict]:
-    return {o['id']: o for o in req(url)}
+def get_json_assoc(url: str) -> dict[str, dict]:
+    return {o['id']: o for o in call_api_get_json(url)}
 
 
 def time_string_to_minutes(time_string: str) -> float:
@@ -84,7 +75,7 @@ def generate_solve_stats(post_freeze: bool):
     bar = ProgressBar('Fetching', count=3, max_len=len('Contest data'))
 
     bar.start('Contest')
-    contest = req(url_prefix)
+    contest = call_api_get_json(url_prefix)
     bar.done()
 
     freeze_duration = time_string_to_minutes(contest['scoreboard_freeze_duration'])
@@ -94,7 +85,7 @@ def generate_solve_stats(post_freeze: bool):
     bar.start('Contest data')
     with Pool(num_jobs) as p:
         problems, submissions, teams, languages, judgement_types = p.map(
-            req_assoc,
+            get_json_assoc,
             [
                 url_prefix + endpoint
                 for endpoint in [
@@ -111,7 +102,7 @@ def generate_solve_stats(post_freeze: bool):
     judgement_types[''] = {'id': '', 'name': 'pending'}
 
     bar.start('Judgements')
-    for j in req(url_prefix + 'judgements'):
+    for j in call_api_get_json(url_prefix + 'judgements'):
         # Firstly, only one judgement should be 'valid': in case of rejudgings, this should be the "active" judgement.
         # Secondly, note that the submissions list only contains submissions that were submitted on time,
         # while the judgements list contains all judgements, therefore the submission might not exist.
diff --git a/bin/tools.py b/bin/tools.py
index 1e481956..3fc3094b 100755
--- a/bin/tools.py
+++ b/bin/tools.py
@@ -197,16 +197,12 @@ def get_pos(id):
     solves = dict()
 
     # Read set of problems
-    response = call_api('GET', f'/contests/{cid}/problems?public=true')
-    response.raise_for_status()
-    contest_problems = json.loads(response.text)
+    contest_problems = call_api_get_json(f'/contests/{cid}/problems?public=true')
     assert isinstance(problems, list)
     for p in contest_problems:
         solves[p['id']] = 0
 
-    response = call_api('GET', f'/contests/{cid}/scoreboard?public=true')
-    response.raise_for_status()
-    scoreboard = json.loads(response.text)
+    scoreboard = call_api_get_json(f'/contests/{cid}/scoreboard?public=true')
 
     for team in scoreboard['rows']:
         for p in team['problems']:
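
A minimal usage sketch of the extracted helper, assuming a caller that imports it from bin/contest.py; the contest id in the endpoint is a made-up placeholder, not taken from this patch:

    from contest import call_api_get_json

    # GET an endpoint relative to the configured API and return the decoded JSON body.
    problems = call_api_get_json('/contests/example-contest/problems')
    for problem in problems:
        print(problem['id'])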