[contest] Extract commonly used "get from API and read JSON" function
mpsijm committed Oct 30, 2024
1 parent a0fcbb8 commit 9e8882d
Showing 5 changed files with 27 additions and 48 deletions.
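The change is mechanical: every call site that did a GET, checked the HTTP status, and decoded the JSON body by hand now goes through one shared helper. A minimal before/after sketch, assuming (as the diff suggests) that call_api returns a requests.Response:

    # Before: each caller repeated the same three steps.
    r = call_api('GET', '/contests')
    r.raise_for_status()           # abort on HTTP errors
    contests = json.loads(r.text)  # decode the JSON body

    # After: one helper performs the GET, the status check, and the decoding.
    contests = call_api_get_json('/contests')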
13 changes: 10 additions & 3 deletions bin/contest.py
@@ -80,9 +80,7 @@ def get_contest_id():
 def get_contests():
     url = f'{get_api()}/contests'
     verbose(f'query {url}')
-    r = call_api('GET', '/contests')
-    r.raise_for_status()
-    contests = json.loads(r.text)
+    contests = call_api_get_json('/contests')
     assert isinstance(contests, list)
     return contests
 
@@ -102,3 +100,12 @@ def call_api(method, endpoint, **kwargs):
     if not r.ok:
         error(r.text)
     return r
+
+
+def call_api_get_json(url):
+    r = call_api('GET', url)
+    r.raise_for_status()
+    try:
+        return r.json()
+    except Exception as e:
+        print(f'\nError in decoding JSON:\n{e}\n{r.text()}')
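A note on the new helper's failure modes: an HTTP error status still surfaces (call_api logs the body, and raise_for_status() raises for 4xx/5xx responses), while a body that is not valid JSON only gets printed, so the function falls through and returns None. A hedged usage sketch with a hypothetical endpoint:

    from contest import call_api_get_json

    judgement_types = call_api_get_json('/contests/example/judgement-types')  # hypothetical contest id
    if judgement_types is None:
        # The helper only printed the decode error; callers that need the data
        # must handle the None themselves.
        raise SystemExit('Could not decode judgement types')
    for jt in judgement_types:
        print(jt['id'])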
19 changes: 5 additions & 14 deletions bin/download_submissions.py
@@ -5,7 +5,7 @@
 
 import config
 import parallel
-from contest import call_api, get_contest_id
+from contest import call_api_get_json, get_contest_id
 from util import ProgressBar, fatal
 from verdicts import Verdict, from_string
 
@@ -14,23 +14,14 @@
 # bt download_submissions [--user <username>] [--password <password>] [--contest <contest_id>] [--api <domjudge_url>]
 
 
-def req(url: str):
-    r = call_api('GET', url)
-    r.raise_for_status()
-    try:
-        return r.json()
-    except Exception as e:
-        fatal(f'\nError in decoding JSON:\n{e}\n{r.text()}')
-
-
 def download_submissions():
     contest_id = get_contest_id()
     if contest_id is None:
         fatal("No contest ID found. Set in contest.yaml or pass --contest-id <cid>.")
 
     bar = ProgressBar('Downloading metadata', count=4, max_len=len('submissions'))
     bar.start('submissions')
-    submissions = {s["id"]: s for s in req(f"/contests/{contest_id}/submissions")}
+    submissions = {s["id"]: s for s in call_api_get_json(f"/contests/{contest_id}/submissions")}
     bar.done()
 
     submission_digits = max(len(s['id']) for s in submissions.values())
@@ -45,11 +36,11 @@ def download_submissions():
 
     # Fetch account info so we can filter for team submissions
     bar.start('accounts')
-    accounts = {a['team_id']: a for a in req(f"/contests/{contest_id}/accounts")}
+    accounts = {a['team_id']: a for a in call_api_get_json(f"/contests/{contest_id}/accounts")}
     bar.done()
 
     bar.start('judgements')
-    for j in req(f"/contests/{contest_id}/judgements"):
+    for j in call_api_get_json(f"/contests/{contest_id}/judgements"):
         # Note that the submissions list only contains submissions that were submitted on time,
         # while the judgements list contains all judgements, therefore the submission might not exist.
         if j["submission_id"] in submissions:
@@ -80,7 +71,7 @@ def download_submission(s):
             Verdict.COMPILER_ERROR: 'compiler_error',
         }[verdict]
 
-        source_code = req(f"/contests/{contest_id}/submissions/{i}/source-code")
+        source_code = call_api_get_json(f"/contests/{contest_id}/submissions/{i}/source-code")
         if len(source_code) != 1:
             bar.warn(
                 f"\nSkipping submission {i}: has {len(source_code)} source files instead of 1."
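For context on the len(source_code) != 1 check: the source-code endpoint returns a list with one entry per file in the submission, and this script only handles single-file submissions. A hedged sketch of how such an entry might be decoded, assuming a DOMjudge-style base64-encoded 'source' field (that part is not shown in this diff):

    import base64

    files = call_api_get_json(f"/contests/{contest_id}/submissions/{i}/source-code")
    if len(files) == 1:
        # 'filename' and 'source' are assumed field names.
        code = base64.b64decode(files[0]['source']).decode()
        print(files[0]['filename'], len(code), 'characters')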
14 changes: 4 additions & 10 deletions bin/export.py
@@ -510,17 +510,12 @@ def export_contest_and_problems(problems, statement_language):
     r.raise_for_status()
     log('Uploaded contest.pdf.')
 
-    def get_problems():
-        r = call_api('GET', f'/contests/{cid}/problems')
-        r.raise_for_status()
-        return yaml.load(r.text, Loader=yaml.SafeLoader)
-
     # Query the internal DOMjudge problem IDs.
-    ccs_problems = get_problems()
+    ccs_problems = call_api_get_json(f'/contests/{cid}/problems')
     if not ccs_problems:
         export_problems(problems, cid)
         # Need to query the API again, because `/problems/add-data` returns a list of IDs, not the full problem objects.
-        ccs_problems = get_problems()
+        ccs_problems = call_api_get_json(f'/contests/{cid}/problems')
 
     check_if_user_has_team()
 
@@ -537,9 +532,8 @@ def get_problem_id(problem):
 
 def check_if_user_has_team():
     # Not using the /users/{uid} route, because {uid} is either numeric or a string depending on the DOMjudge config.
-    r = call_api('GET', f'/users')
-    r.raise_for_status()
-    if not any(user['username'] == config.args.username and user['team'] for user in r.json()):
+    users = call_api_get_json(f'/users')
+    if not any(user['username'] == config.args.username and user['team'] for user in users):
         warn(f'User "{config.args.username}" is not associated with a team.')
         warn('Therefore, the jury submissions will not be run by the judgehosts.')
         log('Continue export to DOMjudge? [N/y]')
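One behavioral detail in this file: the removed get_problems helper parsed the response with yaml.load(..., Loader=yaml.SafeLoader), whereas call_api_get_json uses Response.json(). For the JSON bodies the API sends, both should produce the same Python objects, since a JSON document is also valid YAML for practical purposes. A self-contained sanity check of that assumption (requires PyYAML; the response body is made up):

    import json
    import yaml

    body = '[{"id": "example", "label": "A", "time_limit": 2.0}]'
    assert yaml.load(body, Loader=yaml.SafeLoader) == json.loads(body)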
21 changes: 6 additions & 15 deletions bin/solve_stats.py
@@ -3,7 +3,7 @@
 from pathlib import Path
 
 import config
-from contest import call_api, get_contest_id
+from contest import get_contest_id, call_api_get_json
 from util import ProgressBar
 
 # Note on multiprocessing:
@@ -18,18 +18,9 @@
 judgement_colors = {'AC': 'lime', 'WA': 'red', 'TLE': '#c0f', 'RTE': 'orange', '': 'skyblue'}
 
 
-def req(url: str):
-    r = call_api('GET', url)
-    r.raise_for_status()
-    try:
-        return r.json()
-    except Exception as e:
-        print(f'\nError in decoding JSON:\n{e}\n{r.text()}')
-
-
 # Turns an endpoint list result into an object, mapped by 'id'
-def req_assoc(url: str) -> dict[str, dict]:
-    return {o['id']: o for o in req(url)}
+def get_json_assoc(url: str) -> dict[str, dict]:
+    return {o['id']: o for o in call_api_get_json(url)}
 
 
 def time_string_to_minutes(time_string: str) -> float:
@@ -84,7 +75,7 @@ def generate_solve_stats(post_freeze: bool):
     bar = ProgressBar('Fetching', count=3, max_len=len('Contest data'))
 
     bar.start('Contest')
-    contest = req(url_prefix)
+    contest = call_api_get_json(url_prefix)
     bar.done()
 
     freeze_duration = time_string_to_minutes(contest['scoreboard_freeze_duration'])
@@ -94,7 +85,7 @@
     bar.start('Contest data')
     with Pool(num_jobs) as p:
         problems, submissions, teams, languages, judgement_types = p.map(
-            req_assoc,
+            get_json_assoc,
             [
                 url_prefix + endpoint
                 for endpoint in [
@@ -111,7 +102,7 @@ def generate_solve_stats(post_freeze: bool):
     judgement_types[''] = {'id': '', 'name': 'pending'}
 
     bar.start('Judgements')
-    for j in req(url_prefix + 'judgements'):
+    for j in call_api_get_json(url_prefix + 'judgements'):
         # Firstly, only one judgement should be 'valid': in case of rejudgings, this should be the "active" judgement.
         # Secondly, note that the submissions list only contains submissions that were submitted on time,
         # while the judgements list contains all judgements, therefore the submission might not exist.
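The renamed get_json_assoc (previously req_assoc) only re-keys a list endpoint by 'id', which is what the surrounding code relies on when it looks up problems, teams, and judgement types. A tiny illustration with made-up data:

    teams_list = [{'id': '42', 'name': 'Team A'}, {'id': '43', 'name': 'Team B'}]
    teams = {o['id']: o for o in teams_list}  # what get_json_assoc does with the decoded JSON
    assert teams['43']['name'] == 'Team B'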
8 changes: 2 additions & 6 deletions bin/tools.py
@@ -197,16 +197,12 @@ def get_pos(id):
     solves = dict()
 
     # Read set of problems
-    response = call_api('GET', f'/contests/{cid}/problems?public=true')
-    response.raise_for_status()
-    contest_problems = json.loads(response.text)
+    contest_problems = call_api_get_json(f'/contests/{cid}/problems?public=true')
     assert isinstance(problems, list)
     for p in contest_problems:
         solves[p['id']] = 0
 
-    response = call_api('GET', f'/contests/{cid}/scoreboard?public=true')
-    response.raise_for_status()
-    scoreboard = json.loads(response.text)
+    scoreboard = call_api_get_json(f'/contests/{cid}/scoreboard?public=true')
 
     for team in scoreboard['rows']:
         for p in team['problems']:
