Bugfix/jsonschema (#577)
* Run the checks against the solution schema and the checks schemas as well

* Fix error on tests. Didn't run them locally :(

* Added some new tests

* Some small changes to the tests in the DAGs so that they run on the PR and we can see that we do not have any new issues with how the application's solve actually works

* Fixed some tests in cornflow-client and added support for future versions of pandas

* Changed the way the checks are run

* Fix failing test?

* Missing checks on instances

* Fixed timer instance

* Small adjustments for the tests
ggsdc authored Dec 5, 2024
1 parent a34a01d commit ed58530
Showing 21 changed files with 333 additions and 116 deletions.
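Most of the DAG changes below repeat one pattern: each Instance (and the timer experiment) gains a check() stub that returns an empty dict, and the test suite now also asserts that schema_checks is a dict and validates check results against it with Draft7Validator. A minimal sketch of that pattern, assuming the same cornflow_client and jsonschema imports these DAGs and tests already use; the empty-schema instance below is purely illustrative:

```python
from jsonschema import Draft7Validator

from cornflow_client import InstanceCore, get_empty_schema


class Instance(InstanceCore):
    schema = get_empty_schema()
    schema_checks = get_empty_schema()  # JSON schema describing the shape of check results

    def check(self) -> dict:
        # Keys are check names, values are lists of offending records; empty means all good.
        return dict()


# Illustrative usage, mirroring what the updated test_dags.py does with Draft7Validator:
instance = Instance.from_dict({})
validator = Draft7Validator(Instance.schema_checks)
if not validator.is_valid(instance.check()):
    raise Exception("The instance checks have invalid format")
```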
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/bar_cutting/core/instance.py
@@ -38,6 +38,9 @@ def to_dict(self) -> dict:

return pickle.loads(pickle.dumps(data_p, -1))

+ def check(self):
+     return dict()

def get_bars(self) -> TupList:
"""
Returns a TupList with the ids of the bars.
5 changes: 4 additions & 1 deletion cornflow-dags/DAG/dag_timer.py
@@ -9,6 +9,9 @@ class Instance(InstanceCore):
schema = get_empty_schema()
schema_checks = get_empty_schema()

+ def check(self):
+     return dict()


class Solution(SolutionCore):
schema = get_empty_schema()
@@ -30,7 +33,7 @@ def solve(self, options):
def get_objective(self) -> float:
return 0

- def check_solution(self, *args, **kwargs):
+ def check_solution(self):
return dict()


3 changes: 3 additions & 0 deletions cornflow-dags/DAG/facility_location/core/instance.py
@@ -56,6 +56,9 @@ def to_dict(self) -> dict:
data_p["parameters"] = self.data["parameters"]
return pickle.loads(pickle.dumps(data_p, -1))

+ def check(self):
+     return dict()

def get_suppliers(self):
return self.data["suppliers"].keys_tl()

3 changes: 3 additions & 0 deletions cornflow-dags/DAG/graph_coloring/core/instance.py
@@ -8,5 +8,8 @@ class Instance(InstanceCore):
schema = load_json(os.path.join(os.path.dirname(__file__), "../schemas/input.json"))
schema_checks = get_empty_schema()

+ def check(self):
+     return dict()

def get_pairs(self):
return pt.TupList((el["n1"], el["n2"]) for el in self.data["pairs"])
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/knapsack/core/instance.py
@@ -61,6 +61,9 @@ def to_dict(self):

return data_dict

+ def check(self):
+     return dict()

def get_objects_values(self):
return {
self.data["ids"][i]: self.data["values"][i]
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/roadef/core/instance.py
@@ -116,6 +116,9 @@ def to_dict(self):
._update(sources_tables)
)

+ def check(self):
+     return dict()

@staticmethod
def dict_to_int_or_float(data_dict):
"""
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/solve_model_dag/__init__.py
@@ -29,6 +29,9 @@ class Instance(InstanceCore):
schema = get_pulp_jsonschema()
schema_checks = get_empty_schema()

+ def check(self):
+     return dict()


class Solution(SolutionCore):
schema = get_pulp_jsonschema()
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/tsp/core/instance.py
@@ -59,5 +59,8 @@ def to_tsplib95(self):
)
return tsp.models.StandardProblem(**dict_data)

+ def check(self):
+     return dict()

def get_arcs(self) -> TupList:
return self.data["arcs"]
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/two_dimension_bin_packing/core/instance.py
@@ -32,6 +32,9 @@ def to_dict(self):
data_p["parameters"] = self.data["parameters"]
return pickle.loads(pickle.dumps(data_p, -1))

+ def check(self):
+     return dict()

def _get_property(self, key, prop) -> SuperDict:
return self.data[key].get_property(prop)

3 changes: 3 additions & 0 deletions cornflow-dags/DAG/vrp/core/instance.py
@@ -33,6 +33,9 @@ def to_dict(self):

return data

+ def check(self):
+     return dict()

def get_nodes(self):
return [i["n"] for i in self.data["demand"].values()]

10 changes: 6 additions & 4 deletions cornflow-dags/tests/test_dags.py
@@ -36,6 +36,7 @@ def test_schema_load(self):
self.assertIsInstance(self.app.instance.schema, dict)
self.assertIsInstance(self.app.solution.schema, dict)
self.assertIsInstance(self.app.schema, dict)
+ self.assertIsInstance(self.app.instance.schema_checks, dict)

def test_config_requirements(self):
keys = {"solver", "timeLimit"}
@@ -85,8 +86,9 @@ def test_try_solving_testcase(self, config=None):
instance = self.app.instance.from_dict(instance_data)
solution = self.app.solution.from_dict(solution_test)
s = self.app.get_default_solver_name()
- experim = self.app.get_solver(s)(instance, solution)
- checks = experim.check_solution()
+ experiment = self.app.get_solver(s)(instance, solution)
+ self.assertIsInstance(experiment.schema_checks, dict)
+ checks = experiment.check_solution()
failed_checks = [k for k, v in checks.items() if len(v) > 0]
if len(failed_checks) > 0:
print(
@@ -97,9 +99,9 @@
if len(values) > 0:
print(f"{check}: {values}")

- experim.get_objective()
+ experiment.get_objective()

- validator = Draft7Validator(experim.schema_checks)
+ validator = Draft7Validator(experiment.schema_checks)
if not validator.is_valid(solution_check):
raise Exception("The solution checks have invalid format")
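For context, the filtering in this test assumes check_solution() returns a dict keyed by check name whose values are lists of offending elements. A tiny illustration with made-up check names (not taken from any DAG):

```python
# Hypothetical check results; the check names are illustrative only.
checks = {
    "missing_nodes": [],          # empty list: the check passed
    "capacity_exceeded": [3, 7],  # non-empty list: the check failed for these elements
}
failed_checks = [k for k, v in checks.items() if len(v) > 0]
assert failed_checks == ["capacity_exceeded"]
```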

23 changes: 18 additions & 5 deletions cornflow-server/cornflow/endpoints/login.py
@@ -34,6 +34,7 @@ class LoginBaseEndpoint(BaseMetaResource):
"""
Base endpoint to perform a login action from a user
"""

def __init__(self):
super().__init__()
self.ldap_class = LDAPBase
@@ -102,7 +103,9 @@ def auth_ldap_authenticate(self, username, password):
raise InvalidCredentials()
user = self.data_model.get_one_object(username=username)
if not user:
current_app.logger.info(f"LDAP user {username} does not exist and is created")
current_app.logger.info(
f"LDAP user {username} does not exist and is created"
)
email = ldap_obj.get_user_email(username)
if not email:
email = ""
@@ -122,10 +125,14 @@

except IntegrityError as e:
db.session.rollback()
current_app.logger.error(f"Integrity error on user role assignment on log in: {e}")
current_app.logger.error(
f"Integrity error on user role assignment on log in: {e}"
)
except DBAPIError as e:
db.session.rollback()
current_app.logger.error(f"Unknown error on user role assignment on log in: {e}")
current_app.logger.error(
f"Unknown error on user role assignment on log in: {e}"
)

return user

@@ -163,7 +170,9 @@ def auth_oid_authenticate(self, token):
user = self.data_model.get_one_object(username=username)

if not user:
current_app.logger.info(f"OpenID user {username} does not exist and is created")
current_app.logger.info(
f"OpenID user {username} does not exist and is created"
)

data = {"username": username, "email": username}

@@ -183,7 +192,11 @@

def check_last_password_change(user):
if user.pwd_last_change:
- if user.pwd_last_change + timedelta(days=int(current_app.config["PWD_ROTATION_TIME"])) < datetime.utcnow():
+ if (
+     user.pwd_last_change
+     + timedelta(days=int(current_app.config["PWD_ROTATION_TIME"]))
+     < datetime.utcnow()
+ ):
return True
return False
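The multi-line condition above is the same rotation check as before, only reformatted. As a quick illustration of the logic (the 120-day value is an arbitrary example, not the project's configured default):

```python
from datetime import datetime, timedelta

# Stand-ins for user.pwd_last_change and current_app.config["PWD_ROTATION_TIME"]:
pwd_last_change = datetime.utcnow() - timedelta(days=150)
rotation_days = 120

# Same comparison as check_last_password_change(): True means the password must be rotated.
must_rotate = pwd_last_change + timedelta(days=rotation_days) < datetime.utcnow()
print(must_rotate)  # True, since 150 days exceeds the 120-day rotation window
```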

17 changes: 9 additions & 8 deletions libs/client/cornflow_client/constants.py
@@ -1,6 +1,7 @@
"""
Constants values used in schemas functions.
"""

import pulp as pl
from ortools.sat.python import cp_model

@@ -119,7 +120,6 @@
("timeLimit", "pyomo", "cbc"): "sec",
("pump_passes", "pyomo", "cbc"): "pumpC",
("heuristics", "pyomo", "cbc"): "heur",

("is_mip", "pulp", "cbc"): "mip",
("abs_gap", "pulp", "cbc"): "gapAbs",
("rel_gap", "pulp", "cbc"): "gapRel",
@@ -129,7 +129,6 @@
("threads", "pulp", "cbc"): "threads",
("presolve", "pulp", "cbc"): "presolve",
("msg", "pulp", "cbc"): "msg",

("abs_gap", "pyomo", "gurobi"): "MIPGapAbs",
("rel_gap", "pyomo", "gurobi"): "MIPGap",
("time_limit", "pyomo", "gurobi"): "TimeLimit",
@@ -145,7 +144,6 @@
("pump_passes", "pyomo", "gurobi"): "PumpPasses",
("heuristics", "pyomo", "gurobi"): "Heuristics",
("threads", "pyomo", "gurobi"): "threads",

("is_mip", "pulp", "gurobi"): "mip",
("abs_gap", "pulp", "gurobi"): "gapAbs",
("rel_gap", "pulp", "gurobi"): "gapRel",
@@ -155,7 +153,6 @@
("feasibility_tol", "pulp", "gurobi"): "FeasibilityTol",
("iteration_limit", "pulp", "gurobi"): "IterationLimit",
("msg", "pulp", "gurobi"): "msg",

("abs_gap", "pyomo", "scip"): "limits/absgap",
("rel_gap", "pyomo", "scip"): "limits/gap",
("time_limit", "pyomo", "scip"): "limits/time",
@@ -171,7 +168,6 @@
("nlp_tol", "pyomo", "scip"): "heuristics/subnlp/opttol",
("cutoff", "pyomo", "scip"): "heuristics/subnlp/setcutoff",
("nlp_iteration_limit", "pyomo", "scip"): "heuristics/subnlp/itermin",

("is_mip", "pulp", "scip"): "mip",
("abs_gap", "pulp", "scip"): "gapAbs",
("rel_gap", "pulp", "scip"): "gapRel",
@@ -186,7 +182,6 @@
("solution_limit", "pulp", "scip"): "limits/maxsol",
("nlp_iteration_limit", "pulp", "scip"): "heuristics/subnlp/itermin",
("msg", "pulp", "scip"): "msg",

("rel_gap", "pyomo", "highs"): "mip_rel_gap",
("abs_gap", "pyomo", "highs"): "mip_abs_gap",
("time_limit", "pyomo", "highs"): "time_limit",
@@ -195,7 +190,6 @@
("parallel", "pyomo", "highs"): "parallel",
("crossover", "pyomo", "highs"): "run_crossover",
("heuristics", "pyomo", "highs"): "mip_heuristic_effort",

("is_mip", "pulp", "highs"): "mip",
("abs_gap", "pulp", "highs"): "gapAbs",
("rel_gap", "pulp", "highs"): "gapRel",
@@ -213,7 +207,6 @@
"cbc": "cbc",
"scip": "scip",
"highs": "highs",

# PULP
"PULP_CBC_CMD": "cbc",
"GUROBI_CMD": "gurobi",
@@ -262,3 +255,11 @@ class BadInstance(Exception):

class BadSolution(Exception):
pass


+ class BadInstanceChecks(Exception):
+     pass
+
+
+ class BadSolutionChecks(Exception):
+     pass
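Besides the two new check-related exceptions, the edits to this file are formatting only (blank lines removed inside the (option, engine, solver) translation table and added after the module docstring). As a rough illustration of how such a table maps generic option names onto solver-specific ones; the helper below is a hypothetical sketch, and only the three table entries are copied from the diff:

```python
# Excerpt of the translation table shown above; the consuming helper is hypothetical.
MAPPING_EXCERPT = {
    ("abs_gap", "pulp", "cbc"): "gapAbs",
    ("rel_gap", "pulp", "cbc"): "gapRel",
    ("time_limit", "pyomo", "gurobi"): "TimeLimit",
}


def translate_options(config: dict, engine: str, solver: str) -> dict:
    """Rename generic option keys to the names a given engine/solver pair expects."""
    return {
        MAPPING_EXCERPT[(key, engine, solver)]: value
        for key, value in config.items()
        if (key, engine, solver) in MAPPING_EXCERPT
    }


print(translate_options({"rel_gap": 0.01, "abs_gap": 1.0}, "pulp", "cbc"))
# {'gapRel': 0.01, 'gapAbs': 1.0}
```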
18 changes: 12 additions & 6 deletions libs/client/cornflow_client/core/application.py
@@ -1,6 +1,7 @@
"""
Base code for the application core.
"""

# Partial imports
from abc import ABC, abstractmethod
from timeit import default_timer as timer
@@ -179,7 +180,7 @@ def solve(
f"The solution does not match the schema:\n{sol_errors}"
)

- instance_checks = SuperDict(inst.check())
+ instance_checks = SuperDict(inst.data_checks())

warnings_tables = (
SuperDict.from_dict(inst.schema_checks)["properties"]
@@ -233,17 +234,22 @@ def solve(
log_json["sol_code"] = SOLUTION_STATUS_FEASIBLE

if log_json["sol_code"] > 0:
+ sol_errors = algo.solution.check_schema()
+ if sol_errors:
+     raise BadSolution(
+         f"The solution does not match the schema:\n{sol_errors}"
+     )
sol = algo.solution.to_dict()

if sol != {} and sol is not None:
- checks = algo.check_solution()
+ checks = algo.data_checks()
else:
checks = None

return sol, checks, instance_checks, log_txt, log_json

def check(
- self, instance_data: dict, solution_data: dict, *args, **kwargs
+ self, instance_data: dict, solution_data: dict = None
) -> Tuple[Dict, Dict, Dict]:
"""
Checks the instance and solution data
@@ -257,13 +263,13 @@ def check(
raise NoSolverException(f"No solver is available")
inst = self.instance.from_dict(instance_data)

- instance_checks = inst.check(*args, **kwargs)
+ instance_checks = inst.data_checks()

if solution_data is not None:
sol = self.solution.from_dict(solution_data)
algo = solver_class(inst, sol)
start = timer()
- solution_checks = algo.check_solution(*args, **kwargs)
+ solution_checks = algo.data_checks()
else:
start = timer()
solution_checks = None
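The data_checks() calls introduced above replace the direct check() / check_solution() calls, and their definitions are in files not rendered on this page. A plausible sketch of what such a wrapper could look like, based only on what this diff shows (the schema_checks validation in the tests and the new BadInstanceChecks exception); the actual cornflow_client implementation may differ:

```python
from jsonschema import Draft7Validator

# One of the two exceptions added to cornflow_client.constants in this commit.
from cornflow_client.constants import BadInstanceChecks


class InstanceWithDataChecks:
    """Hypothetical sketch, not the library's real class."""

    schema_checks: dict = {}

    def check(self) -> dict:
        # Subclasses would return {check_name: list_of_failures}.
        return dict()

    def data_checks(self) -> dict:
        checks = self.check()
        errors = [e.message for e in Draft7Validator(self.schema_checks).iter_errors(checks)]
        if errors:
            raise BadInstanceChecks(
                f"The instance checks do not match the schema:\n{errors}"
            )
        return checks
```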
(Diffs for the remaining changed files were not rendered on this page.)