Skip to content

Commit

Permalink
Merge pull request #566 from baobabsoluciones/release/v1.1.3
Browse files Browse the repository at this point in the history
Release/v1.1.3
  • Loading branch information
ggsdc authored Dec 5, 2024
2 parents 4da6007 + 4b05040 commit 5a5cd78
Show file tree
Hide file tree
Showing 26 changed files with 362 additions and 120 deletions.
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/bar_cutting/core/instance.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,9 @@ def to_dict(self) -> dict:

return pickle.loads(pickle.dumps(data_p, -1))

def check(self):
    """Run instance-level data checks; none are defined, so report no failures."""
    return {}

def get_bars(self) -> TupList:
"""
Returns a TupList with the ids of the bars.
Expand Down
5 changes: 4 additions & 1 deletion cornflow-dags/DAG/dag_timer.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,9 @@ class Instance(InstanceCore):
schema = get_empty_schema()
schema_checks = get_empty_schema()

def check(self):
    """Instance checks for the timer DAG: there is nothing to validate."""
    return {}


class Solution(SolutionCore):
schema = get_empty_schema()
Expand All @@ -30,7 +33,7 @@ def solve(self, options):
def get_objective(self) -> float:
return 0

def check_solution(self, *args, **kwargs):
def check_solution(self):
    """Solution checks for the timer DAG: nothing to verify, so report no failures."""
    return {}


Expand Down
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/facility_location/core/instance.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,9 @@ def to_dict(self) -> dict:
data_p["parameters"] = self.data["parameters"]
return pickle.loads(pickle.dumps(data_p, -1))

def check(self):
    """Validate the instance data; no checks are implemented, so none can fail."""
    return {}

def get_suppliers(self):
return self.data["suppliers"].keys_tl()

Expand Down
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/graph_coloring/core/instance.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,8 @@ class Instance(InstanceCore):
schema = load_json(os.path.join(os.path.dirname(__file__), "../schemas/input.json"))
schema_checks = get_empty_schema()

def check(self):
    """Instance-level consistency checks — an empty dict means nothing failed."""
    return {}

def get_pairs(self):
return pt.TupList((el["n1"], el["n2"]) for el in self.data["pairs"])
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/knapsack/core/instance.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,9 @@ def to_dict(self):

return data_dict

def check(self):
    """Run data checks on the knapsack instance; none are defined."""
    return {}

def get_objects_values(self):
return {
self.data["ids"][i]: self.data["values"][i]
Expand Down
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/roadef/core/instance.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,9 @@ def to_dict(self):
._update(sources_tables)
)

def check(self):
    """Instance checks; currently a no-op that reports an empty result."""
    return {}

@staticmethod
def dict_to_int_or_float(data_dict):
"""
Expand Down
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/solve_model_dag/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,9 @@ class Instance(InstanceCore):
schema = get_pulp_jsonschema()
schema_checks = get_empty_schema()

def check(self):
    """No instance checks are implemented for this DAG — return no failures."""
    return {}


class Solution(SolutionCore):
schema = get_pulp_jsonschema()
Expand Down
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/tsp/core/instance.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,5 +59,8 @@ def to_tsplib95(self):
)
return tsp.models.StandardProblem(**dict_data)

def check(self):
    """Run checks over the TSP instance data; none exist, so nothing fails."""
    return {}

def get_arcs(self) -> TupList:
return self.data["arcs"]
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/two_dimension_bin_packing/core/instance.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,9 @@ def to_dict(self):
data_p["parameters"] = self.data["parameters"]
return pickle.loads(pickle.dumps(data_p, -1))

def check(self):
    """Instance data checks; empty result signals that no check failed."""
    return {}

def _get_property(self, key, prop) -> SuperDict:
return self.data[key].get_property(prop)

Expand Down
3 changes: 3 additions & 0 deletions cornflow-dags/DAG/vrp/core/instance.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,9 @@ def to_dict(self):

return data

def check(self):
    """Validate the VRP instance; no checks are defined, so report none."""
    return {}

def get_nodes(self):
return [i["n"] for i in self.data["demand"].values()]

Expand Down
10 changes: 6 additions & 4 deletions cornflow-dags/tests/test_dags.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ def test_schema_load(self):
self.assertIsInstance(self.app.instance.schema, dict)
self.assertIsInstance(self.app.solution.schema, dict)
self.assertIsInstance(self.app.schema, dict)
self.assertIsInstance(self.app.instance.schema_checks, dict)

def test_config_requirements(self):
keys = {"solver", "timeLimit"}
Expand Down Expand Up @@ -85,8 +86,9 @@ def test_try_solving_testcase(self, config=None):
instance = self.app.instance.from_dict(instance_data)
solution = self.app.solution.from_dict(solution_test)
s = self.app.get_default_solver_name()
experim = self.app.get_solver(s)(instance, solution)
checks = experim.check_solution()
experiment = self.app.get_solver(s)(instance, solution)
self.assertIsInstance(experiment.schema_checks, dict)
checks = experiment.check_solution()
failed_checks = [k for k, v in checks.items() if len(v) > 0]
if len(failed_checks) > 0:
print(
Expand All @@ -97,9 +99,9 @@ def test_try_solving_testcase(self, config=None):
if len(values) > 0:
print(f"{check}: {values}")

experim.get_objective()
experiment.get_objective()

validator = Draft7Validator(experim.schema_checks)
validator = Draft7Validator(experiment.schema_checks)
if not validator.is_valid(solution_check):
raise Exception("The solution checks have invalid format")

Expand Down
12 changes: 11 additions & 1 deletion cornflow-server/changelog.rst
Original file line number Diff line number Diff line change
@@ -1,3 +1,13 @@
version 1.1.3
--------------

- released: 2024-12-05
- description: small changes
- changelog:
- changed the json schema validation on airflow so that solution, instance checks and solution checks are correctly reviewed.
- added some small changes to ensure future compatibility with new versions of the libraries.
- added a reconnect from airflow to cornflow to make sure that if the model fails we can get back as much as possible.

version 1.1.2
--------------

Expand All @@ -9,7 +19,7 @@ version 1.1.2
version 1.1.1
--------------

- released: 2024-08-29
- released: 2024-09-18
- description: small security fixes
- changelog:
- bump PuLP to version 2.9.0
Expand Down
23 changes: 18 additions & 5 deletions cornflow-server/cornflow/endpoints/login.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ class LoginBaseEndpoint(BaseMetaResource):
"""
Base endpoint to perform a login action from a user
"""

def __init__(self):
super().__init__()
self.ldap_class = LDAPBase
Expand Down Expand Up @@ -102,7 +103,9 @@ def auth_ldap_authenticate(self, username, password):
raise InvalidCredentials()
user = self.data_model.get_one_object(username=username)
if not user:
current_app.logger.info(f"LDAP user {username} does not exist and is created")
current_app.logger.info(
f"LDAP user {username} does not exist and is created"
)
email = ldap_obj.get_user_email(username)
if not email:
email = ""
Expand All @@ -122,10 +125,14 @@ def auth_ldap_authenticate(self, username, password):

except IntegrityError as e:
db.session.rollback()
current_app.logger.error(f"Integrity error on user role assignment on log in: {e}")
current_app.logger.error(
f"Integrity error on user role assignment on log in: {e}"
)
except DBAPIError as e:
db.session.rollback()
current_app.logger.error(f"Unknown error on user role assignment on log in: {e}")
current_app.logger.error(
f"Unknown error on user role assignment on log in: {e}"
)

return user

Expand Down Expand Up @@ -163,7 +170,9 @@ def auth_oid_authenticate(self, token):
user = self.data_model.get_one_object(username=username)

if not user:
current_app.logger.info(f"OpenID user {username} does not exist and is created")
current_app.logger.info(
f"OpenID user {username} does not exist and is created"
)

data = {"username": username, "email": username}

Expand All @@ -183,7 +192,11 @@ def auth_oid_authenticate(self, token):

def check_last_password_change(user):
if user.pwd_last_change:
if user.pwd_last_change + timedelta(days=int(current_app.config["PWD_ROTATION_TIME"])) < datetime.utcnow():
if (
user.pwd_last_change
+ timedelta(days=int(current_app.config["PWD_ROTATION_TIME"]))
< datetime.utcnow()
):
return True
return False

Expand Down
2 changes: 1 addition & 1 deletion cornflow-server/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@

setuptools.setup(
name="cornflow",
version="1.1.2",
version="1.1.3",
author="baobab soluciones",
author_email="[email protected]",
description="Cornflow is an open source multi-solver optimization server with a REST API built using flask.",
Expand Down
13 changes: 12 additions & 1 deletion libs/client/changelog.rst
Original file line number Diff line number Diff line change
@@ -1,7 +1,18 @@
version 1.1.3
--------------

- released: 2024-12-05
- description: changes to json schemas validation on airflow.
- changelog:
- changed the json schema validation on airflow so that solution, instance checks and solution checks are correctly reviewed.
- added some small changes to ensure future compatibility with new versions of the libraries.
- added a reconnect from airflow to cornflow to make sure that if the model fails we can get back as much as possible.


version 1.1.1
--------------

- released: 2024-08-29
- released: 2024-09-18
- description: small security fixes
- changelog:
- bump PuLP to version 2.9.0
Expand Down
4 changes: 4 additions & 0 deletions libs/client/cornflow_client/airflow/dag_utilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,13 +226,17 @@ def cf_solve(fun, dag_name, secrets, **kwargs):
except NoSolverException as e:
if config.get("msg", True):
print("No solver found !")
# We reconnect in case the solver has been more than 24 hours solving before the error is raised
client = connect_to_cornflow(secrets)
try_to_save_error(client, exec_id, -1)
client.update_status(exec_id, {"status": -1})
try_to_save_airflow_log(client, exec_id, ti, base_log_folder)
raise AirflowDagException(e)
except Exception as e:
if config.get("msg", True):
print("Some unknown error happened")
# We reconnect in case the solver has been more than 24 hours solving before the error is raised
client = connect_to_cornflow(secrets)
try_to_save_error(client, exec_id, -1)
client.update_status(exec_id, {"status": -1})
try_to_save_airflow_log(client, exec_id, ti, base_log_folder)
Expand Down
17 changes: 9 additions & 8 deletions libs/client/cornflow_client/constants.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""
Constants values used in schemas functions.
"""

import pulp as pl
from ortools.sat.python import cp_model

Expand Down Expand Up @@ -119,7 +120,6 @@
("timeLimit", "pyomo", "cbc"): "sec",
("pump_passes", "pyomo", "cbc"): "pumpC",
("heuristics", "pyomo", "cbc"): "heur",

("is_mip", "pulp", "cbc"): "mip",
("abs_gap", "pulp", "cbc"): "gapAbs",
("rel_gap", "pulp", "cbc"): "gapRel",
Expand All @@ -129,7 +129,6 @@
("threads", "pulp", "cbc"): "threads",
("presolve", "pulp", "cbc"): "presolve",
("msg", "pulp", "cbc"): "msg",

("abs_gap", "pyomo", "gurobi"): "MIPGapAbs",
("rel_gap", "pyomo", "gurobi"): "MIPGap",
("time_limit", "pyomo", "gurobi"): "TimeLimit",
Expand All @@ -145,7 +144,6 @@
("pump_passes", "pyomo", "gurobi"): "PumpPasses",
("heuristics", "pyomo", "gurobi"): "Heuristics",
("threads", "pyomo", "gurobi"): "threads",

("is_mip", "pulp", "gurobi"): "mip",
("abs_gap", "pulp", "gurobi"): "gapAbs",
("rel_gap", "pulp", "gurobi"): "gapRel",
Expand All @@ -155,7 +153,6 @@
("feasibility_tol", "pulp", "gurobi"): "FeasibilityTol",
("iteration_limit", "pulp", "gurobi"): "IterationLimit",
("msg", "pulp", "gurobi"): "msg",

("abs_gap", "pyomo", "scip"): "limits/absgap",
("rel_gap", "pyomo", "scip"): "limits/gap",
("time_limit", "pyomo", "scip"): "limits/time",
Expand All @@ -171,7 +168,6 @@
("nlp_tol", "pyomo", "scip"): "heuristics/subnlp/opttol",
("cutoff", "pyomo", "scip"): "heuristics/subnlp/setcutoff",
("nlp_iteration_limit", "pyomo", "scip"): "heuristics/subnlp/itermin",

("is_mip", "pulp", "scip"): "mip",
("abs_gap", "pulp", "scip"): "gapAbs",
("rel_gap", "pulp", "scip"): "gapRel",
Expand All @@ -186,7 +182,6 @@
("solution_limit", "pulp", "scip"): "limits/maxsol",
("nlp_iteration_limit", "pulp", "scip"): "heuristics/subnlp/itermin",
("msg", "pulp", "scip"): "msg",

("rel_gap", "pyomo", "highs"): "mip_rel_gap",
("abs_gap", "pyomo", "highs"): "mip_abs_gap",
("time_limit", "pyomo", "highs"): "time_limit",
Expand All @@ -195,7 +190,6 @@
("parallel", "pyomo", "highs"): "parallel",
("crossover", "pyomo", "highs"): "run_crossover",
("heuristics", "pyomo", "highs"): "mip_heuristic_effort",

("is_mip", "pulp", "highs"): "mip",
("abs_gap", "pulp", "highs"): "gapAbs",
("rel_gap", "pulp", "highs"): "gapRel",
Expand All @@ -213,7 +207,6 @@
"cbc": "cbc",
"scip": "scip",
"highs": "highs",

# PULP
"PULP_CBC_CMD": "cbc",
"GUROBI_CMD": "gurobi",
Expand Down Expand Up @@ -262,3 +255,11 @@ class BadInstance(Exception):

class BadSolution(Exception):
pass


class BadInstanceChecks(Exception):
pass


class BadSolutionChecks(Exception):
pass
Loading

0 comments on commit 5a5cd78

Please sign in to comment.