Skip to content

Commit

Permalink
Expose models objects (#89)
Browse files Browse the repository at this point in the history
* Expose models objects

* Update CHANGELOG
  • Loading branch information
caique-lima authored Aug 16, 2019
1 parent fe2908b commit 0809c75
Show file tree
Hide file tree
Showing 5 changed files with 25 additions and 19 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
# Changelog

## [1.15.1] - 2019-08-16
- **Enhancement**
- Now learners that have a model expose it in the logs under the `object` key

## [1.15.0] - 2019-08-12
- **Enhancement**
- Make `custom_transformer` a pure function
Expand Down
2 changes: 1 addition & 1 deletion src/fklearn/resources/VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1.15.0
1.15.1
4 changes: 2 additions & 2 deletions src/fklearn/training/calibration.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,8 +51,8 @@ def p(new_df: pd.DataFrame) -> pd.DataFrame:
'prediction_column': prediction_column,
'package': "sklearn",
'package_version': sklearn.__version__,
'training_samples': len(df)
}}
'training_samples': len(df)},
'object': clf}

return p, p(df), log

Expand Down
16 changes: 8 additions & 8 deletions src/fklearn/training/classification.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,8 +86,8 @@ def p(new_df: pd.DataFrame) -> pd.DataFrame:
'package': "sklearn",
'package_version': sk_version,
'feature_importance': dict(zip(features, clf.coef_.flatten())),
'training_samples': len(df)
}}
'training_samples': len(df)},
'object': clf}

return p, p(df), log

Expand Down Expand Up @@ -219,8 +219,8 @@ def p(new_df: pd.DataFrame, apply_shap: bool = False) -> pd.DataFrame:
'package_version': xgb.__version__,
'parameters': assoc(params, "num_estimators", num_estimators),
'feature_importance': bst.get_score(),
'training_samples': len(df)
}}
'training_samples': len(df)},
'object': bst}

return p, p(df), log

Expand Down Expand Up @@ -357,8 +357,8 @@ def p(new_df: pd.DataFrame, apply_shap: bool = False) -> pd.DataFrame:
'package_version': catboost.__version__,
'parameters': assoc(params, "num_estimators", num_estimators),
'feature_importance': cbr.feature_importances_,
'training_samples': len(df)
}}
'training_samples': len(df)},
'object': cbr}

return p, p(df), log

Expand Down Expand Up @@ -446,8 +446,8 @@ def p(new_df: pd.DataFrame) -> pd.DataFrame:
'parameters': assoc(params, "vocab_size", sparse_vect.shape[1]),
'package': "sklearn",
'package_version': sk_version,
'training_samples': len(df)
}}
'training_samples': len(df)},
'object': clf}

return p, p(df), log

Expand Down
18 changes: 10 additions & 8 deletions src/fklearn/training/regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,8 +78,8 @@ def p(new_df: pd.DataFrame) -> pd.DataFrame:
'package': "sklearn",
'package_version': sk_version,
'feature_importance': dict(zip(features, regr.coef_.flatten())),
'training_samples': len(df)
}}
'training_samples': len(df)},
'object': regr}

return p, p(df), log

Expand Down Expand Up @@ -190,8 +190,8 @@ def p(new_df: pd.DataFrame, apply_shap: bool = False) -> pd.DataFrame:
'package_version': xgb.__version__,
'parameters': assoc(params, "num_estimators", num_estimators),
'feature_importance': bst.get_score(),
'training_samples': len(df)
}}
'training_samples': len(df)},
'object': bst}

return p, p(df), log

Expand Down Expand Up @@ -295,8 +295,8 @@ def p(new_df: pd.DataFrame, apply_shap: bool = False) -> pd.DataFrame:
'package_version': catboost.__version__,
'parameters': assoc(params, "num_estimators", num_estimators),
'feature_importance': cbr.feature_importances_,
'training_samples': len(df)
}}
'training_samples': len(df)},
'object': cbr}

return p, p(df), log

Expand Down Expand Up @@ -395,8 +395,8 @@ def p(new_df: pd.DataFrame) -> pd.DataFrame:
'prediction_column': prediction_column,
'package': "sklearn",
'package_version': sk_version,
'training_samples': len(df)
}}
'training_samples': len(df)},
'object': gp}

return p, p(df), log

Expand Down Expand Up @@ -592,6 +592,8 @@ def p(new_df: pd.DataFrame) -> pd.DataFrame:

p.__doc__ = learner_pred_fn_docstring("custom_supervised_model_learner")

log["object"] = model

return p, p(df), log


Expand Down

0 comments on commit 0809c75

Please sign in to comment.