Update versions of sacred and tf (#54)
* Update versions of sacred and tf

* Update readme

* Update requirements

* Use tf beta version

* Set tf seed manually

* Try to set tf seed once

* Check less precise
JarnoRFB authored Oct 29, 2019
1 parent ce4c99a commit d16c9f3
Showing 9 changed files with 31 additions and 52 deletions.
Dockerfile (2 changes: 1 addition & 1 deletion)
@@ -11,7 +11,7 @@ RUN apt-get update \
&& apt-get -y install --no-install-recommends apt-utils 2>&1

# Install git, process tools, lsb-release (common in install instructions for CLIs)
-RUN apt-get -y install git procps lsb-release ffmpeg vim
+RUN apt-get -y install git procps lsb-release ffmpeg vim exuberant-ctags

# Install any missing dependencies for enhanced language service
RUN apt-get install -y libicu[0-9][0-9]
README.rst (4 changes: 0 additions & 4 deletions)
@@ -34,13 +34,9 @@ text that contains the interpretation.

Installation
------------
-To use incense you need the newest development version of sacred, so that
-content-types of artifacts are automatically detected. Therefore, you first
-have to install sacred from github and then install incense from PyPI.

::

-pip install git+https://github.com/IDSIA/sacred.git
pip install incense

Documentation
requirements-dev.txt (2 changes: 1 addition & 1 deletion)
@@ -2,7 +2,7 @@
pytest
pytest-cov
codecov
-tensorflow==1.13.1
+tensorflow==2.0.0b1
python-dotenv
scikit-learn
jupyterlab
requirements.txt (2 changes: 1 addition & 1 deletion)
@@ -1,4 +1,4 @@
-git+https://github.com/IDSIA/sacred.git
+sacred>=0.8
pandas>=0.23
pymongo>=3.7
easydict>=1.9
setup.py (6 changes: 3 additions & 3 deletions)
@@ -22,11 +22,11 @@
packages=["incense"],
python_requires=">=3.5",
install_requires=[
-"sacred",
-"jsonpickle",
+"sacred>=0.8",
+"jsonpickle>=0.7.2",
"matplotlib>=3",
"pandas>=0.23",
-"jupyterlab>=0.35",
+"jupyterlab>=1.0",
"pymongo>=3.7",
"pyrsistent>=0.15.2",
"future-fstrings==1.0.0",
tests/conftest.py (11 changes: 6 additions & 5 deletions)
@@ -1,10 +1,11 @@
import os

import pytest
-from incense import ExperimentLoader
from sacred import Experiment as SacredExperiment
from sacred.observers import MongoObserver

+from incense import ExperimentLoader


def get_mongo_uri():
in_devcontainer = os.environ.get("TERM_PROGRAM") == "vscode"
@@ -31,7 +32,7 @@ def loader():

@pytest.fixture
def delete_mongo_observer():
-observer = MongoObserver.create(url=MONGO_URI, db_name=DELETE_DB_NAME)
+observer = MongoObserver(url=MONGO_URI, db_name=DELETE_DB_NAME)
return observer


@@ -43,7 +44,7 @@ def delete_db_loader():

@pytest.fixture
def recent_mongo_observer():
-observer = MongoObserver.create(url=MONGO_URI, db_name=RECENT_DB_NAME)
+observer = MongoObserver(url=MONGO_URI, db_name=RECENT_DB_NAME)
return observer


@@ -55,7 +56,7 @@ def recent_db_loader():

@pytest.fixture
def heterogenous_mongo_observer():
-observer = MongoObserver.create(url=MONGO_URI, db_name=HETEROGENOUS_DB_NAME)
+observer = MongoObserver(url=MONGO_URI, db_name=HETEROGENOUS_DB_NAME)
return observer


@@ -67,7 +68,7 @@ def heterogenous_db_loader():

@pytest.fixture
def info_mongo_observer():
-observer = MongoObserver.create(url=MONGO_URI, db_name=INFO_DB_NAME)
+observer = MongoObserver(url=MONGO_URI, db_name=INFO_DB_NAME)
return observer


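For context, the fixture changes above follow the sacred 0.8 API, where a MongoObserver is built with the plain constructor rather than the earlier MongoObserver.create(...) classmethod. A minimal sketch (the Mongo URI below is a placeholder, not taken from the test setup):

    from sacred import Experiment
    from sacred.observers import MongoObserver

    ex = Experiment("example")

    # sacred >= 0.8: call the constructor directly instead of MongoObserver.create(...)
    observer = MongoObserver(url="mongodb://localhost:27017", db_name="incense_test")
    ex.observers.append(observer)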
tests/example_experiment/conduct.py (47 changes: 14 additions & 33 deletions)
@@ -11,7 +11,6 @@
from sacred.observers import MongoObserver
from sacred.utils import apply_backspaces_and_linefeeds
from sklearn.metrics import confusion_matrix
-from tensorflow.python.keras.callbacks import Callback


def get_mongo_uri():
@@ -22,7 +21,7 @@ def get_mongo_uri():
return None


-class MetricsLogger(Callback):
+class MetricsLogger(tf.keras.callbacks.Callback):
"""Callback to log loss and accuracy to sacred database."""

def __init__(self, run):
@@ -31,30 +30,10 @@ def __init__(self, run):

def on_epoch_end(self, epoch, logs):
self._run.log_scalar("training_loss", float(logs["loss"]), step=epoch)
-self._run.log_scalar("training_acc", float(logs["acc"]), step=epoch)
+self._run.log_scalar("training_accuracy", float(logs["accuracy"]), step=epoch)


def plot_confusion_matrix(confusion_matrix, class_names, figsize=(15, 12)):
-"""Prints a confusion matrix, as returned by sklearn.metrics.confusion_matrix, as a heatmap.
-Based on https://gist.github.com/shaypal5/94c53d765083101efc0240d776a23823
-Arguments
----------
-confusion_matrix: numpy.ndarray
-The numpy.ndarray object returned from a call to sklearn.metrics.confusion_matrix.
-Similarly constructed ndarrays can also be used.
-class_names: list
-An ordered list of class names, in the order they index the given confusion matrix.
-figsize: tuple
-A 2-long tuple, the first value determining the horizontal size of the ouputted figure,
-the second determining the vertical size. Defaults to (10,7).
-Returns
--------
-matplotlib.figure.Figure
-The resulting confusion matrix figure
-"""
df_cm = pd.DataFrame(confusion_matrix, index=class_names, columns=class_names)
fig, ax = plt.subplots(figsize=figsize)
heatmap = sns.heatmap(df_cm, annot=False, cmap="Blues")
@@ -68,13 +47,13 @@ def plot_accuracy_development(history, _run):
writer = FFMpegWriter(fps=1)
filename = "accuracy_movie.mp4"
with writer.saving(fig, filename, 600):
-acc = history.history["acc"]
-x = list(range(1, len(acc) + 1))
-y = acc
-ax.set(xlim=[0.9, len(acc) + 0.1], ylim=[0, 1], xlabel="epoch", ylabel="accuracy")
+accuracy = history.history["accuracy"]
+x = list(range(1, len(accuracy) + 1))
+y = accuracy
+ax.set(xlim=[0.9, len(accuracy) + 0.1], ylim=[0, 1], xlabel="epoch", ylabel="accuracy")
[acc_line] = ax.plot(x, y, "o-")

-for i in range(1, len(acc) + 1):
+for i in range(1, len(accuracy) + 1):
acc_line.set_data(x[:i], y[:i])

writer.grab_frame()
@@ -85,17 +64,17 @@ def plot_accuracy_development(history, _run):
def write_csv_as_text(history, _run):
filename = "history.txt"
with open(filename, "w") as handle:
-handle.write("acc, loss\n")
-for acc, loss in zip(history.history["acc"], history.history["loss"]):
-handle.write(f"{acc}, {loss}\n")
+handle.write("accuracy, loss\n")
+for accuracy, loss in zip(history.history["accuracy"], history.history["loss"]):
+handle.write(f"{accuracy}, {loss}\n")

_run.add_artifact(filename=filename, name="history")


ex = Experiment("example")
ex.captured_out_filter = apply_backspaces_and_linefeeds

-ex.observers.append(MongoObserver.create(url=get_mongo_uri(), db_name="incense_test"))
+ex.observers.append(MongoObserver(url=get_mongo_uri(), db_name="incense_test"))


@ex.config
@@ -119,6 +98,7 @@ def make_model():

@ex.command
def conduct(epochs, optimizer, _run):

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
x_train = x_train / 255.0
x_test = x_test / 255.0
@@ -164,10 +144,11 @@ def conduct(epochs, optimizer, _run):
for metric, value in results.items():
_run.log_scalar(f"test_{metric}", value)

-return results["acc"]
+return results["accuracy"]


if __name__ == "__main__":
+tf.random.set_seed(42)
ex.run("conduct")
ex.run("conduct", config_updates={"epochs": 3})
ex.run("conduct", config_updates={"optimizer": "adam"})
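For context, a standalone sketch (the model and data here are made up, not from this experiment) of the TensorFlow 2.x conventions the diff above adopts: the callback base class is tf.keras.callbacks.Callback, model.fit reports the metric under the key "accuracy" rather than "acc", and the global seed is set once via tf.random.set_seed:

    import numpy as np
    import tensorflow as tf


    class AccuracyPrinter(tf.keras.callbacks.Callback):
        """Illustrative callback: in TF 2.x the logs dict uses the key "accuracy"."""

        def on_epoch_end(self, epoch, logs=None):
            print(epoch, float(logs["accuracy"]))


    tf.random.set_seed(42)  # TF 2.x replacement for tf.set_random_seed

    model = tf.keras.Sequential([tf.keras.layers.Dense(1, activation="sigmoid", input_shape=(4,))])
    model.compile(optimizer="sgd", loss="binary_crossentropy", metrics=["accuracy"])

    x = np.random.rand(32, 4).astype("float32")
    y = np.random.randint(0, 2, size=(32, 1))
    model.fit(x, y, epochs=2, callbacks=[AccuracyPrinter()], verbose=0)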
tests/test_experiment.py (2 changes: 1 addition & 1 deletion)
@@ -15,7 +15,7 @@ def test_repr(loader):

def test_metrics(loader):
exp = loader.find_by_id(3)
-metric_names = ["training_loss", "training_acc", "test_loss", "test_acc"]
+metric_names = ["training_loss", "training_accuracy", "test_loss", "test_accuracy"]
for metric_name in metric_names:
assert metric_name in exp.metrics.keys()
assert isinstance(exp.metrics[metric_name], pd.Series)
tests/test_projector.py (7 changes: 4 additions & 3 deletions)
@@ -23,7 +23,6 @@ def test_projection_without_renaming(loader):
}
expected_projected = pd.DataFrame(expected_projected_data).set_index("exp_id")
projected = exps.project(on=["config.epochs", "config.optimizer"], rename=None)

assert_frame_equal(projected.sort_index(axis="columns"), expected_projected.sort_index(axis="columns"))


@@ -32,11 +31,13 @@ def test_projection_with_aggregation(loader):
expected_projected_data = {
"exp_id": [1, 2, 3],
"epochs": [1, 3, 1],
-"training_loss_mean": [0.6378391059716543, 0.425261557425393, 0.2187067011743784],
+"training_loss_mean": [0.6404899107933044, 0.42554907141460313, 0.22108366647909086],
}
expected_projected = pd.DataFrame(expected_projected_data).set_index("exp_id")
projected = exps.project(on=["config.epochs", {"metrics.training_loss": np.mean}])
-assert_frame_equal(projected.sort_index(axis="columns"), expected_projected.sort_index(axis="columns"))
+assert_frame_equal(
+projected.sort_index(axis="columns"), expected_projected.sort_index(axis="columns"), check_less_precise=True
+)


def test_projection_with_heterogenous_formats(heterogenous_db_loader, heterogenous_mongo_observer, add_exp_to_db):
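For context, a small sketch (toy values, not from the test suite) of what check_less_precise=True does in the assertion above: pandas.testing.assert_frame_equal then compares roughly three decimal digits instead of five, so small run-to-run drift in the logged training losses no longer fails the test. Later pandas releases deprecate this flag in favor of rtol/atol.

    import pandas as pd
    from pandas.testing import assert_frame_equal

    # The values agree to about three decimal places but differ in the fourth.
    expected = pd.DataFrame({"training_loss_mean": [0.221084]})
    actual = pd.DataFrame({"training_loss_mean": [0.221397]})

    # With the default precision this comparison raises an AssertionError;
    # check_less_precise=True loosens the tolerance to roughly three digits.
    assert_frame_equal(actual, expected, check_less_precise=True)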
