Merge pull request #53 from duembgen/simulations
Fix a few errors due to the previous merge and reformat with yapf version 0.28.0.
micha7a authored Oct 25, 2019
2 parents b12eea4 + 580a2b8 commit 21544e2
Showing 10 changed files with 93 additions and 91 deletions.
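Note: most of the diff below is mechanical re-wrapping from the yapf upgrade pinned in requirements.txt. As a minimal sketch, the same kind of reformatting can be reproduced through yapf's Python API; the style options here (pep8 base, 120-character limit) are an assumption inferred from the wrapped lines, not the project's actual configuration:

```python
# A minimal sketch, assuming yapf==0.28.0 (the version pinned by this commit).
# The style options are an assumption inferred from the diff, not the
# project's real style settings.
from yapf.yapflib.yapf_api import FormatCode

source = ("x = some_function(\n"
          "    argument_one, argument_two,\n"
          "        argument_three)\n")
formatted, changed = FormatCode(source, style_config='{based_on_style: pep8, column_limit: 120}')
print(formatted)  # the call is re-wrapped to fit on one line under the limit
print(changed)    # True when yapf modified the input
```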
10 changes: 6 additions & 4 deletions EDMtools.py
@@ -1,8 +1,8 @@
#!/usr/bin/env python
# module EDMtools

# TODO this module is currently not used. When we actually use these functions, we should
# take them from pylocus instead of using them here.
# TODO this module is currently not used. When we actually use these functions, we should
# take them from pylocus instead of using them here.

import numpy as np

@@ -28,12 +28,13 @@ def classicalMDS(EDM_noisy, d):
def procrustes(A, X):
return algorithms.procrustes(A.T, X.T, scale=False)


def procrustes_adam(A, X):
# TODO: check if this implementation is more elegant than the one in pylocus.
# columns of A are the anchors
# columns of X are the points to be aligned to the anchors
d = A.shape[0] # dimension
L = A.shape[1] # number anchors
d = A.shape[0] # dimension
L = A.shape[1] # number anchors
assert L >= d, 'Procrustes needs at least d anchors.'

a_bar = np.reshape(np.mean(A, axis=1), (2, 1))
@@ -46,5 +47,6 @@ def procrustes_adam(A, X):
oneL = np.ones(L)
return R @ X_bar + a_bar


if __name__ == "__main__":
print('nothing happens when running this module.')
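For context, the middle of `procrustes_adam` (the rotation estimate) is collapsed in this diff. Below is a minimal self-contained sketch of the same anchor alignment, assuming the standard orthogonal-Procrustes solution via SVD; it is not necessarily the exact hidden implementation:

```python
import numpy as np


def procrustes_sketch(A, X):
    """Align columns of X to anchor columns of A (rotation + translation, no scale).

    A sketch assuming the standard SVD-based solution, not the repo's hidden code.
    """
    d, L = A.shape
    assert L >= d, 'Procrustes needs at least d anchors.'
    a_bar = A.mean(axis=1, keepdims=True)  # anchor centroid
    x_bar = X.mean(axis=1, keepdims=True)  # point centroid
    # Optimal orthogonal transform from the SVD of the cross-covariance.
    U, _, Vt = np.linalg.svd((A - a_bar) @ (X - x_bar).T)
    R = U @ Vt
    return R @ (X - x_bar) + a_bar


# Rotated and shifted copies of the anchors are mapped back onto themselves.
A = np.random.rand(2, 5)
R_true = np.array([[0., -1.], [1., 0.]])
X = R_true.T @ (A - 1.0)
assert np.allclose(procrustes_sketch(A, X), A)
```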
3 changes: 2 additions & 1 deletion GenerateAllFigures.ipynb
@@ -46,6 +46,7 @@
"source": [
"from trajectory_creator import get_trajectory\n",
"from plotting_tools import add_scalebar, remove_ticks\n",
"import copy\n",
"\n",
"np.random.seed(0)\n",
"\n",
@@ -338,7 +339,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
"version": "3.7.3"
}
},
"nbformat": 4,
14 changes: 5 additions & 9 deletions baseline_solvers.py
@@ -19,20 +19,17 @@ def reconstructD_topright(coeffs, basis, anchors):
""" Construct D_topright from coeffs (coefficients), basis vectors and anchors."""
N = basis.shape[1]
M = anchors.shape[1]
return np.outer(np.ones(N), np.diag(
anchors.T @ anchors)) - 2 * basis.T @ coeffs.T @ anchors + np.outer(
np.diag(basis.T @ coeffs.T @ coeffs @ basis), np.ones(M))
return np.outer(np.ones(N), np.diag(anchors.T @ anchors)) - 2 * basis.T @ coeffs.T @ anchors + np.outer(
np.diag(basis.T @ coeffs.T @ coeffs @ basis), np.ones(M))


def customMDS(D_topright, basis, anchors):
""" Custom MDS for matrix of shape M, N. """
[d, M] = anchors.shape
N = basis.shape[1]
JM = np.eye(M) - np.ones([M, M]) / M
tmp = lowRankApproximation(
JM @ (np.outer(np.diag(anchors.T @ anchors), np.ones(N)) - D_topright.T), d)
return 0.5 * np.linalg.inv(anchors @ JM @ anchors.T) @ anchors @ tmp @ basis.T @ np.linalg.inv(
basis @ basis.T)
tmp = lowRankApproximation(JM @ (np.outer(np.diag(anchors.T @ anchors), np.ones(N)) - D_topright.T), d)
return 0.5 * np.linalg.inv(anchors @ JM @ anchors.T) @ anchors @ tmp @ basis.T @ np.linalg.inv(basis @ basis.T)


def SRLS(anchors, basis, coeffs, D_topright):
@@ -45,8 +42,7 @@ def getSRLSGrad(anchors, basis, coeffs, D_topright):
""" Get gradient of SRLS function. """
[K, N] = basis.shape
M = anchors.shape[1]
LHS = anchors @ (
np.outer(np.diag(anchors.T @ anchors), np.ones(N)) - D_topright.transpose()) @ basis.T
LHS = anchors @ (np.outer(np.diag(anchors.T @ anchors), np.ones(N)) - D_topright.transpose()) @ basis.T

term1 = M * np.outer(np.diag(basis.T @ coeffs.T @ coeffs @ basis), np.ones(K))
term2 = -2 * basis.T @ coeffs.T @ anchors @ np.outer(np.ones(M), np.ones(K))
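A hedged sanity check for `reconstructD_topright` above: with trajectory samples `P = coeffs @ basis` (d x N) and anchors (d x M), the reconstructed matrix should equal the squared point-anchor distances. The shapes follow the visible code; the test data is made up:

```python
import numpy as np

d, K, N, M = 2, 3, 6, 4  # dimension, complexity, positions, anchors
rng = np.random.default_rng(0)
coeffs = rng.standard_normal((d, K))
basis = rng.standard_normal((K, N))
anchors = rng.standard_normal((d, M))

points = coeffs @ basis  # d x N trajectory samples
# Squared distances computed directly, entry (n, m) = ||p_n - a_m||^2.
D_direct = np.linalg.norm(points[:, :, None] - anchors[:, None, :], axis=0)**2
# The same matrix via the expansion used in reconstructD_topright.
D_topright = (np.outer(np.ones(N), np.diag(anchors.T @ anchors)) - 2 * basis.T @ coeffs.T @ anchors +
              np.outer(np.diag(basis.T @ coeffs.T @ coeffs @ basis), np.ones(M)))
assert np.allclose(D_direct, D_topright)
```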
8 changes: 4 additions & 4 deletions data_utils.py
@@ -101,10 +101,10 @@ def get_smooth_points(C_list, t_list, traj):
mean_window = 10
datetimes = [datetime.datetime.fromtimestamp(t) for t in result_df.t]
result_df.index = [pd.Timestamp(datetime) for datetime in datetimes]
result_df.loc[:, 'px_median'] = result_df['px'].rolling(
'{}s'.format(mean_window), min_periods=1, center=False).median()
result_df.loc[:, 'py_median'] = result_df['py'].rolling(
'{}s'.format(mean_window), min_periods=1, center=False).median()
result_df.loc[:, 'px_median'] = result_df['px'].rolling('{}s'.format(mean_window), min_periods=1,
center=False).median()
result_df.loc[:, 'py_median'] = result_df['py'].rolling('{}s'.format(mean_window), min_periods=1,
center=False).median()
return result_df


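For reference, a minimal sketch of the time-based rolling median used in `get_smooth_points`, on made-up data (a hypothetical 1 Hz trace; the '10s' window matches `mean_window` above):

```python
import numpy as np
import pandas as pd

idx = pd.date_range("2019-10-25", periods=20, freq="s")
df = pd.DataFrame({"px": np.arange(20.0)}, index=idx)
# Median over a trailing 10-second window, as in the code above.
df["px_median"] = df["px"].rolling("10s", min_periods=1, center=False).median()
print(df["px_median"].tail())
```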
59 changes: 29 additions & 30 deletions general_rank_experiments.py
@@ -94,23 +94,21 @@
print("Start comparision calculation for {} anchors".format(n_anchors))

probabilities1 = [
h.probability_upper_bound_any_measurements(
params["n_dimensions"],
params["n_constraints"],
n,
position_wise=False,
n_anchors=n_anchors,
n_measurements=params["fixed_n_measurements"]) for n in n_positions_list
h.probability_upper_bound_any_measurements(params["n_dimensions"],
params["n_constraints"],
n,
position_wise=False,
n_anchors=n_anchors,
n_measurements=params["fixed_n_measurements"]) for n in n_positions_list
]

probabilities2 = [
h.probability_upper_bound_any_measurements(
params["n_dimensions"],
params["n_constraints"],
n,
position_wise=True,
n_anchors=n_anchors,
n_measurements=params["fixed_n_measurements"]) for n in n_positions_list
h.probability_upper_bound_any_measurements(params["n_dimensions"],
params["n_constraints"],
n,
position_wise=True,
n_anchors=n_anchors,
n_measurements=params["fixed_n_measurements"]) for n in n_positions_list
]

probabilities1 = np.array(probabilities1)
@@ -132,8 +130,11 @@
plt.plot(n_positions_list[beg:], probabilities1[beg:], linestyle=":", c="C1", label="anchor upper bound")
plt.plot(n_positions_list[beg:], probabilities2[beg:], linestyle=":", c="C2", label="sample upper bound")
plt.plot(n_positions_list[beg:], probabilities2[beg:] * probabilities1, linestyle=":", c="C3", label="both bounds")
plt.plot(
n_positions_list[beg:], probabilities2[beg:] * np.min(probabilities1[beg:]), linestyle=":", c="C0", label="a hack")
plt.plot(n_positions_list[beg:],
probabilities2[beg:] * np.min(probabilities1[beg:]),
linestyle=":",
c="C0",
label="a hack")
plt.plot(n_positions_list[beg:], anchor_condition, c="C2", label="sample condition")
plt.plot(n_positions_list[beg:], frame_condition, c="C1", label="anchor condition")
plt.plot(n_positions_list, both_conditions, c="C3", label="both conditions")
@@ -156,23 +157,21 @@
probabilities2 = []
for n_anchors in n_anchors_list:
probabilities.append([
h.probability_upper_bound_any_measurements(
params["n_dimensions"],
params["n_constraints"],
n,
position_wise=False,
n_anchors=n_anchors,
n_measurements=n_measurements) for n in n_positions_list
h.probability_upper_bound_any_measurements(params["n_dimensions"],
params["n_constraints"],
n,
position_wise=False,
n_anchors=n_anchors,
n_measurements=n_measurements) for n in n_positions_list
])
for n_anchors in n_anchors_list:
probabilities2.append([
h.probability_upper_bound_any_measurements(
params["n_dimensions"],
params["n_constraints"],
n,
position_wise=True,
n_anchors=n_anchors,
n_measurements=n_measurements) for n in n_positions_list
h.probability_upper_bound_any_measurements(params["n_dimensions"],
params["n_constraints"],
n,
position_wise=True,
n_anchors=n_anchors,
n_measurements=n_measurements) for n in n_positions_list
])

fig, ax = plt.subplots(figsize=(6, 6))
28 changes: 14 additions & 14 deletions hypothesis.py
@@ -393,8 +393,10 @@ def matrix_rank_experiment(params):
for r in range(params["n_repetitions"]):
frame = get_frame(params["n_constraints"], n_positions)
try:
idx_a, idx_f = random_indexes(
n_anchors, n_positions, n_measurements, one_per_time=params["one_per_time"])
idx_a, idx_f = random_indexes(n_anchors,
n_positions,
n_measurements,
one_per_time=params["one_per_time"])
if params["full_matrix"]:
constraints = get_full_matrix(idx_a, idx_f, anchors, frame)
else:
@@ -448,18 +450,16 @@ def plot_results(

f, ax = plt.subplots()
for a_idx, n_anchors in enumerate(params["n_anchors_list"]):
plt.plot(
x,
np.mean(ranks[:, a_idx, :], axis=1) / max_rank,
label="mean rank, {} anchors".format(n_anchors),
color="C{}".format(a_idx),
linestyle='dashed')
plt.step(
x,
np.sum(ranks[:, a_idx, :] >= max_rank, axis=1) / n_repetitions,
label="probability, {} anchors".format(n_anchors),
color="C{}".format(a_idx),
where='post')
plt.plot(x,
np.mean(ranks[:, a_idx, :], axis=1) / max_rank,
label="mean rank, {} anchors".format(n_anchors),
color="C{}".format(a_idx),
linestyle='dashed')
plt.step(x,
np.sum(ranks[:, a_idx, :] >= max_rank, axis=1) / n_repetitions,
label="probability, {} anchors".format(n_anchors),
color="C{}".format(a_idx),
where='post')
if "fixed_n_measurements" in params:
plt.xlabel("number of positions")
else:
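A small sketch of the plotting convention in `plot_results` above: dashed lines for the mean normalized rank, step plots (`where='post'`, so each value holds until the next x) for the empirical success probability. The data here is made up:

```python
import numpy as np
import matplotlib.pyplot as plt

x = np.arange(2, 8)  # e.g. number of positions
mean_rank = np.array([0.5, 0.7, 0.85, 0.95, 1.0, 1.0])
probability = np.array([0.0, 0.2, 0.6, 0.9, 1.0, 1.0])
plt.plot(x, mean_rank, color="C0", linestyle='dashed', label="mean rank")
plt.step(x, probability, color="C0", where='post', label="probability")
plt.legend()
plt.show()
```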
16 changes: 8 additions & 8 deletions plotting_tools.py
@@ -48,11 +48,12 @@ def add_plot_decoration(label, parameters):
plt.gca().xaxis.tick_bottom()


def add_scalebar(ax, size=5, loc='lower left'):
def add_scalebar(ax, size=5, size_vertical=1, loc='lower left'):
""" Add a scale bar to the plot.
:param ax: axis to use.
:param size: size of scale bar.
:param size_vertical: height (thickness) of the bar
:param loc: location (same syntax as for matplotlib legend)
"""
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar
Expand All @@ -65,7 +66,7 @@ def add_scalebar(ax, size=5, loc='lower left'):
pad=0.1,
color='black',
frameon=False,
size_vertical=1,
size_vertical=size_vertical,
fontproperties=fontprops)
ax.add_artist(scalebar)

@@ -171,12 +172,11 @@ def plot_noise(key,
pol = np.poly1d(z)
print(("anchors {}" if anchors else "noise: {}").format(second_dim[idx]))
print("fitted slope: {:.2f}".format(z[0]))
ax1.loglog(
measurements,
np.exp(pol(np.log(measurements))),
c=plot[0].get_color(),
label=("{} anchors" if anchors else r"noise: {}").format(second_dim[idx]),
linestyle=next(linecycler))
ax1.loglog(measurements,
np.exp(pol(np.log(measurements))),
c=plot[0].get_color(),
label=("{} anchors" if anchors else r"noise: {}").format(second_dim[idx]),
linestyle=next(linecycler))

plt.xlabel("number of measurements")
if error_type == "errors":
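A hedged usage sketch of the extended `add_scalebar` signature introduced above (the plotted content is arbitrary):

```python
import matplotlib.pyplot as plt
from plotting_tools import add_scalebar

fig, ax = plt.subplots()
ax.plot([0, 20], [0, 10])
# 5-unit scale bar; size_vertical now controls the bar thickness.
add_scalebar(ax, size=5, size_vertical=0.5, loc='lower left')
plt.show()
```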
2 changes: 1 addition & 1 deletion requirements.txt
@@ -4,7 +4,7 @@ cvxpy>=1.0.6
cvxopt>=1.1.9
matplotlib>=3.0.2
sphinx==1.8.4
yapf>=0.26.0
yapf==0.28.0
# for notebook testing.
jupyter>=1.0.0
nbformat>=4.4.0
28 changes: 16 additions & 12 deletions simulation.py
@@ -135,8 +135,10 @@ def run_simulation(parameters, outfolder=None, solver=None, verbose=False):
basis, D_topright = get_measurements(trajectory, environment.anchors, n_samples=n_positions)
distances = np.sqrt(D_topright)
D_topright = add_noise(D_topright, noise_sigma, parameters["noise_to_square"])
mask = create_mask(
n_positions, n_anchors, strategy=parameters['sampling_strategy'], n_missing=n_missing)
mask = create_mask(n_positions,
n_anchors,
strategy=parameters['sampling_strategy'],
n_missing=n_missing)
if parameters['measure_distances']:
squared_distances.extend(D_topright.flatten().tolist())
D_topright = np.multiply(D_topright, mask)
Expand All @@ -145,26 +147,31 @@ def run_simulation(parameters, outfolder=None, solver=None, verbose=False):
assert h.limit_condition(np.sort(np.sum(mask, axis=0))[::-1], DIM + 1,
n_complexity), "insufficient rank"
if (solver is None) or (solver == semidefRelaxationNoiseless):
X = semidefRelaxationNoiseless(
D_topright, environment.anchors, basis, chosen_solver=cvxpy.CVXOPT)
X = semidefRelaxationNoiseless(D_topright,
environment.anchors,
basis,
chosen_solver=cvxpy.CVXOPT)
P_hat = X[:DIM, DIM:]
elif solver == 'rightInverseOfConstraints':
X = rightInverseOfConstraints(D_topright, environment.anchors, basis)
P_hat = X[:DIM, DIM:]
elif solver == 'alternativePseudoInverse':
P_hat = alternativePseudoInverse(D_topright, environment.anchors, basis)
elif solver == 'weightedPseudoInverse':
P_hat = alternativePseudoInverse(
D_topright, environment.anchors, basis, weighted=True)
P_hat = alternativePseudoInverse(D_topright,
environment.anchors,
basis,
weighted=True)
else:
raise ValueError(
'Solver needs to be "semidefRelaxationNoiseless", "rightInverseOfConstraints"'
' or "alternativePseudoInverse"')

# calculate reconstruction error with respect to distances
trajectory_estimated = Trajectory(coeffs=P_hat)
_, D_estimated = get_measurements(
trajectory_estimated, environment.anchors, n_samples=n_positions)
_, D_estimated = get_measurements(trajectory_estimated,
environment.anchors,
n_samples=n_positions)
estimated_distances = np.sqrt(D_estimated)

robust_add(errors, indexes, np.linalg.norm(P_hat - trajectory.coeffs))
@@ -253,10 +260,7 @@ def read_results(filestart):
new_array = np.load(full_path, allow_pickle=False)
if key in results.keys():
old_array = results[key]
# if old_array.shape == new_array.shape: # this should not be needed if we always add new axis
results[key] = np.stack([old_array, new_array], axis=-1)
# else:
# results[key] = np.concatenate([old_array, new_array[..., np.newaxis]], axis=-1)
results[key] = np.stack([old_array, new_array[..., np.newaxis]], axis=-1)
else:
print('new key:', key)
results[key] = new_array[..., np.newaxis]
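A hedged sketch of the masking step in `run_simulation`: `create_mask` itself is not shown in this diff, but the surrounding code (`np.multiply(D_topright, mask)` and `np.sum(mask, axis=0)`) suggests a 0/1 matrix marking kept measurements; that convention is an assumption here:

```python
import numpy as np

rng = np.random.default_rng(1)
D_topright = rng.uniform(1.0, 4.0, size=(5, 3))  # squared distances: 5 positions x 3 anchors
mask = (rng.random(D_topright.shape) > 0.2).astype(float)  # 1 = measured, 0 = missing (assumed)
D_masked = np.multiply(D_topright, mask)  # missing entries zeroed out, as in run_simulation
print(np.sum(mask, axis=0))  # measurements per anchor, as used in the rank condition
```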
16 changes: 8 additions & 8 deletions trajectory.py
@@ -12,6 +12,8 @@
from matplotlib.patches import Circle
from global_variables import DIM, TMAX, TAU, ROBOT_WIDTH, EPSILON

import copy


class Trajectory(object):
""" Trajectory class.
@@ -22,7 +24,6 @@ class Trajectory(object):
:member model: trajectory model,
either bandlimited, full_bandlimited (both sines and cosines) or polynomial.
"""

def __init__(self,
n_complexity=3,
dim=DIM,
@@ -60,7 +61,7 @@ def __init__(self,

def copy(self):
new = Trajectory(self.n_complexity, self.dim, self.model, self.period, coeffs=np.copy(self.coeffs))
new.params = copy(self.params)
new.params = copy.deepcopy(self.params)
return new

def get_times(self, n_samples):
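The `copy.deepcopy` fix above addresses a real bug: `copy(self.params)` called the `copy` module itself (a TypeError), and even `copy.copy` would leave mutable parameter values shared between trajectories. A minimal illustration:

```python
import copy

params = {"key": [1, 2]}
shallow = copy.copy(params)
deep = copy.deepcopy(params)
params["key"].append(3)
print(shallow["key"])  # [1, 2, 3] -- the shallow copy shares the list
print(deep["key"])     # [1, 2]    -- the deep copy is independent
```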
@@ -583,12 +584,11 @@ def get_left_and_right_arcs(self,
if plot:
plt.figure()
plt.plot(np.cumsum(ds_left), np.cumsum(ds_right), label="continuous")
plt.scatter(
np.cumsum(distances_left),
np.cumsum(distances_right),
label="discretized ({})".format(len(distances_left)),
marker='x',
color='C1')
plt.scatter(np.cumsum(distances_left),
np.cumsum(distances_right),
label="discretized ({})".format(len(distances_left)),
marker='x',
color='C1')
plt.legend()
plt.show()
print("minimum distance traveled by center: {:.4f}m".format(np.min((distances_left + distances_right) / 2)))
