Skip to content
Snippets Groups Projects
Commit 4ec0ad08 authored by Benjamin Berkels's avatar Benjamin Berkels
Browse files

Upload New File

parent 1b96bd25
No related branches found
No related tags found
No related merge requests found
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import imageio
from ex5 import GradientDescent
def Ja(x, f, h, lambda_):
    """Quadratic (Tikhonov) denoising energy.

    J(x) = ||x - f||^2 + (lambda_ / h^2) * sum of squared forward
    differences of x along both image axes.

    Parameters:
        x: current image iterate (2-D array).
        f: noisy input image, same shape as x.
        h: grid spacing used to scale the difference quotients.
        lambda_: regularization weight.

    Returns:
        Scalar energy value.
    """
    fidelity = np.sum((x - f) ** 2)
    diff_rows = x[1:, :] - x[:-1, :]
    diff_cols = x[:, 1:] - x[:, :-1]
    smoothness = np.sum(diff_rows ** 2) + np.sum(diff_cols ** 2)
    return fidelity + lambda_ / h ** 2 * smoothness
def DJa(x, f, h, lambda_):
    """Gradient of the quadratic energy Ja with respect to x.

    Each forward difference d = x[i+1] - x[i] contributes
    -w*d to the gradient at pixel i and +w*d at pixel i+1,
    with w = 2*lambda_/h^2 (and analogously along columns).

    Parameters:
        x: current image iterate (2-D array).
        f: noisy input image, same shape as x.
        h: grid spacing used to scale the difference quotients.
        lambda_: regularization weight.

    Returns:
        Array of the same shape as x holding dJa/dx.
    """
    weight = 2.0 * lambda_ / h ** 2
    diff_rows = x[1:, :] - x[:-1, :]
    diff_cols = x[:, 1:] - x[:, :-1]
    grad = 2.0 * (x - f)
    grad[:-1, :] -= weight * diff_rows
    grad[1:, :] += weight * diff_rows
    grad[:, :-1] -= weight * diff_cols
    grad[:, 1:] += weight * diff_cols
    return grad
def Jb(x, f, h, lambda_, epsilon):
    """Regularized total-variation denoising energy.

    J(x) = ||x - f||^2 + (lambda_ / h) * sum of sqrt(d^2 + epsilon^2)
    over all forward differences d of x along both image axes.
    epsilon > 0 smooths the non-differentiable |d| at d = 0.

    Parameters:
        x: current image iterate (2-D array).
        f: noisy input image, same shape as x.
        h: grid spacing used to scale the difference quotients.
        lambda_: regularization weight.
        epsilon: TV smoothing parameter.

    Returns:
        Scalar energy value.
    """
    diff_rows = x[1:, :] - x[:-1, :]
    diff_cols = x[:, 1:] - x[:, :-1]
    tv = np.sum(np.sqrt(diff_rows ** 2 + epsilon ** 2))
    tv += np.sum(np.sqrt(diff_cols ** 2 + epsilon ** 2))
    return np.sum((x - f) ** 2) + lambda_ / h * tv
def DJb(x, f, h, lambda_, epsilon):
    """Gradient of the regularized-TV energy Jb with respect to x.

    d/dd sqrt(d^2 + eps^2) = d / sqrt(d^2 + eps^2); each forward
    difference d = x[i+1] - x[i] therefore contributes
    -(lambda_/h) * d/sqrt(d^2+eps^2) at pixel i and the opposite
    sign at pixel i+1 (and analogously along columns).

    Parameters:
        x: current image iterate (2-D array).
        f: noisy input image, same shape as x.
        h: grid spacing used to scale the difference quotients.
        lambda_: regularization weight.
        epsilon: TV smoothing parameter.

    Returns:
        Array of the same shape as x holding dJb/dx.
    """
    weight = lambda_ / h
    diff_rows = x[1:, :] - x[:-1, :]
    diff_cols = x[:, 1:] - x[:, :-1]
    flow_rows = weight * diff_rows / np.sqrt(diff_rows ** 2 + epsilon ** 2)
    flow_cols = weight * diff_cols / np.sqrt(diff_cols ** 2 + epsilon ** 2)
    grad = 2.0 * (x - f)
    grad[:-1, :] -= flow_rows
    grad[1:, :] += flow_rows
    grad[:, :-1] -= flow_cols
    grad[:, 1:] += flow_cols
    return grad
def main():
    """Denoise a test image with two variational models and show the results.

    Loads a grayscale image, adds uniform noise, minimizes the quadratic
    energy Ja and the regularized-TV energy Jb via GradientDescent, and
    displays input and both results side by side.
    """
    # Load input image and scale intensities to [0, 1]
    print('reading file')
    f0 = imageio.imread('https://www.cs.rug.nl/svcg/uploads/Shapes/dskel_mandrill_orig.png', as_gray=True)/255
    N = f0.shape[0]
    M = f0.shape[1]
    # Fixed seed so the noisy input (and hence the results) is reproducible
    np.random.seed(42)
    # Additive uniform noise in [-0.25, 0.25]
    f = f0 + 0.5 * (np.random.rand(*f0.shape) - 0.5)
    # Copy so a GradientDescent that updates its iterate in place
    # cannot silently modify the displayed input image f
    x0 = f.copy()
    # Set parameters
    lambda_ = 0.0005
    sigma = 0.5
    beta = 0.5
    # Grid spacing: unit square sampled at max(N, M) points per side
    h = 1 / (max(N, M) - 1)
    epsilon = .001
    # First functional: quadratic (Tikhonov) regularization
    def Ea(x): return Ja(x, f, h, lambda_)
    def DEa(x): return DJa(x, f, h, lambda_)
    xa = GradientDescent(x0, Ea, DEa, beta, sigma, 1000, 0.001)
    # Second functional: regularized total variation
    def Eb(x): return Jb(x, f, h, lambda_, epsilon)
    def DEb(x): return DJb(x, f, h, lambda_, epsilon)
    xb = GradientDescent(x0, Eb, DEb, beta, sigma, 1000, 0.001)
    # Display input and results. Pass the colormap by name: plt.cm.get_cmap
    # was deprecated in Matplotlib 3.7 and removed in 3.9.
    plt.subplot(1, 3, 1)
    plt.imshow(f, interpolation='nearest', cmap='gray', vmin=0, vmax=1)
    plt.axis('off')
    plt.title('Input image f')
    plt.subplot(1, 3, 2)
    plt.imshow(xa, interpolation='nearest', cmap='gray', vmin=0, vmax=1)
    plt.axis('off')
    plt.title('Result A')
    plt.subplot(1, 3, 3)
    plt.imshow(xb, interpolation='nearest', cmap='gray', vmin=0, vmax=1)
    plt.axis('off')
    plt.title('Result B')
    plt.show()
if __name__ == '__main__':
    main()
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment