Naive squared error minimization [ex999.0]

import numpy as np
import matplotlib.pyplot as plt
from scipy.signal import find_peaks

# naive computation of a squared error, returns an estimate for x
# naive computation of a squared error, returns an estimate for x
def getSEAverage(y, nofX, J_MAX):
    """Evaluate the squared-error cost J(y; x) on a grid and locate its minimum.

    Parameters
    ----------
    y : np.ndarray
        1-D array of K observed samples.
    nofX : int
        Number of candidate x-values on the grid [0, 8].
    J_MAX : int
        Unused here; kept for interface compatibility (used only for plot scaling
        by the caller).

    Returns
    -------
    (k, x_range, J, ind_x) : tuple
        k       : sample indices 0..K-1
        x_range : the candidate x grid
        J       : squared-error cost for each candidate
        ind_x   : 1-element array holding the index of the minimizing candidate
    """
    K = y.shape[0]          # number of samples
    k = np.arange(K)        # (time or sample) index
    x_range = np.linspace(0, 8, num=nofX)  # range of x-values to be tested

    # Naive double loop kept on purpose (this is the teaching point);
    # equivalent to J = ((y[:, None] - x_range[None, :]) ** 2).sum(axis=0).
    J = np.zeros(nofX)      # squared error cost
    for i in range(nofX):   # sum up squared error for each possible x-value
        for ik in range(K):
            J[i] += (y[ik] - x_range[i]) ** 2

    # np.argmin is robust even when the minimum sits on the grid boundary,
    # where find_peaks(-J) would fail to report it (it only sees interior peaks).
    # Wrapped in an array so callers can keep indexing with ind_x[0].
    ind_x = np.array([np.argmin(J)])
    return (k, x_range, J, ind_x)

# Plots costs for each selection of x
# Plots costs for each selection of x
def plotFigsCR0(y, k, ind_x, x_range, J, J_MAX):
    """Plot the observations with the estimate (left) and the cost curve (right).

    Parameters
    ----------
    y : observed samples
    k : sample indices matching y
    ind_x : array whose first entry indexes the minimizing x in x_range
    x_range : candidate x grid
    J : cost for each candidate
    J_MAX : upper y-limit of the cost plot
    """
    # Scalar index of the minimizer: indexing with the 1-element array would
    # trip NumPy's array-to-scalar conversion deprecation in scalar contexts.
    i0 = ind_x[0]
    fig, axs = plt.subplots(1, 2, sharex='none', figsize=(8, 2))
    # 'use_line_collection' was removed from Axes.stem in Matplotlib 3.8
    # (its behavior has been the default since 3.6), so it must not be passed.
    axs[0].stem(k, y, basefmt=' ')
    axs[0].axhline(y=x_range[i0], ls='--', linewidth=1, c='black')
    axs[0].set(title='Observations $y$', xlabel='$k$')
    axs[0].set(ylim=(0, 10))
    axs[1].plot(x_range, J, lw=1, c='grey')
    axs[1].plot(x_range[i0], J[i0], 'ro-', label='line 1', linewidth=2)
    # Two-line annotation: mathtext has no '\\' line break, so join the two
    # math segments with a real newline instead.
    axs[1].text(x_range[i0], J[i0] + 10,
                r'$\hat{x}=' + str(round(x_range[i0], 2)) + r'$' + '\n'
                + r'$J(y;\hat{x}) =  ' + str(round(J[i0], 2)) + r'$')
    axs[1].axhline(y=J[i0], ls='--', color='red', linewidth=1)
    axs[1].plot([x_range[i0], x_range[i0]], [0, J[i0]], lw=1, c='black', ls='--')
    axs[1].set(title='Cost $J(y;x)$', xlabel='$x$')
    axs[1].set(ylim=(0, J_MAX))

# --- Demo: estimate x for a fixed sample set and visualize the cost ---
NOF_X = 100  # number of candidate x-values on the grid
J_MAX = 100  # upper limit of the cost axis (plot scaling only)

# observed samples
y = np.array([4.2, 4.6, 3.4, 2.9, 3.5, 3.1, 5.1])

k, x_range, J, ind_x = getSEAverage(y, NOF_X, J_MAX)
plotFigsCR0(y, k, ind_x, x_range, J, J_MAX)

Total running time of the script: (0 minutes 0.169 seconds)

Gallery generated by Sphinx-Gallery