Use the generalized finite differences formulas

This example shows how to use generalized finite difference (F.D.) formulas.

References

    1. Baudin (2023). Méthodes numériques. Dunod.

import numericalderivative as nd
import numpy as np
import pylab as pl
import matplotlib.colors as mcolors
import tabulate

Compute the first derivative using a central F.D. formula

This is the function we want to compute the derivative of.

def scaled_exp(x):
    """Function whose derivatives we approximate: exp(-x / 1e6)."""
    scale = 1.0e6
    return np.exp(-x / scale)

Use the F.D. formula for f'(x)

x = 1.0  # Point where the derivative is evaluated
differentiation_order = 1  # We approximate the first derivative
formula_accuracy = 2  # Order of accuracy of the formula
finite_difference = nd.GeneralFiniteDifference(
    scaled_exp, x, differentiation_order, formula_accuracy, "central"
)
# Evaluate the central formula with an arbitrary first-guess step.
step = 1.0e-3  # A first guess
f_prime_approx = finite_difference.compute(step)
print(f"Approximate f'(x) = {f_prime_approx}")
Approximate f'(x) = -9.999989725173464e-07

To check our result, we define the exact first derivative.

def scaled_exp_prime(x):
    """Exact first derivative of scaled_exp: -exp(-x / 1e6) / 1e6."""
    scale = 1.0e6
    return -np.exp(-x / scale) / scale
Compute the exact derivative and the absolute error.

# Evaluate the exact derivative at x and measure the absolute error
# of the finite difference approximation.
f_prime_exact = scaled_exp_prime(x)
absolute_error = abs(f_prime_approx - f_prime_exact)
print(f"Exact f'(x) = {f_prime_exact}")
print(f"Absolute error = {absolute_error}")
Exact f'(x) = -9.999990000005e-07
Absolute error = 2.7483153726165933e-14

Define the error function: this will be useful later.

def compute_absolute_error(
    step,
    x=1.0,
    differentiation_order=1,
    formula_accuracy=2,
    direction="central",
    verbose=True,
):
    """Return the absolute error of the F.D. approximation for a given step.

    Parameters
    ----------
    step : float
        The finite difference step.
    x : float
        The point where the derivative is evaluated.
    differentiation_order : int
        The order of the derivative to approximate.
    formula_accuracy : int
        The order of accuracy of the F.D. formula.
    direction : str
        The direction of the formula, e.g. "central" or "forward".
    verbose : bool
        If True, print the approximate and exact derivatives and the error.

    Returns
    -------
    absolute_error : float
        Absolute difference between the approximate and the exact derivative.
    """
    # NOTE(review): the reference value is always the *first* derivative,
    # even when differentiation_order != 1 — confirm this is intended.
    formula = nd.GeneralFiniteDifference(
        scaled_exp, x, differentiation_order, formula_accuracy, direction
    )
    approximation = formula.compute(step)
    exact = scaled_exp_prime(x)
    absolute_error = abs(approximation - exact)
    if verbose:
        print(f"Approximate f'(x) = {approximation}")
        print(f"Exact f'(x) = {exact}")
        print(f"Absolute error = {absolute_error}")
    return absolute_error

Compute the approximately optimal step for the central F.D. formula

This step depends on the second derivative. Firstly, we assume that this is unknown and use a first guess of it, equal to 1.

differentiation_order = 1
formula_accuracy = 2
direction = "central"
finite_difference = nd.GeneralFiniteDifference(
    scaled_exp, x, differentiation_order, formula_accuracy, direction
)
# Use a rough first guess for the second derivative magnitude.
second_derivative_value = 1.0  # A first guess
step, absolute_error = finite_difference.compute_step(second_derivative_value)
print(f"Approximately optimal step (using f''(x) = 1) = {step}")
print(f"Approximately absolute error = {absolute_error}")
# Bug fix: the second positional parameter of compute_absolute_error is x,
# so passing True positionally silently set x = True; pass verbose by keyword.
_ = compute_absolute_error(step, verbose=True)
Approximately optimal step (using f''(x) = 1) = 8.733476581980381e-06
Approximately absolute error = 3.8136806603999814e-11
Approximate f'(x) = -9.999979182323467e-07
Exact f'(x) = -9.999990000005e-07
Absolute error = 1.081768153431351e-12

We see that the new step is much better than our initial guess: the approximately optimal step is much smaller, which leads to a smaller absolute error.

In our particular example, the second derivative is known: let's use this information and compute the exactly optimal step.

def scaled_exp_2nd_derivative(x):
    """Exact second derivative of scaled_exp: exp(-x / 1e6) / 1e12."""
    scale = 1.0e6
    return np.exp(-x / scale) / scale**2
second_derivative_value = scaled_exp_2nd_derivative(x)
print(f"Exact second derivative f''(x) = {second_derivative_value}")
step, absolute_error = finite_difference.compute_step(second_derivative_value)
# Bug fix: the message claimed f''(x) = 1, but the exact second
# derivative is used in this cell.
print(f"Approximately optimal step (using exact f''(x)) = {step}")
print(f"Approximately absolute error = {absolute_error}")
# Bug fix: pass verbose by keyword; positionally, True was bound to x.
_ = compute_absolute_error(step, verbose=True)
Exact second derivative f''(x) = 9.999990000005e-13
Approximately optimal step (using f''(x) = 1) = 0.08733479493139723
Approximately absolute error = 3.813679389173307e-15
Approximate f'(x) = -9.999989997748166e-07
Exact f'(x) = -9.999990000005e-07
Absolute error = 2.256834584664358e-16

Compute the coefficients of several central F.D. formulas

We would like to compute the coefficients of a collection of central finite difference formulas.

We consider differentiation orders up to the sixth derivative and central F.D. formulas up to order 8.

# Differentiation orders 1..6, central formulas of accuracy 2, 4, 6 and 8.
maximum_differentiation_order = 6
formula_accuracy_list = list(range(2, 10, 2))

We want to print the result as a table. This is why we need to align the coefficients on the columns of the table.

First pass: compute the maximum number of coefficients.

# First pass: collect the coefficients of every formula and record the
# widest stencil, so that the table columns can be aligned later.
maximum_number_of_coefficients = 0
direction = "central"
coefficients_list = []
for differentiation_order in range(1, 1 + maximum_differentiation_order):
    for formula_accuracy in formula_accuracy_list:
        formula = nd.GeneralFiniteDifference(
            scaled_exp, x, differentiation_order, formula_accuracy, direction
        )
        coefficients = formula.get_coefficients()
        coefficients_list.append(coefficients)
        if len(coefficients) > maximum_number_of_coefficients:
            maximum_number_of_coefficients = len(coefficients)

Second pass: build the table rows, aligning the coefficients on their stencil index.

# Second pass: build one row per formula, left-padding so that each
# coefficient sits in the column of its stencil index.
data = []
index = 0
for differentiation_order in range(1, 1 + maximum_differentiation_order):
    for formula_accuracy in formula_accuracy_list:
        coefficients = coefficients_list[index]
        row = [differentiation_order, formula_accuracy]
        # Center the (odd-length) central stencil inside the widest one.
        padding_number = maximum_number_of_coefficients // 2 - len(coefficients) // 2
        row.extend([""] * padding_number)
        for coefficient in coefficients:
            row.append(f"{coefficient:.3f}")
        data.append(row)
        index += 1
headers = ["Derivative", "Accuracy"]
# Bug fix: there are exactly maximum_number_of_coefficients stencil columns;
# the previous "range(1 + maximum_number_of_coefficients)" produced one
# spurious trailing header column.
for i in range(maximum_number_of_coefficients):
    headers.append(i - maximum_number_of_coefficients // 2)
tabulate.tabulate(data, headers, tablefmt="html")
Derivative Accuracy-6 -5 -4 -3 -2 -1 0 1 2 3 4 5 6
1 2 -0.5 0 0.5
1 4 0.083 -0.667 0 0.667-0.083
1 6 -0.0170.150 -0.75 -0 0.75 -0.15 0.017
1 8 0.004 -0.0380.200 -0.8 0 0.8 -0.2 0.038-0.004
2 2 0.5 -1 0.5
2 4 -0.042 0.667-1.25 0.667-0.042
2 6 0.006 -0.075 0.75 -1.361 0.75 -0.075 0.006
2 8 -0.0010.013 -0.100 0.8 -1.424 0.8 -0.1 0.013-0.001
3 2 -0.083 0.167 0 -0.167 0.083
3 4 0.021 -0.167 0.271 0 -0.271 0.167-0.021
3 6 -0.0050.050 -0.235 0.339-0 -0.339 0.235-0.05 0.005
3 8 0.001 -0.0140.081 -0.289 0.386-0 -0.386 0.289-0.081 0.014-0.001
4 2 0.042 -0.167 0.25 -0.167 0.042
4 4 -0.0070.083 -0.271 0.389-0.271 0.083-0.007
4 6 0.001 -0.0170.117 -0.339 0.474-0.339 0.117-0.017 0.001
4 8 -0.0000.003 -0.0270.144 -0.386 0.531-0.386 0.144-0.027 0.003-0
5 2 -0.0040.017 -0.021-0 0.021-0.017 0.004
5 4 0.001 -0.0130.036 -0.04 0 0.04 -0.036 0.012-0.001
5 6 -0.0000.004 -0.0230.054 -0.056 0 0.056-0.054 0.023-0.004 0
5 80.000 -0.0010.009 -0.0330.070 -0.069 0 0.069-0.07 0.033-0.009 0.001 -0
6 2 0.001 -0.008 0.021-0.028 0.021-0.008 0.001
6 4 -0.0000.004 -0.018 0.04 -0.052 0.04 -0.018 0.004-0
6 6 0.000 -0.0010.008 -0.027 0.056-0.071 0.056-0.027 0.008-0.001 0
6 8-0.0000.000 -0.0020.011 -0.035 0.069-0.086 0.069-0.035 0.011-0.002 0 -0


We notice that the sum of the coefficients is zero. Furthermore, consider the properties of the coefficients with respect to the center coefficient of index \(i = 0\).

  • If the differentiation order \(d\) is odd (e.g. \(d = 3\)), then the symmetrical coefficients have opposite signs. In this case, \(c_0 = 0\).

  • If the differentiation order \(d\) is even (e.g. \(d = 4\)), then the symmetrical coefficients are equal.

Make a plot of the coefficients depending on the indices.

# Plot the central F.D. coefficients versus their stencil index, one curve
# per (differentiation order, accuracy) pair.
maximum_differentiation_order = 4
formula_accuracy_list = [2, 4, 6]
direction = "central"
color_list = list(mcolors.TABLEAU_COLORS.keys())
marker_list = ["o", "v", "^", "<", ">"]
pl.figure()
pl.title("Central finite difference")
for differentiation_order in range(1, 1 + maximum_differentiation_order):
    # Color encodes the differentiation order, marker encodes the accuracy.
    for accuracy_index, formula_accuracy in enumerate(formula_accuracy_list):
        formula = nd.GeneralFiniteDifference(
            scaled_exp, x, differentiation_order, formula_accuracy, direction
        )
        coefficients = formula.get_coefficients()
        imin, imax = formula.get_indices_min_max()
        this_label = f"$d = {differentiation_order}$, $p = {formula_accuracy}$"
        pl.plot(
            range(imin, imax + 1),
            coefficients,
            "-" + marker_list[accuracy_index],
            color=color_list[differentiation_order],
            label=this_label,
        )
pl.xlabel(r"$i$")
pl.ylabel(r"$c_i$")
pl.legend(bbox_to_anchor=(1, 1))
pl.tight_layout()
Central finite difference

Compute the coefficients of several forward F.D. formulas

We would like to compute the coefficients of a collection of forward finite difference formulas.

We consider differentiation orders up to the sixth derivative and forward F.D. formulas up to order 7.

# Differentiation orders 1..6, forward formulas of accuracy 1 through 7.
maximum_differentiation_order = 6
formula_accuracy_list = [1, 2, 3, 4, 5, 6, 7]

We want to print the result as a table. This is why we need to align the coefficients on the columns of the table.

First pass: compute the maximum number of coefficients.

# Forward stencils all start at index 0, so no padding is needed and the
# rows can be built in the same pass that finds the maximum stencil width.
maximum_number_of_coefficients = 0
direction = "forward"
data = []
for differentiation_order in range(1, 1 + maximum_differentiation_order):
    for formula_accuracy in formula_accuracy_list:
        finite_difference = nd.GeneralFiniteDifference(
            scaled_exp, x, differentiation_order, formula_accuracy, direction
        )
        coefficients = finite_difference.get_coefficients()
        maximum_number_of_coefficients = max(
            maximum_number_of_coefficients, len(coefficients)
        )
        row = [differentiation_order, formula_accuracy]
        for coefficient in coefficients:
            row.append(f"{coefficient:.3f}")
        data.append(row)
        # Bug fix: removed a stray "index += 1" copied from the central-table
        # cell; "index" is never used in this loop.
headers = ["Derivative", "Accuracy"]
# Bug fix: there are exactly maximum_number_of_coefficients columns
# (stencil indices 0 .. max-1); the previous "range(1 + ...)" added a
# spurious trailing header.
for i in range(maximum_number_of_coefficients):
    headers.append(i)
tabulate.tabulate(data, headers, tablefmt="html")
Derivative Accuracy 0 1 2 3 4 5 6 7 8 9 10 11 12
1 1-1 1
1 2-1.5 2 -0.5
1 3-1.833 3 -1.5 0.333
1 4-2.083 4 -3 1.333 -0.25
1 5-2.283 5 -5 3.333 -1.25 0.2
1 6-2.45 6 -7.5 6.667 -3.75 1.2 -0.167
1 7-2.593 7 -10.5 11.667 -8.75 4.2 -1.167 0.143
2 1 0.5 -1 0.5
2 2 1 -2.5 2 -0.5
2 3 1.458 -4.333 4.75 -2.333 0.458
2 4 1.875 -6.417 8.917 -6.5 2.542 -0.417
2 5 2.256 -8.7 14.625-14.111 8.25 -2.7 0.381
2 6 2.606-11.15 21.975-26.361 20.5 -10.05 2.831 -0.35
2 7 2.93 -13.743 31.05 -44.511 43.188-28.2 11.906 -2.943 0.324
3 1-0.167 0.5 -0.5 0.167
3 2-0.417 1.5 -2 1.167 -0.25
3 3-0.708 2.958 -4.917 4.083 -1.708 0.292
3 4-1.021 4.833 -9.604 10.333 -6.396 2.167 -0.312
3 5-1.343 7.089-16.371 21.611-17.674 8.933 -2.568 0.322
3 6-1.669 9.694-25.49 39.85 -40.472 27.172-11.688 2.928 -0.326
3 7-1.994 12.624-37.209 67.194-81.487 68.187-39.031 14.646 -3.255 0.326
4 1 0.042 -0.167 0.25 -0.167 0.042
4 2 0.125 -0.583 1.083 -1 0.458 -0.083
4 3 0.243 -1.292 2.854 -3.361 2.229 -0.792 0.118
4 4 0.389 -2.312 5.917 -8.465 7.333 -3.854 1.139 -0.146
4 5 0.557 -3.656 10.617-17.867 19.085-13.256 5.84 -1.489 0.168
4 6 0.742 -5.324 17.292-33.442 42.448-36.618 21.415 -8.164 1.837-0.185
4 7 0.942 -7.319 26.267-57.373 84.327-86.874 63.294-32.095 10.811-2.18 0.199
5 1-0.008 0.042 -0.083 0.083 -0.042 0.008
5 2-0.029 0.167 -0.396 0.5 -0.354 0.133 -0.021
5 3-0.064 0.41 -1.125 1.715 -1.569 0.862 -0.264 0.035
5 4-0.113 0.799 -2.486 4.438 -4.972 3.585 -1.625 0.424 -0.049
5 5-0.174 1.355 -4.713 9.634-12.767 11.38 -6.822 2.651 -0.605 0.062
5 6-0.249 2.098 -8.053 18.54 -28.353 30.083-22.407 11.557 -3.945 0.804-0.074
5 7-0.334 3.039-12.761 32.665-56.601 69.631-61.955 39.805-18.069 5.512-1.016 0.086
6 1 0.001 -0.008 0.021 -0.028 0.021 -0.008 0.001
6 2 0.006 -0.037 0.108 -0.174 0.167 -0.096 0.031 -0.004
6 3 0.014 -0.101 0.332 -0.621 0.726 -0.543 0.254 -0.068 0.008
6 4 0.026 -0.214 0.782 -1.671 2.301 -2.118 1.304 -0.518 0.12 -0.012
6 5 0.043 -0.388 1.567 -3.763 5.962 -6.512 4.966 -2.61 0.905-0.187 0.017
6 6 0.066 -0.637 2.809 -7.492 13.42 -16.952 15.406-10.068 4.634-1.43 0.266-0.023
6 7 0.094 -0.971 4.647-13.618 27.204-39.007 41.137-32.123 18.418-7.556 2.104-0.3570.028


We notice that the sum of the coefficients is zero.

Make a plot of the coefficients depending on the indices.

# Plot the forward F.D. coefficients versus their stencil index, one curve
# per (differentiation order, accuracy) pair.
maximum_differentiation_order = 4
formula_accuracy_list = [2, 4, 6]
direction = "forward"
color_list = list(mcolors.TABLEAU_COLORS.keys())
marker_list = ["o", "v", "^", "<", ">"]
pl.figure()
pl.title("Forward finite difference")
for differentiation_order in range(1, 1 + maximum_differentiation_order):
    # Color encodes the differentiation order, marker encodes the accuracy.
    for accuracy_index, formula_accuracy in enumerate(formula_accuracy_list):
        formula = nd.GeneralFiniteDifference(
            scaled_exp, x, differentiation_order, formula_accuracy, direction
        )
        coefficients = formula.get_coefficients()
        imin, imax = formula.get_indices_min_max()
        this_label = f"$d = {differentiation_order}$, $p = {formula_accuracy}$"
        pl.plot(
            range(imin, imax + 1),
            coefficients,
            "-" + marker_list[accuracy_index],
            color=color_list[differentiation_order],
            label=this_label,
        )
pl.xlabel(r"$i$")
pl.ylabel(r"$c_i$")
_ = pl.legend(bbox_to_anchor=(1, 1))
pl.tight_layout()
Forward finite difference

Total running time of the script: (0 minutes 0.400 seconds)