from __future__ import division, print_function, absolute_import
r"""
Parameters used in test and benchmark methods.
Collections of test cases suitable for testing 1-dimensional root-finders
'original': The original benchmarking functions.
Real-valued functions of real-valued inputs on an interval
with a zero.
f1, .., f3 are continuous and infinitely differentiable
f4 has a left- and right- discontinuity at the root
f5 has a root at 1 replacing a 1st order pole
f6 is randomly positive on one side of the root,
randomly negative on the other.
f4 - f6 are not continuous at the root.
'aps': The test problems in the 1995 paper
TOMS "Algorithm 748: Enclosing Zeros of Continuous Functions"
by Alefeld, Potra and Shi. Real-valued functions of
real-valued inputs on an interval with a zero.
Suitable for methods which start with an enclosing interval, and
derivatives up to 2nd order.
'complex': Some complex-valued functions of complex-valued inputs.
No enclosing bracket is provided.
Suitable for methods which use one or more starting values, and
derivatives up to 2nd order.
The test cases are provided as a list of dictionaries. The dictionary
keys will be a subset of:
["f", "fprime", "fprime2", "args", "bracket", "smoothness",
"a", "b", "x0", "x1", "root", "ID"]
"""
# Sources:
# [1] Alefeld, G. E. and Potra, F. A. and Shi, Yixun,
# "Algorithm 748: Enclosing Zeros of Continuous Functions",
# ACM Trans. Math. Softw. Volume 21, Issue 3 (1995), pp. 327-344
# doi = {10.1145/210089.210111},
from random import random
import numpy as np
from scipy.optimize import zeros as cc
# "description" refers to the original functions
# Plain-English summary of the "original" test functions f2-f6.
# BUG FIX: the f6 line previously said "positive to left of 1, negative to
# right", which contradicts f6 itself (x < 1 -> -random(), x > 1 -> random()).
description = """
f2 is a symmetric parabola, x**2 - 1
f3 is a quartic polynomial with large hump in interval
f4 is step function with a discontinuity at 1
f5 is a hyperbola with vertical asymptote at 1
f6 has random values negative to left of 1, positive to right
of course these are not real problems. They just test how the
'good' solvers behave in bad circumstances where bisection is
really the best. A good solver should not be much worse than
bisection in such circumstance, while being faster for smooth
monotone sorts of functions.
"""
def f1(x):
    r"""Quadratic with roots at x = 0 and x = 1."""
    return (x - 1.) * x
def f1_fp(x):
    r"""First derivative of f1, i.e. 2x - 1."""
    return x + x - 1
def f1_fpp(x):
    r"""Second derivative of f1: constant 2 (f1 is a quadratic).

    `x` is accepted only so the signature matches the other fprime2 callables.
    """
    return 2
def f2(x):
    r"""Symmetric parabola x**2 - 1 with roots at x = -1 and x = 1."""
    square = x * x
    return square - 1
def f2_fp(x):
    r"""First derivative of f2, i.e. 2x."""
    return x + x
def f2_fpp(x):
    r"""Second derivative of f2: constant 2 (f2 is a parabola).

    `x` is accepted only so the signature matches the other fprime2 callables.
    """
    return 2
def f3(x):
    r"""Quartic with roots at x = 0, 1, 2 and 3 (x**4 - 6x**3 + 11x**2 - 6x)."""
    # Accumulate left-to-right so the evaluation order matches x*(x-1)*(x-2)*(x-3).
    prod = x
    for root in (1., 2., 3.):
        prod = prod * (x - root)
    return prod
def f3_fp(x):
    r"""First derivative of f3."""
    slope = 4 * x**3 - 18 * x**2 + 22 * x - 6
    return slope
def f3_fpp(x):
    r"""Second derivative of f3."""
    curvature = 12 * x**2 - 36 * x + 22
    return curvature
def f4(x):
    r"""Step-like piecewise linear map, discontinuous from both sides at the root x=1."""
    if x < 1:
        return -1.0 + .1 * x
    if x > 1:
        return 1.0 + .1 * x
    # Exactly at the discontinuity the function is defined to be 0.
    return 0
def f5(x):
    r"""Hyperbola 1/(1-x) whose pole at x=1 is patched with the value 0.

    Not continuous at the root x=1.
    """
    if x == 1:
        return 0
    return 1.0 / (1. - x)
# f6 memoizes its pseudo-random values. Without the cache, two calls with the
# same x would disagree, so it would be a "function with random values" rather
# than a fixed function that merely has random values.
_f6_cache = {}
def f6(x):
    r"""Memoized random-valued function: positive for x > 1, negative for x < 1, 0 at 1."""
    try:
        return _f6_cache[x]
    except KeyError:
        pass
    if x > 1:
        value = random()
    elif x < 1:
        value = -random()
    else:
        value = 0
    _f6_cache[x] = value
    return value
# Each Original test case has
# - a function and its two derivatives,
# - additional arguments,
# - a bracket enclosing a root,
# - the order of differentiability (smoothness) on this interval
# - a starting value for methods which don't require a bracket
# - the root (inside the bracket)
# - an Identifier of the test case
# Keys for the "original" test-case dictionaries; each row of _ORIGINAL_TESTS
# supplies these values positionally.
_ORIGINAL_TESTS_KEYS = ["f", "fprime", "fprime2", "args", "bracket", "smoothness", "x0", "root", "ID"]
_ORIGINAL_TESTS = [
    [f1, f1_fp, f1_fpp, (), [0.5, np.sqrt(3)], np.inf, 0.6, 1.0, "original.01.00"],
    [f2, f2_fp, f2_fpp, (), [0.5, np.sqrt(3)], np.inf, 0.6, 1.0, "original.02.00"],
    [f3, f3_fp, f3_fpp, (), [0.5, np.sqrt(3)], np.inf, 0.6, 1.0, "original.03.00"],
    [f4, None, None, (), [0.5, np.sqrt(3)], -1, 0.6, 1.0, "original.04.00"],
    [f5, None, None, (), [0.5, np.sqrt(3)], -1, 0.6, 1.0, "original.05.00"],
    # BUG FIX: the f6 row previously reused the ID "original.05.00",
    # so f5 and f6 results could not be told apart in reports.
    [f6, None, None, (), [0.5, np.sqrt(3)], -np.inf, 0.6, 1.0, "original.06.00"]
]
_ORIGINAL_TESTS_DICTS = [dict(zip(_ORIGINAL_TESTS_KEYS, testcase)) for testcase in _ORIGINAL_TESTS]
# ##################
# "APS" test cases
# Functions and test cases that appear in [1]
def aps01_f(x):
    r"""Sum of a sine and a linear term, sin(x) - x/2; smooth, root near 1.8955."""
    return np.sin(x) - 0.5 * x
def aps01_fp(x):
    r"""First derivative of aps01_f."""
    return np.cos(x) - 0.5
def aps01_fpp(x):
    r"""Second derivative of aps01_f."""
    return -np.sin(x)
def aps02_f(x):
    r"""Sum of cubed-reciprocal terms with poles at x = k**2 for k = 1..20.

    Near each root the 1st and 2nd derivatives are also close to 0.
    """
    k = np.arange(1, 21)
    return -2 * np.sum((2 * k - 5)**2 / (x - k**2)**3)
def aps02_fp(x):
    r"""First derivative of aps02_f."""
    k = np.arange(1, 21)
    return 6 * np.sum((2 * k - 5)**2 / (x - k**2)**4)
def aps02_fpp(x):
    r"""Second derivative of aps02_f.

    BUG FIX: d/dx of 6*sum(c/(x - k**2)**4) is -24*sum(c/(x - k**2)**5);
    the leading coefficient was previously +24, i.e. the wrong sign.
    """
    k = np.arange(1, 21)
    return -24 * np.sum((2 * k - 5)**2 / (x - k**2)**5)
def aps03_f(x, a, b):
    r"""a*x*exp(b*x): changes rapidly around its root at x = 0 when b is large."""
    growth = np.exp(b * x)
    return a * x * growth
def aps03_fp(x, a, b):
    r"""First derivative of aps03_f."""
    growth = np.exp(b * x)
    return a * (b * x + 1) * growth
def aps03_fpp(x, a, b):
    r"""Second derivative of aps03_f (equals a*b*(b*x + 2)*exp(b*x))."""
    growth = np.exp(b * x)
    return a * (b * (b * x + 1) + b) * growth
def aps04_f(x, n, a):
    r"""Medium-degree polynomial x**n - a with a root at a**(1/n)."""
    power = x**n
    return power - a
def aps04_fp(x, n, a):
    r"""First derivative of aps04_f (`a` is unused but kept for a uniform signature)."""
    slope = n * x**(n - 1)
    return slope
def aps04_fpp(x, n, a):
    r"""Second derivative of aps04_f (`a` is unused but kept for a uniform signature)."""
    curvature = n * (n - 1) * x**(n - 2)
    return curvature
def aps05_f(x):
    r"""Simple trigonometric function sin(x) - 1/2, with a root at pi/6."""
    return np.sin(x) - 0.5
def aps05_fp(x):
    r"""First derivative of aps05_f."""
    return np.cos(x)
def aps05_fpp(x):
    r"""Second derivative of aps05_f."""
    return -np.sin(x)
def aps06_f(x, n):
    r"""2*x*exp(-n) - 2*exp(-n*x) + 1: jumps from -1 toward 1 across x=0 as n grows."""
    decay = np.exp(-n)
    return 2 * x * decay - 2 * np.exp(-n * x) + 1
def aps06_fp(x, n):
    r"""First derivative of aps06_f."""
    const_part = 2 * np.exp(-n)
    return const_part + 2 * n * np.exp(-n * x)
def aps06_fpp(x, n):
    r"""Second derivative of aps06_f."""
    coeff = -2 * n * n
    return coeff * np.exp(-n * x)
def aps07_f(x, n):
    r"""Upside-down parabola with height parametrized by n."""
    linear_coeff = 1 + (1 - n)**2
    return linear_coeff * x - (1 - n * x)**2
def aps07_fp(x, n):
    r"""First derivative of aps07_f."""
    linear_coeff = 1 + (1 - n)**2
    return linear_coeff + 2 * n * (1 - n * x)
def aps07_fpp(x, n):
    r"""Second derivative of aps07_f: constant -2*n**2 in x."""
    return -2 * n * n
def aps08_f(x, n):
    r"""Degree-n polynomial x**2 - (1 - x)**n."""
    complement = (1 - x)**n
    return x * x - complement
def aps08_fp(x, n):
    r"""First derivative of aps08_f."""
    complement_term = n * (1 - x)**(n - 1)
    return 2 * x + complement_term
def aps08_fpp(x, n):
    r"""Second derivative of aps08_f."""
    complement_term = n * (n - 1) * (1 - x)**(n - 2)
    return 2 - complement_term
def aps09_f(x, n):
    r"""Upside-down quartic with height parametrized by n."""
    linear_coeff = 1 + (1 - n)**4
    return linear_coeff * x - (1 - n * x)**4
def aps09_fp(x, n):
    r"""First derivative of aps09_f."""
    linear_coeff = 1 + (1 - n)**4
    return linear_coeff + 4 * n * (1 - n * x)**3
def aps09_fpp(x, n):
    r"""Second derivative of aps09_f.

    BUG FIX: d/dx of 4*n*(1 - n*x)**3 is 4*n * 3*(1 - n*x)**2 * (-n)
    = -12*n**2*(1 - n*x)**2; the coefficient was previously -12*n,
    one power of n short.
    """
    return -12 * n**2 * (1 - n * x)**2
def aps10_f(x, n):
    r"""Exponential plus a polynomial: exp(-n*x)*(x - 1) + x**n."""
    decay = np.exp(-n * x)
    return decay * (x - 1) + x**n
def aps10_fp(x, n):
    r"""First derivative of aps10_f."""
    decay = np.exp(-n * x)
    return decay * (-n * (x - 1) + 1) + n * x**(n - 1)
def aps10_fpp(x, n):
    r"""Second derivative of aps10_f.

    BUG FIX: the exponential factor is exp(-n*x)*(n**2*(x - 1) - 2*n);
    the final term inside the parentheses was previously `-n * x`
    (a stray `* x`) instead of `-n`, which is only correct at x == 1.
    """
    return np.exp(-n * x) * (-n * (-n * (x - 1) + 1) - n) + n * (n - 1) * x**(n - 2)
def aps11_f(x, n):
    r"""Rational function (n*x - 1)/((n - 1)*x): zero at x = 1/n, pole at x = 0."""
    numer = n * x - 1
    denom = (n - 1) * x
    return numer / denom
def aps11_fp(x, n):
    r"""First derivative of aps11_f."""
    lead = 1 / (n - 1)
    return lead / x**2
def aps11_fpp(x, n):
    r"""Second derivative of aps11_f."""
    lead = -2 / (n - 1)
    return lead / x**3
def aps12_f(x, n):
r"""n-th root of x, with a zero at x=n"""
return np.power(x, 1.0 / n) - np.power(n, 1.0 / n)
def aps12_fp(x, n):
    r"""First derivative of aps12_f."""
    exponent = (1.0 - n) / n
    return np.power(x, exponent) / n
def aps12_fpp(x, n):
    r"""Second derivative of aps12_f."""
    root_term = np.power(x, (1.0 - 2 * n) / n)
    return root_term * (1.0 / n) * (1.0 - n) / n
# Largest y for which exp(y) does not overflow a double.
_MAX_EXPABLE = np.log(np.finfo(float).max)
def aps13_f(x):
    r"""x*exp(-1/x**2), extended by 0 at x = 0; every derivative vanishes at the root."""
    if x == 0:
        return 0
    arg = 1 / x**2
    if arg > _MAX_EXPABLE:
        # exp(arg) would overflow; the true value underflows to 0 anyway.
        return 0
    return x / np.exp(arg)
def aps13_fp(x):
    r"""First derivative of aps13_f: exp(-1/x**2)*(1 + 2/x**2) away from 0, else 0."""
    if x == 0:
        return 0
    arg = 1 / x**2
    if arg > _MAX_EXPABLE:
        # exp(arg) would overflow; the true derivative underflows to 0 anyway.
        return 0
    return (1 + 2 / x**2) / np.exp(arg)
def aps13_fpp(x):
Loading ...