log(1-x^(1/10000))

Percentage Accurate: 97.6% → 99.6%
Time: 5.1s
Alternatives: 11
Speedup: 0.7×

Specification

\[0 \leq x \land x \leq 1\]
\[\begin{array}{l} \\ \log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right) \end{array} \]
(FPCore (x) :precision binary64 (log (- 1.0 (pow x (/ 1.0 10000.0)))))
double code(double x) {
	return log((1.0 - pow(x, (1.0 / 10000.0))));
}
real(8) function code(x)
    real(8), intent (in) :: x
    code = log((1.0d0 - (x ** (1.0d0 / 10000.0d0))))
end function
public static double code(double x) {
	return Math.log((1.0 - Math.pow(x, (1.0 / 10000.0))));
}
def code(x):
	return math.log((1.0 - math.pow(x, (1.0 / 10000.0))))
function code(x)
	return log(Float64(1.0 - (x ^ Float64(1.0 / 10000.0))))
end
function tmp = code(x)
	tmp = log((1.0 - (x ^ (1.0 / 10000.0))));
end
code[x_] := N[Log[N[(1.0 - N[Power[x, N[(1.0 / 10000.0), $MachinePrecision]], $MachinePrecision]), $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right)
\end{array}

Sampling outcomes in binary64 precision:

Local Percentage Accuracy vs x

The average percentage accuracy by input value. The horizontal axis shows the value of an input variable (named in the plot title); the vertical axis shows accuracy, where higher is better. Red represents the original program and blue represents Herbie's suggestion; these can be toggled with the buttons below the plot. The line shows the average, while the dots show individual samples.

Accuracy vs Speed

Herbie found 11 alternatives:

Alternative · Accuracy · Speedup
The plot shows the accuracy (vertical axis) and speed (horizontal axis) of each alternative; up and to the right is better. The red square shows the initial program, and each blue circle shows an alternative. The line shows the best available speed-accuracy tradeoffs.

Initial Program: 97.6% accurate, 1.0× speedup

\[\begin{array}{l} \\ \log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right) \end{array} \]
(FPCore (x) :precision binary64 (log (- 1.0 (pow x (/ 1.0 10000.0)))))
double code(double x) {
	return log((1.0 - pow(x, (1.0 / 10000.0))));
}
real(8) function code(x)
    real(8), intent (in) :: x
    code = log((1.0d0 - (x ** (1.0d0 / 10000.0d0))))
end function
public static double code(double x) {
	return Math.log((1.0 - Math.pow(x, (1.0 / 10000.0))));
}
def code(x):
	return math.log((1.0 - math.pow(x, (1.0 / 10000.0))))
function code(x)
	return log(Float64(1.0 - (x ^ Float64(1.0 / 10000.0))))
end
function tmp = code(x)
	tmp = log((1.0 - (x ^ (1.0 / 10000.0))));
end
code[x_] := N[Log[N[(1.0 - N[Power[x, N[(1.0 / 10000.0), $MachinePrecision]], $MachinePrecision]), $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right)
\end{array}
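
The accuracy loss in the initial program comes from cancellation: for any input that is not extremely small, x^(1/10000) lies very close to 1, so the subtraction 1 - pow(x, 1/10000) cancels most of the significant bits and amplifies the rounding error of pow. Below is a minimal sketch of how to observe this (not part of the Herbie report, assuming the mpmath package is available; the helper names naive and reference are illustrative):

# Minimal sketch: compare the binary64 program against a 200-bit mpmath reference.
import math
from mpmath import mp, mpf, log, power

mp.prec = 200  # reference precision in bits

def naive(x):
    # the original program, evaluated in binary64
    return math.log(1.0 - math.pow(x, 1.0 / 10000.0))

def reference(x):
    # the same expression, evaluated in 200-bit arithmetic
    return log(1 - power(mpf(x), mpf(1) / 10000))

for x in [1e-30, 0.1, 0.5, 0.9, 0.999999]:
    y = naive(x)
    rel_err = abs((mpf(y) - reference(x)) / reference(x))
    print(f"x={x:<9g}  binary64={y:.17g}  rel. error={float(rel_err):.2e}")

The relative error grows as x approaches 1, which is why the more accurate alternatives below rewrite the subtraction itself rather than only switching to log1p.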

Alternative 1: 99.6% accurate, 0.3× speedup

\[\begin{array}{l} \\ \log \left(\frac{\frac{1 - {x}^{0.0027}}{\left({x}^{0.0018} + {x}^{0.0009}\right) + 1}}{\left(\left({x}^{0.0002} + 1\right) + {x}^{0.0001}\right) \cdot \left(\left(1 + {x}^{0.0006}\right) + {x}^{0.0003}\right)}\right) \end{array} \]
(FPCore (x)
 :precision binary64
 (log
  (/
   (/ (- 1.0 (pow x 0.0027)) (+ (+ (pow x 0.0018) (pow x 0.0009)) 1.0))
   (*
    (+ (+ (pow x 0.0002) 1.0) (pow x 0.0001))
    (+ (+ 1.0 (pow x 0.0006)) (pow x 0.0003))))))
double code(double x) {
	return log((((1.0 - pow(x, 0.0027)) / ((pow(x, 0.0018) + pow(x, 0.0009)) + 1.0)) / (((pow(x, 0.0002) + 1.0) + pow(x, 0.0001)) * ((1.0 + pow(x, 0.0006)) + pow(x, 0.0003)))));
}
real(8) function code(x)
    real(8), intent (in) :: x
    code = log((((1.0d0 - (x ** 0.0027d0)) / (((x ** 0.0018d0) + (x ** 0.0009d0)) + 1.0d0)) / ((((x ** 0.0002d0) + 1.0d0) + (x ** 0.0001d0)) * ((1.0d0 + (x ** 0.0006d0)) + (x ** 0.0003d0)))))
end function
public static double code(double x) {
	return Math.log((((1.0 - Math.pow(x, 0.0027)) / ((Math.pow(x, 0.0018) + Math.pow(x, 0.0009)) + 1.0)) / (((Math.pow(x, 0.0002) + 1.0) + Math.pow(x, 0.0001)) * ((1.0 + Math.pow(x, 0.0006)) + Math.pow(x, 0.0003)))));
}
def code(x):
	return math.log((((1.0 - math.pow(x, 0.0027)) / ((math.pow(x, 0.0018) + math.pow(x, 0.0009)) + 1.0)) / (((math.pow(x, 0.0002) + 1.0) + math.pow(x, 0.0001)) * ((1.0 + math.pow(x, 0.0006)) + math.pow(x, 0.0003)))))
function code(x)
	return log(Float64(Float64(Float64(1.0 - (x ^ 0.0027)) / Float64(Float64((x ^ 0.0018) + (x ^ 0.0009)) + 1.0)) / Float64(Float64(Float64((x ^ 0.0002) + 1.0) + (x ^ 0.0001)) * Float64(Float64(1.0 + (x ^ 0.0006)) + (x ^ 0.0003)))))
end
function tmp = code(x)
	tmp = log((((1.0 - (x ^ 0.0027)) / (((x ^ 0.0018) + (x ^ 0.0009)) + 1.0)) / ((((x ^ 0.0002) + 1.0) + (x ^ 0.0001)) * ((1.0 + (x ^ 0.0006)) + (x ^ 0.0003)))));
end
code[x_] := N[Log[N[(N[(N[(1.0 - N[Power[x, 0.0027], $MachinePrecision]), $MachinePrecision] / N[(N[(N[Power[x, 0.0018], $MachinePrecision] + N[Power[x, 0.0009], $MachinePrecision]), $MachinePrecision] + 1.0), $MachinePrecision]), $MachinePrecision] / N[(N[(N[(N[Power[x, 0.0002], $MachinePrecision] + 1.0), $MachinePrecision] + N[Power[x, 0.0001], $MachinePrecision]), $MachinePrecision] * N[(N[(1.0 + N[Power[x, 0.0006], $MachinePrecision]), $MachinePrecision] + N[Power[x, 0.0003], $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\log \left(\frac{\frac{1 - {x}^{0.0027}}{\left({x}^{0.0018} + {x}^{0.0009}\right) + 1}}{\left(\left({x}^{0.0002} + 1\right) + {x}^{0.0001}\right) \cdot \left(\left(1 + {x}^{0.0006}\right) + {x}^{0.0003}\right)}\right)
\end{array}
Derivation
  1. Initial program 97.4%

    \[\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right) \]
  2. Add Preprocessing
  3. Applied rewrites 99.3%

    \[\leadsto \log \color{blue}{\left(\frac{1 - {x}^{0.0009}}{\left(\left({x}^{0.0002} + 1\right) + {x}^{0.0001}\right) \cdot \left(\left(1 + {x}^{0.0006}\right) + {x}^{0.0003}\right)}\right)} \]
  4. Step-by-step derivation
    1. lift--.f64N/A

      \[\leadsto \log \left(\frac{\color{blue}{1 - {x}^{\frac{9}{10000}}}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    2. flip3--N/A

      \[\leadsto \log \left(\frac{\color{blue}{\frac{{1}^{3} - {\left({x}^{\frac{9}{10000}}\right)}^{3}}{1 \cdot 1 + \left({x}^{\frac{9}{10000}} \cdot {x}^{\frac{9}{10000}} + 1 \cdot {x}^{\frac{9}{10000}}\right)}}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    3. lower-/.f64N/A

      \[\leadsto \log \left(\frac{\color{blue}{\frac{{1}^{3} - {\left({x}^{\frac{9}{10000}}\right)}^{3}}{1 \cdot 1 + \left({x}^{\frac{9}{10000}} \cdot {x}^{\frac{9}{10000}} + 1 \cdot {x}^{\frac{9}{10000}}\right)}}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    4. metadata-evalN/A

      \[\leadsto \log \left(\frac{\frac{\color{blue}{1} - {\left({x}^{\frac{9}{10000}}\right)}^{3}}{1 \cdot 1 + \left({x}^{\frac{9}{10000}} \cdot {x}^{\frac{9}{10000}} + 1 \cdot {x}^{\frac{9}{10000}}\right)}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    5. lower--.f64N/A

      \[\leadsto \log \left(\frac{\frac{\color{blue}{1 - {\left({x}^{\frac{9}{10000}}\right)}^{3}}}{1 \cdot 1 + \left({x}^{\frac{9}{10000}} \cdot {x}^{\frac{9}{10000}} + 1 \cdot {x}^{\frac{9}{10000}}\right)}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    6. lift-pow.f64N/A

      \[\leadsto \log \left(\frac{\frac{1 - {\color{blue}{\left({x}^{\frac{9}{10000}}\right)}}^{3}}{1 \cdot 1 + \left({x}^{\frac{9}{10000}} \cdot {x}^{\frac{9}{10000}} + 1 \cdot {x}^{\frac{9}{10000}}\right)}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    7. pow-powN/A

      \[\leadsto \log \left(\frac{\frac{1 - \color{blue}{{x}^{\left(\frac{9}{10000} \cdot 3\right)}}}{1 \cdot 1 + \left({x}^{\frac{9}{10000}} \cdot {x}^{\frac{9}{10000}} + 1 \cdot {x}^{\frac{9}{10000}}\right)}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    8. lower-pow.f64N/A

      \[\leadsto \log \left(\frac{\frac{1 - \color{blue}{{x}^{\left(\frac{9}{10000} \cdot 3\right)}}}{1 \cdot 1 + \left({x}^{\frac{9}{10000}} \cdot {x}^{\frac{9}{10000}} + 1 \cdot {x}^{\frac{9}{10000}}\right)}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    9. metadata-evalN/A

      \[\leadsto \log \left(\frac{\frac{1 - {x}^{\color{blue}{\frac{27}{10000}}}}{1 \cdot 1 + \left({x}^{\frac{9}{10000}} \cdot {x}^{\frac{9}{10000}} + 1 \cdot {x}^{\frac{9}{10000}}\right)}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    10. metadata-evalN/A

      \[\leadsto \log \left(\frac{\frac{1 - {x}^{\frac{27}{10000}}}{\color{blue}{1} + \left({x}^{\frac{9}{10000}} \cdot {x}^{\frac{9}{10000}} + 1 \cdot {x}^{\frac{9}{10000}}\right)}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    11. +-commutativeN/A

      \[\leadsto \log \left(\frac{\frac{1 - {x}^{\frac{27}{10000}}}{\color{blue}{\left({x}^{\frac{9}{10000}} \cdot {x}^{\frac{9}{10000}} + 1 \cdot {x}^{\frac{9}{10000}}\right) + 1}}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    12. lower-+.f64N/A

      \[\leadsto \log \left(\frac{\frac{1 - {x}^{\frac{27}{10000}}}{\color{blue}{\left({x}^{\frac{9}{10000}} \cdot {x}^{\frac{9}{10000}} + 1 \cdot {x}^{\frac{9}{10000}}\right) + 1}}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    13. *-lft-identityN/A

      \[\leadsto \log \left(\frac{\frac{1 - {x}^{\frac{27}{10000}}}{\left({x}^{\frac{9}{10000}} \cdot {x}^{\frac{9}{10000}} + \color{blue}{{x}^{\frac{9}{10000}}}\right) + 1}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    14. lower-+.f64N/A

      \[\leadsto \log \left(\frac{\frac{1 - {x}^{\frac{27}{10000}}}{\color{blue}{\left({x}^{\frac{9}{10000}} \cdot {x}^{\frac{9}{10000}} + {x}^{\frac{9}{10000}}\right)} + 1}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    15. pow2N/A

      \[\leadsto \log \left(\frac{\frac{1 - {x}^{\frac{27}{10000}}}{\left(\color{blue}{{\left({x}^{\frac{9}{10000}}\right)}^{2}} + {x}^{\frac{9}{10000}}\right) + 1}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    16. lift-pow.f64N/A

      \[\leadsto \log \left(\frac{\frac{1 - {x}^{\frac{27}{10000}}}{\left({\color{blue}{\left({x}^{\frac{9}{10000}}\right)}}^{2} + {x}^{\frac{9}{10000}}\right) + 1}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    17. pow-powN/A

      \[\leadsto \log \left(\frac{\frac{1 - {x}^{\frac{27}{10000}}}{\left(\color{blue}{{x}^{\left(\frac{9}{10000} \cdot 2\right)}} + {x}^{\frac{9}{10000}}\right) + 1}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    18. metadata-evalN/A

      \[\leadsto \log \left(\frac{\frac{1 - {x}^{\frac{27}{10000}}}{\left({x}^{\color{blue}{\frac{9}{5000}}} + {x}^{\frac{9}{10000}}\right) + 1}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    19. metadata-evalN/A

      \[\leadsto \log \left(\frac{\frac{1 - {x}^{\frac{27}{10000}}}{\left({x}^{\color{blue}{\left(\frac{3}{5000} \cdot 3\right)}} + {x}^{\frac{9}{10000}}\right) + 1}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    20. lower-pow.f64N/A

      \[\leadsto \log \left(\frac{\frac{1 - {x}^{\frac{27}{10000}}}{\left(\color{blue}{{x}^{\left(\frac{3}{5000} \cdot 3\right)}} + {x}^{\frac{9}{10000}}\right) + 1}}{\left(\left({x}^{\frac{1}{5000}} + 1\right) + {x}^{\frac{1}{10000}}\right) \cdot \left(\left(1 + {x}^{\frac{3}{5000}}\right) + {x}^{\frac{3}{10000}}\right)}\right) \]
    21. metadata-eval 99.5%

      \[\leadsto \log \left(\frac{\frac{1 - {x}^{0.0027}}{\left({x}^{\color{blue}{0.0018}} + {x}^{0.0009}\right) + 1}}{\left(\left({x}^{0.0002} + 1\right) + {x}^{0.0001}\right) \cdot \left(\left(1 + {x}^{0.0006}\right) + {x}^{0.0003}\right)}\right) \]
  5. Applied rewrites 99.5%

    \[\leadsto \log \left(\frac{\color{blue}{\frac{1 - {x}^{0.0027}}{\left({x}^{0.0018} + {x}^{0.0009}\right) + 1}}}{\left(\left({x}^{0.0002} + 1\right) + {x}^{0.0001}\right) \cdot \left(\left(1 + {x}^{0.0006}\right) + {x}^{0.0003}\right)}\right) \]
  6. Add Preprocessing
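
The rewrites above repeatedly apply the factorization 1 - t^3 = (1 - t)(1 + t + t^2) with t = x^a. Chaining it three times restates the derivation as one identity (a summary of the steps above, not additional Herbie output):

\[1 - {x}^{0.0001} = \frac{1 - {x}^{0.0027}}{\left(1 + {x}^{0.0001} + {x}^{0.0002}\right) \cdot \left(1 + {x}^{0.0003} + {x}^{0.0006}\right) \cdot \left(1 + {x}^{0.0009} + {x}^{0.0018}\right)} \]

Since pow's absolute rounding error near 1 is roughly constant, subtracting x^0.0027 (a difference about 27 times larger than 1 - x^0.0001) loses correspondingly fewer significant bits, and the three sums in the denominator are well conditioned; this is where the accuracy gain comes from.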

Alternative 2: 99.2% accurate, 0.3× speedup

\[\begin{array}{l} \\ \log \left(\frac{{\left(\frac{\left(1 + {x}^{0.0004}\right) + {x}^{0.0002}}{1 - {x}^{0.0006}}\right)}^{-1}}{{x}^{0.0001} + 1}\right) \end{array} \]
(FPCore (x)
 :precision binary64
 (log
  (/
   (pow
    (/ (+ (+ 1.0 (pow x 0.0004)) (pow x 0.0002)) (- 1.0 (pow x 0.0006)))
    -1.0)
   (+ (pow x 0.0001) 1.0))))
double code(double x) {
	return log((pow((((1.0 + pow(x, 0.0004)) + pow(x, 0.0002)) / (1.0 - pow(x, 0.0006))), -1.0) / (pow(x, 0.0001) + 1.0)));
}
real(8) function code(x)
    real(8), intent (in) :: x
    code = log((((((1.0d0 + (x ** 0.0004d0)) + (x ** 0.0002d0)) / (1.0d0 - (x ** 0.0006d0))) ** (-1.0d0)) / ((x ** 0.0001d0) + 1.0d0)))
end function
public static double code(double x) {
	return Math.log((Math.pow((((1.0 + Math.pow(x, 0.0004)) + Math.pow(x, 0.0002)) / (1.0 - Math.pow(x, 0.0006))), -1.0) / (Math.pow(x, 0.0001) + 1.0)));
}
def code(x):
	return math.log((math.pow((((1.0 + math.pow(x, 0.0004)) + math.pow(x, 0.0002)) / (1.0 - math.pow(x, 0.0006))), -1.0) / (math.pow(x, 0.0001) + 1.0)))
function code(x)
	return log(Float64((Float64(Float64(Float64(1.0 + (x ^ 0.0004)) + (x ^ 0.0002)) / Float64(1.0 - (x ^ 0.0006))) ^ -1.0) / Float64((x ^ 0.0001) + 1.0)))
end
function tmp = code(x)
	tmp = log((((((1.0 + (x ^ 0.0004)) + (x ^ 0.0002)) / (1.0 - (x ^ 0.0006))) ^ -1.0) / ((x ^ 0.0001) + 1.0)));
end
code[x_] := N[Log[N[(N[Power[N[(N[(N[(1.0 + N[Power[x, 0.0004], $MachinePrecision]), $MachinePrecision] + N[Power[x, 0.0002], $MachinePrecision]), $MachinePrecision] / N[(1.0 - N[Power[x, 0.0006], $MachinePrecision]), $MachinePrecision]), $MachinePrecision], -1.0], $MachinePrecision] / N[(N[Power[x, 0.0001], $MachinePrecision] + 1.0), $MachinePrecision]), $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\log \left(\frac{{\left(\frac{\left(1 + {x}^{0.0004}\right) + {x}^{0.0002}}{1 - {x}^{0.0006}}\right)}^{-1}}{{x}^{0.0001} + 1}\right)
\end{array}
Derivation
  1. Initial program 97.4%

    \[\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right) \]
  2. Add Preprocessing
  3. Step-by-step derivation
    1. lift--.f64N/A

      \[\leadsto \log \color{blue}{\left(1 - {x}^{\left(\frac{1}{10000}\right)}\right)} \]
    2. flip--N/A

      \[\leadsto \log \color{blue}{\left(\frac{1 \cdot 1 - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right)} \]
    3. lower-/.f64N/A

      \[\leadsto \log \color{blue}{\left(\frac{1 \cdot 1 - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right)} \]
    4. metadata-evalN/A

      \[\leadsto \log \left(\frac{\color{blue}{1} - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    5. lower--.f64N/A

      \[\leadsto \log \left(\frac{\color{blue}{1 - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    6. lift-pow.f64N/A

      \[\leadsto \log \left(\frac{1 - \color{blue}{{x}^{\left(\frac{1}{10000}\right)}} \cdot {x}^{\left(\frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    7. lift-pow.f64N/A

      \[\leadsto \log \left(\frac{1 - {x}^{\left(\frac{1}{10000}\right)} \cdot \color{blue}{{x}^{\left(\frac{1}{10000}\right)}}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    8. pow-prod-upN/A

      \[\leadsto \log \left(\frac{1 - \color{blue}{{x}^{\left(\frac{1}{10000} + \frac{1}{10000}\right)}}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    9. lower-pow.f64N/A

      \[\leadsto \log \left(\frac{1 - \color{blue}{{x}^{\left(\frac{1}{10000} + \frac{1}{10000}\right)}}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    10. lift-/.f64N/A

      \[\leadsto \log \left(\frac{1 - {x}^{\left(\color{blue}{\frac{1}{10000}} + \frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    11. metadata-evalN/A

      \[\leadsto \log \left(\frac{1 - {x}^{\left(\color{blue}{\frac{1}{10000}} + \frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    12. lift-/.f64N/A

      \[\leadsto \log \left(\frac{1 - {x}^{\left(\frac{1}{10000} + \color{blue}{\frac{1}{10000}}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    13. metadata-evalN/A

      \[\leadsto \log \left(\frac{1 - {x}^{\left(\frac{1}{10000} + \color{blue}{\frac{1}{10000}}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    14. metadata-evalN/A

      \[\leadsto \log \left(\frac{1 - {x}^{\color{blue}{\frac{1}{5000}}}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    15. +-commutativeN/A

      \[\leadsto \log \left(\frac{1 - {x}^{\frac{1}{5000}}}{\color{blue}{{x}^{\left(\frac{1}{10000}\right)} + 1}}\right) \]
    16. lower-+.f64 98.3%

      \[\leadsto \log \left(\frac{1 - {x}^{0.0002}}{\color{blue}{{x}^{\left(\frac{1}{10000}\right)} + 1}}\right) \]
    17. lift-/.f64N/A

      \[\leadsto \log \left(\frac{1 - {x}^{\frac{1}{5000}}}{{x}^{\color{blue}{\left(\frac{1}{10000}\right)}} + 1}\right) \]
    18. metadata-eval 98.3%

      \[\leadsto \log \left(\frac{1 - {x}^{0.0002}}{{x}^{\color{blue}{0.0001}} + 1}\right) \]
  4. Applied rewrites 98.3%

    \[\leadsto \log \color{blue}{\left(\frac{1 - {x}^{0.0002}}{{x}^{0.0001} + 1}\right)} \]
  5. Applied rewrites 99.1%

    \[\leadsto \log \left(\frac{\color{blue}{\frac{1}{\frac{\left(1 + {x}^{0.0004}\right) + {x}^{0.0002}}{1 - {x}^{0.0006}}}}}{{x}^{0.0001} + 1}\right) \]
  6. Final simplification 99.1%

    \[\leadsto \log \left(\frac{{\left(\frac{\left(1 + {x}^{0.0004}\right) + {x}^{0.0002}}{1 - {x}^{0.0006}}\right)}^{-1}}{{x}^{0.0001} + 1}\right) \]
  7. Add Preprocessing
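
Alternative 2 reaches a similar form through the difference of squares 1 - t^2 = (1 - t)(1 + t), followed by the cubic factorization of the resulting numerator. Restated as one identity (a summary of the derivation, not additional Herbie output):

\[1 - {x}^{0.0001} = \frac{1 - {x}^{0.0002}}{1 + {x}^{0.0001}} = \frac{{\left(\frac{1 + {x}^{0.0004} + {x}^{0.0002}}{1 - {x}^{0.0006}}\right)}^{-1}}{{x}^{0.0001} + 1} \]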

Alternative 3: 99.4% accurate, 0.3× speedup

\[\begin{array}{l} \\ \log \left(\frac{1 - {x}^{0.0009}}{\left(\left({x}^{0.0002} + 1\right) + {x}^{0.0001}\right) \cdot \left(\left(1 + {x}^{0.0006}\right) + {x}^{0.0003}\right)}\right) \end{array} \]
(FPCore (x)
 :precision binary64
 (log
  (/
   (- 1.0 (pow x 0.0009))
   (*
    (+ (+ (pow x 0.0002) 1.0) (pow x 0.0001))
    (+ (+ 1.0 (pow x 0.0006)) (pow x 0.0003))))))
double code(double x) {
	return log(((1.0 - pow(x, 0.0009)) / (((pow(x, 0.0002) + 1.0) + pow(x, 0.0001)) * ((1.0 + pow(x, 0.0006)) + pow(x, 0.0003)))));
}
real(8) function code(x)
    real(8), intent (in) :: x
    code = log(((1.0d0 - (x ** 0.0009d0)) / ((((x ** 0.0002d0) + 1.0d0) + (x ** 0.0001d0)) * ((1.0d0 + (x ** 0.0006d0)) + (x ** 0.0003d0)))))
end function
public static double code(double x) {
	return Math.log(((1.0 - Math.pow(x, 0.0009)) / (((Math.pow(x, 0.0002) + 1.0) + Math.pow(x, 0.0001)) * ((1.0 + Math.pow(x, 0.0006)) + Math.pow(x, 0.0003)))));
}
def code(x):
	return math.log(((1.0 - math.pow(x, 0.0009)) / (((math.pow(x, 0.0002) + 1.0) + math.pow(x, 0.0001)) * ((1.0 + math.pow(x, 0.0006)) + math.pow(x, 0.0003)))))
function code(x)
	return log(Float64(Float64(1.0 - (x ^ 0.0009)) / Float64(Float64(Float64((x ^ 0.0002) + 1.0) + (x ^ 0.0001)) * Float64(Float64(1.0 + (x ^ 0.0006)) + (x ^ 0.0003)))))
end
function tmp = code(x)
	tmp = log(((1.0 - (x ^ 0.0009)) / ((((x ^ 0.0002) + 1.0) + (x ^ 0.0001)) * ((1.0 + (x ^ 0.0006)) + (x ^ 0.0003)))));
end
code[x_] := N[Log[N[(N[(1.0 - N[Power[x, 0.0009], $MachinePrecision]), $MachinePrecision] / N[(N[(N[(N[Power[x, 0.0002], $MachinePrecision] + 1.0), $MachinePrecision] + N[Power[x, 0.0001], $MachinePrecision]), $MachinePrecision] * N[(N[(1.0 + N[Power[x, 0.0006], $MachinePrecision]), $MachinePrecision] + N[Power[x, 0.0003], $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\log \left(\frac{1 - {x}^{0.0009}}{\left(\left({x}^{0.0002} + 1\right) + {x}^{0.0001}\right) \cdot \left(\left(1 + {x}^{0.0006}\right) + {x}^{0.0003}\right)}\right)
\end{array}
Derivation
  1. Initial program 97.4%

    \[\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right) \]
  2. Add Preprocessing
  3. Applied rewrites 99.3%

    \[\leadsto \log \color{blue}{\left(\frac{1 - {x}^{0.0009}}{\left(\left({x}^{0.0002} + 1\right) + {x}^{0.0001}\right) \cdot \left(\left(1 + {x}^{0.0006}\right) + {x}^{0.0003}\right)}\right)} \]
  4. Add Preprocessing
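
Alternative 3's derivation shows no intermediate steps, but it is the two-level version of the cubic factorization used in Alternative 1 (restated here for reference, not additional Herbie output):

\[1 - {x}^{0.0001} = \frac{1 - {x}^{0.0009}}{\left(1 + {x}^{0.0001} + {x}^{0.0002}\right) \cdot \left(1 + {x}^{0.0003} + {x}^{0.0006}\right)} \]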

Alternative 4: 99.2% accurate, 0.4× speedup

\[\begin{array}{l} \\ \log \left(\frac{1 - {x}^{0.0006}}{\left({x}^{0.0001} + 1\right) \cdot \left(\left(1 + {x}^{0.0004}\right) + {x}^{0.0002}\right)}\right) \end{array} \]
(FPCore (x)
 :precision binary64
 (log
  (/
   (- 1.0 (pow x 0.0006))
   (* (+ (pow x 0.0001) 1.0) (+ (+ 1.0 (pow x 0.0004)) (pow x 0.0002))))))
double code(double x) {
	return log(((1.0 - pow(x, 0.0006)) / ((pow(x, 0.0001) + 1.0) * ((1.0 + pow(x, 0.0004)) + pow(x, 0.0002)))));
}
real(8) function code(x)
    real(8), intent (in) :: x
    code = log(((1.0d0 - (x ** 0.0006d0)) / (((x ** 0.0001d0) + 1.0d0) * ((1.0d0 + (x ** 0.0004d0)) + (x ** 0.0002d0)))))
end function
public static double code(double x) {
	return Math.log(((1.0 - Math.pow(x, 0.0006)) / ((Math.pow(x, 0.0001) + 1.0) * ((1.0 + Math.pow(x, 0.0004)) + Math.pow(x, 0.0002)))));
}
def code(x):
	return math.log(((1.0 - math.pow(x, 0.0006)) / ((math.pow(x, 0.0001) + 1.0) * ((1.0 + math.pow(x, 0.0004)) + math.pow(x, 0.0002)))))
function code(x)
	return log(Float64(Float64(1.0 - (x ^ 0.0006)) / Float64(Float64((x ^ 0.0001) + 1.0) * Float64(Float64(1.0 + (x ^ 0.0004)) + (x ^ 0.0002)))))
end
function tmp = code(x)
	tmp = log(((1.0 - (x ^ 0.0006)) / (((x ^ 0.0001) + 1.0) * ((1.0 + (x ^ 0.0004)) + (x ^ 0.0002)))));
end
code[x_] := N[Log[N[(N[(1.0 - N[Power[x, 0.0006], $MachinePrecision]), $MachinePrecision] / N[(N[(N[Power[x, 0.0001], $MachinePrecision] + 1.0), $MachinePrecision] * N[(N[(1.0 + N[Power[x, 0.0004], $MachinePrecision]), $MachinePrecision] + N[Power[x, 0.0002], $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\log \left(\frac{1 - {x}^{0.0006}}{\left({x}^{0.0001} + 1\right) \cdot \left(\left(1 + {x}^{0.0004}\right) + {x}^{0.0002}\right)}\right)
\end{array}
Derivation
  1. Initial program 97.4%

    \[\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right) \]
  2. Add Preprocessing
  3. Step-by-step derivation
    1. lift--.f64N/A

      \[\leadsto \log \color{blue}{\left(1 - {x}^{\left(\frac{1}{10000}\right)}\right)} \]
    2. flip--N/A

      \[\leadsto \log \color{blue}{\left(\frac{1 \cdot 1 - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right)} \]
    3. metadata-evalN/A

      \[\leadsto \log \left(\frac{\color{blue}{1} - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    4. flip3--N/A

      \[\leadsto \log \left(\frac{\color{blue}{\frac{{1}^{3} - {\left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}{1 \cdot 1 + \left(\left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right) \cdot \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right) + 1 \cdot \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right)\right)}}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    5. associate-/l/N/A

      \[\leadsto \log \color{blue}{\left(\frac{{1}^{3} - {\left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}{\left(1 + {x}^{\left(\frac{1}{10000}\right)}\right) \cdot \left(1 \cdot 1 + \left(\left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right) \cdot \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right) + 1 \cdot \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right)\right)\right)}\right)} \]
    6. lower-/.f64N/A

      \[\leadsto \log \color{blue}{\left(\frac{{1}^{3} - {\left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}{\left(1 + {x}^{\left(\frac{1}{10000}\right)}\right) \cdot \left(1 \cdot 1 + \left(\left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right) \cdot \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right) + 1 \cdot \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right)\right)\right)}\right)} \]
  4. Applied rewrites 99.1%

    \[\leadsto \log \color{blue}{\left(\frac{1 - {x}^{0.0006}}{\left({x}^{0.0001} + 1\right) \cdot \left(\left(1 + {x}^{0.0004}\right) + {x}^{0.0002}\right)}\right)} \]
  5. Add Preprocessing

Alternative 5: 99.2% accurate, 0.4× speedup

\[\begin{array}{l} \\ \log \left(\frac{1 - {x}^{0.0006}}{\left(\left({x}^{0.0002} + 1\right) + {x}^{0.0001}\right) \cdot \left({x}^{0.0003} + 1\right)}\right) \end{array} \]
(FPCore (x)
 :precision binary64
 (log
  (/
   (- 1.0 (pow x 0.0006))
   (* (+ (+ (pow x 0.0002) 1.0) (pow x 0.0001)) (+ (pow x 0.0003) 1.0)))))
double code(double x) {
	return log(((1.0 - pow(x, 0.0006)) / (((pow(x, 0.0002) + 1.0) + pow(x, 0.0001)) * (pow(x, 0.0003) + 1.0))));
}
real(8) function code(x)
    real(8), intent (in) :: x
    code = log(((1.0d0 - (x ** 0.0006d0)) / ((((x ** 0.0002d0) + 1.0d0) + (x ** 0.0001d0)) * ((x ** 0.0003d0) + 1.0d0))))
end function
public static double code(double x) {
	return Math.log(((1.0 - Math.pow(x, 0.0006)) / (((Math.pow(x, 0.0002) + 1.0) + Math.pow(x, 0.0001)) * (Math.pow(x, 0.0003) + 1.0))));
}
def code(x):
	return math.log(((1.0 - math.pow(x, 0.0006)) / (((math.pow(x, 0.0002) + 1.0) + math.pow(x, 0.0001)) * (math.pow(x, 0.0003) + 1.0))))
function code(x)
	return log(Float64(Float64(1.0 - (x ^ 0.0006)) / Float64(Float64(Float64((x ^ 0.0002) + 1.0) + (x ^ 0.0001)) * Float64((x ^ 0.0003) + 1.0))))
end
function tmp = code(x)
	tmp = log(((1.0 - (x ^ 0.0006)) / ((((x ^ 0.0002) + 1.0) + (x ^ 0.0001)) * ((x ^ 0.0003) + 1.0))));
end
code[x_] := N[Log[N[(N[(1.0 - N[Power[x, 0.0006], $MachinePrecision]), $MachinePrecision] / N[(N[(N[(N[Power[x, 0.0002], $MachinePrecision] + 1.0), $MachinePrecision] + N[Power[x, 0.0001], $MachinePrecision]), $MachinePrecision] * N[(N[Power[x, 0.0003], $MachinePrecision] + 1.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\log \left(\frac{1 - {x}^{0.0006}}{\left(\left({x}^{0.0002} + 1\right) + {x}^{0.0001}\right) \cdot \left({x}^{0.0003} + 1\right)}\right)
\end{array}
Derivation
  1. Initial program 97.4%

    \[\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right) \]
  2. Add Preprocessing
  3. Step-by-step derivation
    1. lift--.f64N/A

      \[\leadsto \log \color{blue}{\left(1 - {x}^{\left(\frac{1}{10000}\right)}\right)} \]
    2. flip3--N/A

      \[\leadsto \log \color{blue}{\left(\frac{{1}^{3} - {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}{1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right)} \]
    3. metadata-evalN/A

      \[\leadsto \log \left(\frac{\color{blue}{1} - {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}{1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right) \]
    4. flip--N/A

      \[\leadsto \log \left(\frac{\color{blue}{\frac{1 \cdot 1 - {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3} \cdot {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}{1 + {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}}}{1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right) \]
    5. metadata-evalN/A

      \[\leadsto \log \left(\frac{\frac{1 \cdot 1 - {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3} \cdot {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}{\color{blue}{{1}^{3}} + {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}}{1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right) \]
    6. associate-/l/N/A

      \[\leadsto \log \color{blue}{\left(\frac{1 \cdot 1 - {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3} \cdot {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}{\left(1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)\right) \cdot \left({1}^{3} + {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}\right)}\right)} \]
    7. metadata-evalN/A

      \[\leadsto \log \left(\frac{\color{blue}{1} - {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3} \cdot {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}{\left(1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)\right) \cdot \left({1}^{3} + {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}\right)}\right) \]
    8. metadata-evalN/A

      \[\leadsto \log \left(\frac{\color{blue}{{1}^{3}} - {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3} \cdot {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}{\left(1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)\right) \cdot \left({1}^{3} + {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}\right)}\right) \]
    9. unpow-prod-downN/A

      \[\leadsto \log \left(\frac{{1}^{3} - \color{blue}{{\left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}}{\left(1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)\right) \cdot \left({1}^{3} + {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}\right)}\right) \]
  4. Applied rewrites 99.1%

    \[\leadsto \log \color{blue}{\left(\frac{1 - {x}^{0.0006}}{\left(\left({x}^{0.0002} + 1\right) + {x}^{0.0001}\right) \cdot \left({x}^{0.0003} + 1\right)}\right)} \]
  5. Add Preprocessing

Alternative 6: 99.0% accurate, 0.5× speedup

\[\begin{array}{l} \\ \log \left(\frac{1 - {x}^{0.0004}}{\left({x}^{0.0001} + 1\right) \cdot \left({x}^{0.0002} + 1\right)}\right) \end{array} \]
(FPCore (x)
 :precision binary64
 (log
  (/
   (- 1.0 (pow x 0.0004))
   (* (+ (pow x 0.0001) 1.0) (+ (pow x 0.0002) 1.0)))))
double code(double x) {
	return log(((1.0 - pow(x, 0.0004)) / ((pow(x, 0.0001) + 1.0) * (pow(x, 0.0002) + 1.0))));
}
real(8) function code(x)
    real(8), intent (in) :: x
    code = log(((1.0d0 - (x ** 0.0004d0)) / (((x ** 0.0001d0) + 1.0d0) * ((x ** 0.0002d0) + 1.0d0))))
end function
public static double code(double x) {
	return Math.log(((1.0 - Math.pow(x, 0.0004)) / ((Math.pow(x, 0.0001) + 1.0) * (Math.pow(x, 0.0002) + 1.0))));
}
def code(x):
	return math.log(((1.0 - math.pow(x, 0.0004)) / ((math.pow(x, 0.0001) + 1.0) * (math.pow(x, 0.0002) + 1.0))))
function code(x)
	return log(Float64(Float64(1.0 - (x ^ 0.0004)) / Float64(Float64((x ^ 0.0001) + 1.0) * Float64((x ^ 0.0002) + 1.0))))
end
function tmp = code(x)
	tmp = log(((1.0 - (x ^ 0.0004)) / (((x ^ 0.0001) + 1.0) * ((x ^ 0.0002) + 1.0))));
end
code[x_] := N[Log[N[(N[(1.0 - N[Power[x, 0.0004], $MachinePrecision]), $MachinePrecision] / N[(N[(N[Power[x, 0.0001], $MachinePrecision] + 1.0), $MachinePrecision] * N[(N[Power[x, 0.0002], $MachinePrecision] + 1.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\log \left(\frac{1 - {x}^{0.0004}}{\left({x}^{0.0001} + 1\right) \cdot \left({x}^{0.0002} + 1\right)}\right)
\end{array}
Derivation
  1. Initial program 97.4%

    \[\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right) \]
  2. Add Preprocessing
  3. Step-by-step derivation
    1. lift--.f64N/A

      \[\leadsto \log \color{blue}{\left(1 - {x}^{\left(\frac{1}{10000}\right)}\right)} \]
    2. flip--N/A

      \[\leadsto \log \color{blue}{\left(\frac{1 \cdot 1 - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right)} \]
    3. metadata-evalN/A

      \[\leadsto \log \left(\frac{\color{blue}{1} - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    4. flip--N/A

      \[\leadsto \log \left(\frac{\color{blue}{\frac{1 \cdot 1 - \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right) \cdot \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}{1 + {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}}}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    5. associate-/l/N/A

      \[\leadsto \log \color{blue}{\left(\frac{1 \cdot 1 - \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right) \cdot \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}{\left(1 + {x}^{\left(\frac{1}{10000}\right)}\right) \cdot \left(1 + {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right)} \]
    6. lower-/.f64N/A

      \[\leadsto \log \color{blue}{\left(\frac{1 \cdot 1 - \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right) \cdot \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}{\left(1 + {x}^{\left(\frac{1}{10000}\right)}\right) \cdot \left(1 + {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right)} \]
  4. Applied rewrites 98.9%

    \[\leadsto \log \color{blue}{\left(\frac{1 - {x}^{0.0004}}{\left({x}^{0.0001} + 1\right) \cdot \left({x}^{0.0002} + 1\right)}\right)} \]
  5. Add Preprocessing

Alternative 7: 98.8% accurate, 0.5× speedup

\[\begin{array}{l} \\ \log \left(\frac{1 - {x}^{0.0003}}{\left({x}^{0.0002} + 1\right) + {x}^{0.0001}}\right) \end{array} \]
(FPCore (x)
 :precision binary64
 (log (/ (- 1.0 (pow x 0.0003)) (+ (+ (pow x 0.0002) 1.0) (pow x 0.0001)))))
double code(double x) {
	return log(((1.0 - pow(x, 0.0003)) / ((pow(x, 0.0002) + 1.0) + pow(x, 0.0001))));
}
real(8) function code(x)
    real(8), intent (in) :: x
    code = log(((1.0d0 - (x ** 0.0003d0)) / (((x ** 0.0002d0) + 1.0d0) + (x ** 0.0001d0))))
end function
public static double code(double x) {
	return Math.log(((1.0 - Math.pow(x, 0.0003)) / ((Math.pow(x, 0.0002) + 1.0) + Math.pow(x, 0.0001))));
}
def code(x):
	return math.log(((1.0 - math.pow(x, 0.0003)) / ((math.pow(x, 0.0002) + 1.0) + math.pow(x, 0.0001))))
function code(x)
	return log(Float64(Float64(1.0 - (x ^ 0.0003)) / Float64(Float64((x ^ 0.0002) + 1.0) + (x ^ 0.0001))))
end
function tmp = code(x)
	tmp = log(((1.0 - (x ^ 0.0003)) / (((x ^ 0.0002) + 1.0) + (x ^ 0.0001))));
end
code[x_] := N[Log[N[(N[(1.0 - N[Power[x, 0.0003], $MachinePrecision]), $MachinePrecision] / N[(N[(N[Power[x, 0.0002], $MachinePrecision] + 1.0), $MachinePrecision] + N[Power[x, 0.0001], $MachinePrecision]), $MachinePrecision]), $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\log \left(\frac{1 - {x}^{0.0003}}{\left({x}^{0.0002} + 1\right) + {x}^{0.0001}}\right)
\end{array}
Derivation
  1. Initial program 97.4%

    \[\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right) \]
  2. Add Preprocessing
  3. Step-by-step derivation
    1. lift--.f64N/A

      \[\leadsto \log \color{blue}{\left(1 - {x}^{\left(\frac{1}{10000}\right)}\right)} \]
    2. flip3--N/A

      \[\leadsto \log \color{blue}{\left(\frac{{1}^{3} - {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}{1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right)} \]
    3. lower-/.f64N/A

      \[\leadsto \log \color{blue}{\left(\frac{{1}^{3} - {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}{1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right)} \]
    4. metadata-evalN/A

      \[\leadsto \log \left(\frac{\color{blue}{1} - {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}{1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right) \]
    5. lower--.f64N/A

      \[\leadsto \log \left(\frac{\color{blue}{1 - {\left({x}^{\left(\frac{1}{10000}\right)}\right)}^{3}}}{1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right) \]
    6. lift-pow.f64N/A

      \[\leadsto \log \left(\frac{1 - {\color{blue}{\left({x}^{\left(\frac{1}{10000}\right)}\right)}}^{3}}{1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right) \]
    7. pow-powN/A

      \[\leadsto \log \left(\frac{1 - \color{blue}{{x}^{\left(\frac{1}{10000} \cdot 3\right)}}}{1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right) \]
    8. lower-pow.f64N/A

      \[\leadsto \log \left(\frac{1 - \color{blue}{{x}^{\left(\frac{1}{10000} \cdot 3\right)}}}{1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right) \]
    9. lift-/.f64N/A

      \[\leadsto \log \left(\frac{1 - {x}^{\left(\color{blue}{\frac{1}{10000}} \cdot 3\right)}}{1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right) \]
    10. metadata-evalN/A

      \[\leadsto \log \left(\frac{1 - {x}^{\left(\color{blue}{\frac{1}{10000}} \cdot 3\right)}}{1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right) \]
    11. metadata-evalN/A

      \[\leadsto \log \left(\frac{1 - {x}^{\color{blue}{\frac{3}{10000}}}}{1 \cdot 1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right) \]
    12. metadata-evalN/A

      \[\leadsto \log \left(\frac{1 - {x}^{\frac{3}{10000}}}{\color{blue}{1} + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + 1 \cdot {x}^{\left(\frac{1}{10000}\right)}\right)}\right) \]
    13. *-lft-identityN/A

      \[\leadsto \log \left(\frac{1 - {x}^{\frac{3}{10000}}}{1 + \left({x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)} + \color{blue}{{x}^{\left(\frac{1}{10000}\right)}}\right)}\right) \]
    14. associate-+r+N/A

      \[\leadsto \log \left(\frac{1 - {x}^{\frac{3}{10000}}}{\color{blue}{\left(1 + {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right) + {x}^{\left(\frac{1}{10000}\right)}}}\right) \]
    15. lower-+.f64N/A

      \[\leadsto \log \left(\frac{1 - {x}^{\frac{3}{10000}}}{\color{blue}{\left(1 + {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right) + {x}^{\left(\frac{1}{10000}\right)}}}\right) \]
  4. Applied rewrites 98.8%

    \[\leadsto \log \color{blue}{\left(\frac{1 - {x}^{0.0003}}{\left({x}^{0.0002} + 1\right) + {x}^{0.0001}}\right)} \]
  5. Add Preprocessing

Alternative 8: 98.4% accurate, 0.5× speedup

\[\begin{array}{l} \\ \mathsf{log1p}\left(-{x}^{0.0002}\right) - \mathsf{log1p}\left({x}^{0.0001}\right) \end{array} \]
(FPCore (x)
 :precision binary64
 (- (log1p (- (pow x 0.0002))) (log1p (pow x 0.0001))))
double code(double x) {
	return log1p(-pow(x, 0.0002)) - log1p(pow(x, 0.0001));
}
public static double code(double x) {
	return Math.log1p(-Math.pow(x, 0.0002)) - Math.log1p(Math.pow(x, 0.0001));
}
def code(x):
	return math.log1p(-math.pow(x, 0.0002)) - math.log1p(math.pow(x, 0.0001))
function code(x)
	return Float64(log1p(Float64(-(x ^ 0.0002))) - log1p((x ^ 0.0001)))
end
code[x_] := N[(N[Log[1 + (-N[Power[x, 0.0002], $MachinePrecision])], $MachinePrecision] - N[Log[1 + N[Power[x, 0.0001], $MachinePrecision]], $MachinePrecision]), $MachinePrecision]
\begin{array}{l}

\\
\mathsf{log1p}\left(-{x}^{0.0002}\right) - \mathsf{log1p}\left({x}^{0.0001}\right)
\end{array}
Derivation
  1. Initial program 97.4%

    \[\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right) \]
  2. Add Preprocessing
  3. Step-by-step derivation
    1. lift-log.f64N/A

      \[\leadsto \color{blue}{\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right)} \]
    2. lift--.f64N/A

      \[\leadsto \log \color{blue}{\left(1 - {x}^{\left(\frac{1}{10000}\right)}\right)} \]
    3. flip--N/A

      \[\leadsto \log \color{blue}{\left(\frac{1 \cdot 1 - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right)} \]
    4. log-divN/A

      \[\leadsto \color{blue}{\log \left(1 \cdot 1 - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right) - \log \left(1 + {x}^{\left(\frac{1}{10000}\right)}\right)} \]
    5. lower--.f64N/A

      \[\leadsto \color{blue}{\log \left(1 \cdot 1 - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}\right) - \log \left(1 + {x}^{\left(\frac{1}{10000}\right)}\right)} \]
  4. Applied rewrites 98.4%

    \[\leadsto \color{blue}{\mathsf{log1p}\left(-{x}^{0.0002}\right) - \mathsf{log1p}\left({x}^{0.0001}\right)} \]
  5. Add Preprocessing
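
The log1p form follows from the difference of squares plus the quotient rule for logarithms; written out in one line (a restatement of the derivation above, not additional Herbie output):

\[\log \left(1 - {x}^{0.0001}\right) = \log \left(1 - {x}^{0.0002}\right) - \log \left(1 + {x}^{0.0001}\right) = \mathsf{log1p}\left(-{x}^{0.0002}\right) - \mathsf{log1p}\left({x}^{0.0001}\right) \]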

Alternative 9: 98.4% accurate, 0.7× speedup

\[\begin{array}{l} \\ \log \left(\frac{1 - {x}^{0.0002}}{{x}^{0.0001} + 1}\right) \end{array} \]
(FPCore (x)
 :precision binary64
 (log (/ (- 1.0 (pow x 0.0002)) (+ (pow x 0.0001) 1.0))))
double code(double x) {
	return log(((1.0 - pow(x, 0.0002)) / (pow(x, 0.0001) + 1.0)));
}
real(8) function code(x)
    real(8), intent (in) :: x
    code = log(((1.0d0 - (x ** 0.0002d0)) / ((x ** 0.0001d0) + 1.0d0)))
end function
public static double code(double x) {
	return Math.log(((1.0 - Math.pow(x, 0.0002)) / (Math.pow(x, 0.0001) + 1.0)));
}
def code(x):
	return math.log(((1.0 - math.pow(x, 0.0002)) / (math.pow(x, 0.0001) + 1.0)))
function code(x)
	return log(Float64(Float64(1.0 - (x ^ 0.0002)) / Float64((x ^ 0.0001) + 1.0)))
end
function tmp = code(x)
	tmp = log(((1.0 - (x ^ 0.0002)) / ((x ^ 0.0001) + 1.0)));
end
code[x_] := N[Log[N[(N[(1.0 - N[Power[x, 0.0002], $MachinePrecision]), $MachinePrecision] / N[(N[Power[x, 0.0001], $MachinePrecision] + 1.0), $MachinePrecision]), $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\log \left(\frac{1 - {x}^{0.0002}}{{x}^{0.0001} + 1}\right)
\end{array}
Derivation
  1. Initial program 97.4%

    \[\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right) \]
  2. Add Preprocessing
  3. Step-by-step derivation
    1. lift--.f64N/A

      \[\leadsto \log \color{blue}{\left(1 - {x}^{\left(\frac{1}{10000}\right)}\right)} \]
    2. flip--N/A

      \[\leadsto \log \color{blue}{\left(\frac{1 \cdot 1 - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right)} \]
    3. lower-/.f64N/A

      \[\leadsto \log \color{blue}{\left(\frac{1 \cdot 1 - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right)} \]
    4. metadata-evalN/A

      \[\leadsto \log \left(\frac{\color{blue}{1} - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    5. lower--.f64N/A

      \[\leadsto \log \left(\frac{\color{blue}{1 - {x}^{\left(\frac{1}{10000}\right)} \cdot {x}^{\left(\frac{1}{10000}\right)}}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    6. lift-pow.f64N/A

      \[\leadsto \log \left(\frac{1 - \color{blue}{{x}^{\left(\frac{1}{10000}\right)}} \cdot {x}^{\left(\frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    7. lift-pow.f64N/A

      \[\leadsto \log \left(\frac{1 - {x}^{\left(\frac{1}{10000}\right)} \cdot \color{blue}{{x}^{\left(\frac{1}{10000}\right)}}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    8. pow-prod-upN/A

      \[\leadsto \log \left(\frac{1 - \color{blue}{{x}^{\left(\frac{1}{10000} + \frac{1}{10000}\right)}}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    9. lower-pow.f64N/A

      \[\leadsto \log \left(\frac{1 - \color{blue}{{x}^{\left(\frac{1}{10000} + \frac{1}{10000}\right)}}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    10. lift-/.f64N/A

      \[\leadsto \log \left(\frac{1 - {x}^{\left(\color{blue}{\frac{1}{10000}} + \frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    11. metadata-evalN/A

      \[\leadsto \log \left(\frac{1 - {x}^{\left(\color{blue}{\frac{1}{10000}} + \frac{1}{10000}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    12. lift-/.f64N/A

      \[\leadsto \log \left(\frac{1 - {x}^{\left(\frac{1}{10000} + \color{blue}{\frac{1}{10000}}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    13. metadata-evalN/A

      \[\leadsto \log \left(\frac{1 - {x}^{\left(\frac{1}{10000} + \color{blue}{\frac{1}{10000}}\right)}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    14. metadata-evalN/A

      \[\leadsto \log \left(\frac{1 - {x}^{\color{blue}{\frac{1}{5000}}}}{1 + {x}^{\left(\frac{1}{10000}\right)}}\right) \]
    15. +-commutativeN/A

      \[\leadsto \log \left(\frac{1 - {x}^{\frac{1}{5000}}}{\color{blue}{{x}^{\left(\frac{1}{10000}\right)} + 1}}\right) \]
    16. lower-+.f64 98.3%

      \[\leadsto \log \left(\frac{1 - {x}^{0.0002}}{\color{blue}{{x}^{\left(\frac{1}{10000}\right)} + 1}}\right) \]
    17. lift-/.f64N/A

      \[\leadsto \log \left(\frac{1 - {x}^{\frac{1}{5000}}}{{x}^{\color{blue}{\left(\frac{1}{10000}\right)}} + 1}\right) \]
    18. metadata-eval 98.3%

      \[\leadsto \log \left(\frac{1 - {x}^{0.0002}}{{x}^{\color{blue}{0.0001}} + 1}\right) \]
  4. Applied rewrites 98.3%

    \[\leadsto \log \color{blue}{\left(\frac{1 - {x}^{0.0002}}{{x}^{0.0001} + 1}\right)} \]
  5. Add Preprocessing

Alternative 10: 97.6% accurate, 1.1× speedup

\[\begin{array}{l} \\ \mathsf{log1p}\left(-{x}^{0.0001}\right) \end{array} \]
(FPCore (x) :precision binary64 (log1p (- (pow x 0.0001))))
double code(double x) {
	return log1p(-pow(x, 0.0001));
}
public static double code(double x) {
	return Math.log1p(-Math.pow(x, 0.0001));
}
def code(x):
	return math.log1p(-math.pow(x, 0.0001))
function code(x)
	return log1p(Float64(-(x ^ 0.0001)))
end
code[x_] := N[Log[1 + (-N[Power[x, 0.0001], $MachinePrecision])], $MachinePrecision]
\begin{array}{l}

\\
\mathsf{log1p}\left(-{x}^{0.0001}\right)
\end{array}
Derivation
  1. Initial program 97.4%

    \[\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right) \]
  2. Add Preprocessing
  3. Step-by-step derivation
    1. lift-log.f64N/A

      \[\leadsto \color{blue}{\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right)} \]
    2. lift--.f64N/A

      \[\leadsto \log \color{blue}{\left(1 - {x}^{\left(\frac{1}{10000}\right)}\right)} \]
    3. sub-negN/A

      \[\leadsto \log \color{blue}{\left(1 + \left(\mathsf{neg}\left({x}^{\left(\frac{1}{10000}\right)}\right)\right)\right)} \]
    4. lower-log1p.f64N/A

      \[\leadsto \color{blue}{\mathsf{log1p}\left(\mathsf{neg}\left({x}^{\left(\frac{1}{10000}\right)}\right)\right)} \]
    5. lower-neg.f64 97.4%

      \[\leadsto \mathsf{log1p}\left(\color{blue}{-{x}^{\left(\frac{1}{10000}\right)}}\right) \]
    6. lift-/.f64N/A

      \[\leadsto \mathsf{log1p}\left(-{x}^{\color{blue}{\left(\frac{1}{10000}\right)}}\right) \]
    7. metadata-eval 97.4%

      \[\leadsto \mathsf{log1p}\left(-{x}^{\color{blue}{0.0001}}\right) \]
  4. Applied rewrites 97.4%

    \[\leadsto \color{blue}{\mathsf{log1p}\left(-{x}^{0.0001}\right)} \]
  5. Add Preprocessing

Alternative 11: 1.6% accurate, 1.1× speedup

\[\begin{array}{l} \\ \mathsf{log1p}\left({x}^{0.0001}\right) \end{array} \]
(FPCore (x) :precision binary64 (log1p (pow x 0.0001)))
double code(double x) {
	return log1p(pow(x, 0.0001));
}
public static double code(double x) {
	return Math.log1p(Math.pow(x, 0.0001));
}
def code(x):
	return math.log1p(math.pow(x, 0.0001))
function code(x)
	return log1p((x ^ 0.0001))
end
code[x_] := N[Log[1 + N[Power[x, 0.0001], $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\mathsf{log1p}\left({x}^{0.0001}\right)
\end{array}
Derivation
  1. Initial program 97.4%

    \[\log \left(1 - {x}^{\left(\frac{1}{10000}\right)}\right) \]
  2. Add Preprocessing
  3. Applied rewrites 99.3%

    \[\leadsto \log \color{blue}{\left(\frac{1 - {x}^{0.0009}}{\left(\left({x}^{0.0002} + 1\right) + {x}^{0.0001}\right) \cdot \left(\left(1 + {x}^{0.0006}\right) + {x}^{0.0003}\right)}\right)} \]
  4. Applied rewrites 1.6%

    \[\leadsto \color{blue}{\mathsf{log1p}\left({x}^{0.0001}\right)} \]
  5. Add Preprocessing

Reproduce

herbie shell --seed 1 
(FPCore (x)
  :name "log(1-x^(1/10000))"
  :precision binary64
  :pre (and (<= 0.0 x) (<= x 1.0))
  (log (- 1.0 (pow x (/ 1.0 10000.0)))))
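
Running the command above starts an interactive Herbie shell; pasting the FPCore expression into it should re-run the search that produced this report, and the --seed flag pins the random input sampling so the result is repeatable.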