import java.math.BigDecimal;

public class testtest {
    public static final BigDecimal TWO = new BigDecimal(2);
    public static final int digits = 1000;
    public static final BigDecimal TOLERANCE = BigDecimal.ONE.scaleByPowerOfTen(-digits);
    public static double MidpointMethod = 0;

    public static long MidpointMethod(int n) {
        BigDecimal k = new BigDecimal(n);
        BigDecimal a = BigDecimal.ONE; // set a to be one
        BigDecimal b = k;              // set b to be k (the input n)
        long start = System.nanoTime(); // start the timer
        while (a.multiply(a).subtract(k).abs().compareTo(TOLERANCE) >= 0) { // while a isn't close enough to the square root of k
            if (a.multiply(a).subtract(k).abs().compareTo(b.multiply(b).subtract(k).abs()) > 0) // if a is farther from the square root of k than b
                a = a.add(b).divide(TWO); // set a to be the average of a and b
            else                          // if a is closer to the square root of k than b
                b = a.add(b).divide(TWO); // set b to be the average of a and b
        }
        return System.nanoTime() - start; // return the time taken
    }

    public static void main(String[] args) {
        System.out.println(MidpointMethod(2) / 10e6);
    }
}

This program outputs 6224.5209, which I read as milliseconds (about 6.2 seconds), but when I ran it, it took well over 20 seconds to finish. Why does it report roughly 6 seconds when it actually took more than 20?
Also, is the 6 seconds an accurate and precise measure of how long the program took?
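A minimal sketch of one way to cross-check the figure, assuming it is acceptable to replace the existing main: time the whole call from the outside as well, and print both measurements converted to seconds (System.nanoTime() returns nanoseconds, so dividing by 1e9 gives seconds). The outer timing and the variable names here are my own additions, not part of the original program.

    public static void main(String[] args) {
        long outerStart = System.nanoTime();     // wall-clock start for the whole call
        long innerNanos = MidpointMethod(2);     // nanoseconds measured inside the method
        long outerNanos = System.nanoTime() - outerStart;

        // nanoTime() returns nanoseconds; dividing by 1e9 converts to seconds
        System.out.println("inner loop: " + innerNanos / 1e9 + " s");
        System.out.println("whole call: " + outerNanos / 1e9 + " s");
    }

If the two printed values roughly agree, the timing inside the method is sound and only the unit conversion in the original println is in question.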