Example usage for com.google.common.math DoubleMath fuzzyEquals

List of usage examples for com.google.common.math DoubleMath fuzzyEquals

Introduction

On this page you can find example usage for com.google.common.math DoubleMath fuzzyEquals.

Prototype

public static boolean fuzzyEquals(double a, double b, double tolerance) 

Document

Returns true if a and b are within tolerance of each other.
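
A minimal sketch of the method's contract (the values below are illustrative and assume the usual import of com.google.common.math.DoubleMath):

double sum = 0.1 + 0.2;                                  // 0.30000000000000004 due to binary rounding
boolean strict = (sum == 0.3);                           // false
boolean fuzzy = DoubleMath.fuzzyEquals(sum, 0.3, 1e-9);  // true, the difference is within the tolerance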

Usage

From source file:com.opengamma.strata.collect.DoubleArrayMath.java

/**
 * Compares each element in the first array to the matching index in the second array within a tolerance.
 * <p>
 * If the arrays differ in length, false is returned.
 * <p>
 * The input arrays are not mutated.
 * 
 * @param array1  the first array to check
 * @param array2  the second array to check
 * @param tolerance  the tolerance to use
 * @return true if the arrays are effectively equal
 */
public static boolean fuzzyEquals(double[] array1, double[] array2, double tolerance) {
    if (array1.length != array2.length) {
        return false;
    }
    for (int i = 0; i < array1.length; i++) {
        if (!DoubleMath.fuzzyEquals(array1[i], array2[i], tolerance)) {
            return false;
        }
    }
    return true;
}
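
A short, hypothetical usage sketch of the array comparison above:

double[] expected = {1.0, 2.0, 3.0};
double[] computed = {1.0 + 1e-12, 2.0, 3.0 - 1e-12};
boolean close = DoubleArrayMath.fuzzyEquals(expected, computed, 1e-9);                  // true
boolean wrongLength = DoubleArrayMath.fuzzyEquals(expected, new double[] {1.0}, 1e-9);  // false, lengths differ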

From source file:com.opengamma.strata.basics.currency.CurrencyAmount.java

/**
 * Converts this amount to an equivalent amount in the specified currency.
 * <p>
 * The result will be expressed in terms of the given currency, converting
 * using the specified FX rate.
 * <p>
 * For example, if this represents 'GBP 100' and this method is called with
 * arguments {@code (USD, 1.6)} then the result will be 'USD 160'.
 * 
 * @param resultCurrency  the currency of the result
 * @param fxRate  the FX rate from this currency to the result currency
 * @return the converted instance, which should be expressed in the specified currency
 * @throws IllegalArgumentException if the FX rate is not 1 when no conversion is required
 */
public CurrencyAmount convertedTo(Currency resultCurrency, double fxRate) {
    if (currency.equals(resultCurrency)) {
        if (DoubleMath.fuzzyEquals(fxRate, 1d, 1e-8)) {
            return this;
        }
        throw new IllegalArgumentException("FX rate must be 1 when no conversion required");
    }
    return CurrencyAmount.of(resultCurrency, amount * fxRate);
}
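
A usage sketch matching the Javadoc example above (assuming the Currency constants provided by the same library):

CurrencyAmount gbp = CurrencyAmount.of(Currency.GBP, 100);
CurrencyAmount usd = gbp.convertedTo(Currency.USD, 1.6);   // 'USD 160'
CurrencyAmount same = gbp.convertedTo(Currency.GBP, 1.0);  // returns this, as the rate is fuzzily equal to 1
// gbp.convertedTo(Currency.GBP, 1.2) would throw IllegalArgumentException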

From source file:com.opengamma.strata.pricer.impl.volatility.smile.function.SabrHaganVolatilityFunctionProvider.java

/**
 * Computes the first and second order derivatives of the Black implied volatility in the SABR model. 
 * <p>
 * The first derivative values will be stored in the input array {@code volatilityD}.
 * The array contains: [0] the derivative w.r.t. the forward, [1] the derivative w.r.t. the strike, [2] the derivative w.r.t. alpha,
 * [3] the derivative w.r.t. beta, [4] the derivative w.r.t. rho, and [5] the derivative w.r.t. nu.
 * Thus the length of the array should be 6.
 * <p>
 * The second derivative values will be stored in the input array {@code volatilityD2}. 
 * Only the second order derivatives with respect to the forward and strike are implemented.
 * The array contains [0][0] forward-forward; [0][1] forward-strike; [1][1] strike-strike.
 * Thus the size should be 2 x 2.
 * <p>
 * Around ATM, a first order expansion is used due to a 0/0-type indetermination.
 * The second order derivative produced is poor around ATM.
 * 
 * @param forward  the forward value of the underlying
 * @param strike  the strike value of the option
 * @param timeToExpiry  the time to expiry of the option
 * @param data  the SABR data.
 * @param volatilityD  the array used to return the first order derivative
 * @param volatilityD2  the array of array used to return the second order derivative
 * @return the Black implied volatility
 */
@Override
public double getVolatilityAdjoint2(double forward, double strike, double timeToExpiry, SabrFormulaData data,
        double[] volatilityD, double[][] volatilityD2) {
    double k = Math.max(strike, 0.000001);
    double alpha = data.getAlpha();
    double beta = data.getBeta();
    double rho = data.getRho();
    double nu = data.getNu();
    // Forward
    double h0 = (1 - beta) / 2;
    double h1 = forward * k;
    double h1h0 = Math.pow(h1, h0);
    double h12 = h1h0 * h1h0;
    double h2 = Math.log(forward / k);
    double h22 = h2 * h2;
    double h23 = h22 * h2;
    double h24 = h23 * h2;
    double f1 = h1h0 * (1 + h0 * h0 / 6.0 * (h22 + h0 * h0 / 20.0 * h24));
    double f2 = nu / alpha * h1h0 * h2;
    double f3 = h0 * h0 / 6.0 * alpha * alpha / h12 + rho * beta * nu * alpha / 4.0 / h1h0
            + (2 - 3 * rho * rho) / 24.0 * nu * nu;
    double sqrtf2 = Math.sqrt(1 - 2 * rho * f2 + f2 * f2);
    double f2x = 0.0;
    double x = 0.0, xp = 0, xpp = 0;
    if (DoubleMath.fuzzyEquals(f2, 0.0, SMALL_Z)) {
        f2x = 1.0 - 0.5 * f2 * rho; //small f2 expansion to f2^2 terms
    } else {
        if (DoubleMath.fuzzyEquals(rho, 1.0, RHO_EPS)) {
            x = f2 < 1.0 ? -Math.log(1.0 - f2) - 0.5 * Math.pow(f2 / (f2 - 1.0), 2) * (1.0 - rho)
                    : Math.log(2.0 * f2 - 2.0) - Math.log(1.0 - rho);
        } else {
            x = Math.log((sqrtf2 + f2 - rho) / (1 - rho));
        }
        xp = 1. / sqrtf2;
        xpp = (rho - f2) / Math.pow(sqrtf2, 3.0);
        f2x = f2 / x;
    }
    double sigma = alpha / f1 * f2x * (1 + f3 * timeToExpiry);
    // First level
    double h0Dbeta = -0.5;
    double sigmaDf1 = -sigma / f1;
    double sigmaDf2 = 0;
    if (DoubleMath.fuzzyEquals(f2, 0.0, SMALL_Z)) {
        sigmaDf2 = alpha / f1 * (1 + f3 * timeToExpiry) * -0.5 * rho;
    } else {
        sigmaDf2 = alpha / f1 * (1 + f3 * timeToExpiry) * (1.0 / x - f2 * xp / (x * x));
    }
    double sigmaDf3 = alpha / f1 * f2x * timeToExpiry;
    double sigmaDf4 = f2x / f1 * (1 + f3 * timeToExpiry);
    double sigmaDx = -alpha / f1 * f2 / (x * x) * (1 + f3 * timeToExpiry);
    double[][] sigmaD2ff = new double[3][3];
    sigmaD2ff[0][0] = -sigmaDf1 / f1 + sigma / (f1 * f1); //OK
    sigmaD2ff[0][1] = -sigmaDf2 / f1;
    sigmaD2ff[0][2] = -sigmaDf3 / f1;
    if (DoubleMath.fuzzyEquals(f2, 0.0, SMALL_Z)) {
        sigmaD2ff[1][2] = alpha / f1 * -0.5 * rho * timeToExpiry;
    } else {
        sigmaD2ff[1][1] = alpha / f1 * (1 + f3 * timeToExpiry)
                * (-2 * xp / (x * x) - f2 * xpp / (x * x) + 2 * f2 * xp * xp / (x * x * x));
        sigmaD2ff[1][2] = alpha / f1 * timeToExpiry * (1.0 / x - f2 * xp / (x * x));
    }
    sigmaD2ff[2][2] = 0.0;
    //      double sigma = alpha / f1 * f2x * (1 + f3 * theta);
    // Second level
    double[] f1Dh = new double[3];
    double[] f2Dh = new double[3];
    double[] f3Dh = new double[3];
    f1Dh[0] = h1h0 * (h0 * (h22 / 3.0 + h0 * h0 / 40.0 * h24)) + Math.log(h1) * f1;
    f1Dh[1] = h0 * f1 / h1;
    f1Dh[2] = h1h0 * (h0 * h0 / 6.0 * (2.0 * h2 + h0 * h0 / 5.0 * h23));
    f2Dh[0] = Math.log(h1) * f2;
    f2Dh[1] = h0 * f2 / h1;
    f2Dh[2] = nu / alpha * h1h0;
    f3Dh[0] = h0 / 3.0 * alpha * alpha / h12 - 2 * h0 * h0 / 6.0 * alpha * alpha / h12 * Math.log(h1)
            - rho * beta * nu * alpha / 4.0 / h1h0 * Math.log(h1);
    f3Dh[1] = -2 * h0 * h0 / 6.0 * alpha * alpha / h12 * h0 / h1
            - rho * beta * nu * alpha / 4.0 / h1h0 * h0 / h1;
    f3Dh[2] = 0.0;
    double[] f1Dp = new double[4]; // Derivative to sabr parameters
    double[] f2Dp = new double[4];
    double[] f3Dp = new double[4];
    double[] f4Dp = new double[4];
    f1Dp[0] = 0.0;
    f1Dp[1] = f1Dh[0] * h0Dbeta;
    f1Dp[2] = 0.0;
    f1Dp[3] = 0.0;
    f2Dp[0] = -f2 / alpha;
    f2Dp[1] = f2Dh[0] * h0Dbeta;
    f2Dp[2] = 0.0;
    f2Dp[3] = h1h0 * h2 / alpha;
    f3Dp[0] = h0 * h0 / 3.0 * alpha / h12 + rho * beta * nu / 4.0 / h1h0;
    f3Dp[1] = rho * nu * alpha / 4.0 / h1h0 + f3Dh[0] * h0Dbeta;
    f3Dp[2] = beta * nu * alpha / 4.0 / h1h0 - rho / 4.0 * nu * nu;
    f3Dp[3] = rho * beta * alpha / 4.0 / h1h0 + (2 - 3 * rho * rho) / 12.0 * nu;
    f4Dp[0] = 1.0;
    f4Dp[1] = 0.0;
    f4Dp[2] = 0.0;
    f4Dp[3] = 0.0;
    double sigmaDh1 = sigmaDf1 * f1Dh[1] + sigmaDf2 * f2Dh[1] + sigmaDf3 * f3Dh[1];
    double sigmaDh2 = sigmaDf1 * f1Dh[2] + sigmaDf2 * f2Dh[2] + sigmaDf3 * f3Dh[2];
    double[][] f1D2hh = new double[2][2]; // No h0
    double[][] f2D2hh = new double[2][2];
    double[][] f3D2hh = new double[2][2];
    f1D2hh[0][0] = h0 * (h0 - 1) * f1 / (h1 * h1);
    f1D2hh[0][1] = h0 * h1h0 / h1 * h0 * h0 / 6.0 * (2.0 * h2 + 4.0 * h0 * h0 / 20.0 * h23);
    f1D2hh[1][1] = h1h0 * (h0 * h0 / 6.0 * (2.0 + 12.0 * h0 * h0 / 20.0 * h2));
    f2D2hh[0][0] = h0 * (h0 - 1) * f2 / (h1 * h1);
    f2D2hh[0][1] = nu / alpha * h0 * h1h0 / h1;
    f2D2hh[1][1] = 0.0;
    f3D2hh[0][0] = 2 * h0 * (2 * h0 + 1) * h0 * h0 / 6.0 * alpha * alpha / (h12 * h1 * h1)
            + h0 * (h0 + 1) * rho * beta * nu * alpha / 4.0 / (h1h0 * h1 * h1);
    f3D2hh[0][1] = 0.0;
    f3D2hh[1][1] = 0.0;
    double[][] sigmaD2hh = new double[2][2]; // No h0
    for (int loopx = 0; loopx < 2; loopx++) {
        for (int loopy = loopx; loopy < 2; loopy++) {
            sigmaD2hh[loopx][loopy] = (sigmaD2ff[0][0] * f1Dh[loopy + 1] + sigmaD2ff[0][1] * f2Dh[loopy + 1]
                    + sigmaD2ff[0][2] * f3Dh[loopy + 1]) * f1Dh[loopx + 1]
                    + sigmaDf1 * f1D2hh[loopx][loopy]
                    + (sigmaD2ff[0][1] * f1Dh[loopy + 1] + sigmaD2ff[1][1] * f2Dh[loopy + 1]
                            + sigmaD2ff[1][2] * f3Dh[loopy + 1]) * f2Dh[loopx + 1]
                    + sigmaDf2 * f2D2hh[loopx][loopy]
                    + (sigmaD2ff[0][2] * f1Dh[loopy + 1] + sigmaD2ff[1][2] * f2Dh[loopy + 1]
                            + sigmaD2ff[2][2] * f3Dh[loopy + 1]) * f3Dh[loopx + 1]
                    + sigmaDf3 * f3D2hh[loopx][loopy];
        }
    }
    // Third level
    double h1Df = k;
    double h1Dk = forward;
    double h1D2ff = 0.0;
    double h1D2kf = 1.0;
    double h1D2kk = 0.0;
    double h2Df = 1.0 / forward;
    double h2Dk = -1.0 / k;
    double h2D2ff = -1 / (forward * forward);
    double h2D2fk = 0.0;
    double h2D2kk = 1.0 / (k * k);
    volatilityD[0] = sigmaDh1 * h1Df + sigmaDh2 * h2Df;
    volatilityD[1] = sigmaDh1 * h1Dk + sigmaDh2 * h2Dk;
    volatilityD[2] = sigmaDf1 * f1Dp[0] + sigmaDf2 * f2Dp[0] + sigmaDf3 * f3Dp[0] + sigmaDf4 * f4Dp[0];
    volatilityD[3] = sigmaDf1 * f1Dp[1] + sigmaDf2 * f2Dp[1] + sigmaDf3 * f3Dp[1] + sigmaDf4 * f4Dp[1];
    if (DoubleMath.fuzzyEquals(f2, 0.0, SMALL_Z)) {
        volatilityD[4] = -0.5 * f2 + sigmaDf3 * f3Dp[2];
    } else {
        double xDr;
        if (DoubleMath.fuzzyEquals(rho, 1.0, RHO_EPS)) {
            xDr = f2 > 1.0 ? 1.0 / (1.0 - rho) + (0.5 - f2) / (f2 - 1.0) / (f2 - 1.0)
                    : 0.5 * Math.pow(f2 / (1.0 - f2), 2.0)
                            + 0.25 * (f2 - 4.0) * Math.pow(f2 / (f2 - 1.0), 3) / (f2 - 1.0) * (1.0 - rho);
            if (Doubles.isFinite(xDr)) {
                volatilityD[4] = sigmaDf1 * f1Dp[2] + sigmaDx * xDr + sigmaDf3 * f3Dp[2] + sigmaDf4 * f4Dp[2];
            } else {
                volatilityD[4] = Double.NEGATIVE_INFINITY;
            }
        } else {
            xDr = (-f2 / sqrtf2 - 1 + (sqrtf2 + f2 - rho) / (1 - rho)) / (sqrtf2 + f2 - rho);
            volatilityD[4] = sigmaDf1 * f1Dp[2] + sigmaDx * xDr + sigmaDf3 * f3Dp[2] + sigmaDf4 * f4Dp[2];
        }
    }
    volatilityD[5] = sigmaDf1 * f1Dp[3] + sigmaDf2 * f2Dp[3] + sigmaDf3 * f3Dp[3] + sigmaDf4 * f4Dp[3];
    volatilityD2[0][0] = (sigmaD2hh[0][0] * h1Df + sigmaD2hh[0][1] * h2Df) * h1Df + sigmaDh1 * h1D2ff
            + (sigmaD2hh[0][1] * h1Df + sigmaD2hh[1][1] * h2Df) * h2Df + sigmaDh2 * h2D2ff;
    volatilityD2[0][1] = (sigmaD2hh[0][0] * h1Dk + sigmaD2hh[0][1] * h2Dk) * h1Df + sigmaDh1 * h1D2kf
            + (sigmaD2hh[0][1] * h1Dk + sigmaD2hh[1][1] * h2Dk) * h2Df + sigmaDh2 * h2D2fk;
    volatilityD2[1][0] = volatilityD2[0][1];
    volatilityD2[1][1] = (sigmaD2hh[0][0] * h1Dk + sigmaD2hh[0][1] * h2Dk) * h1Dk + sigmaDh1 * h1D2kk
            + (sigmaD2hh[0][1] * h1Dk + sigmaD2hh[1][1] * h2Dk) * h2Dk + sigmaDh2 * h2D2kk;
    return sigma;
}
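
A hypothetical call sketch for the adjoint method above; it assumes a provider instance is at hand and that SabrFormulaData.of(alpha, beta, rho, nu) is the available factory (both are assumptions, check the actual API):

// 'provider' is assumed to be an instance of SabrHaganVolatilityFunctionProvider
SabrFormulaData sabrData = SabrFormulaData.of(0.2, 0.5, -0.25, 0.3);  // alpha, beta, rho, nu (assumed factory)
double[] firstDerivs = new double[6];        // forward, strike, alpha, beta, rho, nu
double[][] secondDerivs = new double[2][2];  // forward-forward, forward-strike, strike-strike
double vol = provider.getVolatilityAdjoint2(105.0, 100.0, 1.5, sabrData, firstDerivs, secondDerivs);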

From source file:ml.shifu.shifu.container.meta.MetaFactory.java

/**
 * Validate the input value. Find the @MetaItem from warehouse, and do the validation
 *
 * @param isGridSearch
 *            - if grid search, ignore validation in train#params as the values are all set as lists
 * @param itemKey
 *            - the key to locate MetaItem
 * @param itemValue
 *            - the value to validate
 * @return "OK" if validation succeeds,
 *         otherwise the cause as a String
 * @throws Exception
 *             any exception in validation
 */
public static String validate(boolean isGridSearch, String itemKey, Object itemValue) throws Exception {
    MetaItem itemMeta = itemsWareHouse.get(itemKey);

    if (isGridSearch && filterOut(itemKey)) {
        return VALIDATE_OK;
    }

    if (itemMeta == null) {
        return itemKey + " - not found meta info.";
    }

    if (itemMeta.getType().equals("text")) {
        String value = ((itemValue == null) ? null : itemValue.toString());

        if (itemMeta.getMaxLength() != null && value != null && value.length() > itemMeta.getMaxLength()) {
            return itemKey + " - the length of value exceeds the max length : " + itemMeta.getMaxLength();
        }

        if (itemMeta.getMinLength() != null && (value == null || value.length() < itemMeta.getMinLength())) {
            if (value == null) {
                return itemKey + " - then shouldn't be null";
            } else {
                return itemKey + " - the length of value less than min length : " + itemMeta.getMinLength();
            }
        }

        if (CollectionUtils.isNotEmpty(itemMeta.getOptions())) {
            boolean isOptionValue = false;
            for (ValueOption itemOption : itemMeta.getOptions()) {
                String optValue = (String) itemOption.getValue();
                if (optValue.equalsIgnoreCase(value)) {
                    isOptionValue = true;
                    break;
                }
            }

            if (!isOptionValue) {
                return itemKey + " - the value couldn't be found in the option value list - "
                        + convertOptionIntoString(itemMeta.getOptions());
            }
        }
    } else if (itemMeta.getType().equals("integer") || itemMeta.getType().equals("int")) {
        if (itemValue == null) {
            if (CollectionUtils.isNotEmpty(itemMeta.getOptions())) {
                return itemKey + " - the value couldn't be null.";
            }
        } else {
            Integer value = null;
            try {
                value = Integer.valueOf(itemValue.toString());
            } catch (NumberFormatException e) {
                return itemKey + " - the value is not integer format.";
            }

            if (value != null && CollectionUtils.isNotEmpty(itemMeta.getOptions())) {
                boolean isOptionValue = false;
                for (ValueOption itemOption : itemMeta.getOptions()) {
                    Integer optValue = Integer.valueOf(itemOption.getValue().toString());
                    if (value.equals(optValue)) {
                        isOptionValue = true;
                        break;
                    }
                }

                if (!isOptionValue) {
                    return itemKey + " - the value couldn't be found in the option value list - "
                            + convertOptionIntoString(itemMeta.getOptions());
                }
            }
        }
    } else if (itemMeta.getType().equals("number") || itemMeta.getType().equals("float")) {
        if (itemValue == null) {
            if (CollectionUtils.isNotEmpty(itemMeta.getOptions())) {
                return itemKey + " - the value couldn't be null.";
            }
        } else {
            Double value = null;
            try {
                value = Double.valueOf(itemValue.toString());
            } catch (NumberFormatException e) {
                return itemKey + " - the value is not number format.";
            }

            if (value != null && CollectionUtils.isNotEmpty(itemMeta.getOptions())) {
                boolean isOptionValue = false;
                for (ValueOption itemOption : itemMeta.getOptions()) {
                    Double optValue = Double.valueOf(itemOption.getValue().toString());
                    if (DoubleMath.fuzzyEquals(value, optValue, Constants.TOLERANCE)) {
                        isOptionValue = true;
                        break;
                    }
                }

                if (!isOptionValue) {
                    return itemKey + " - the value couldn't be found in the option value list - "
                            + convertOptionIntoString(itemMeta.getOptions());
                }
            }
        }
    } else if (itemMeta.getType().equals("boolean")) {
        if (itemValue == null) {
            return itemKey + " - the value couldn't be null. Only true/false are perimited.";
        }

        if (!itemValue.toString().equalsIgnoreCase("true") && !itemValue.toString().equalsIgnoreCase("false")) {
            return itemKey + " - the value is illegal.  Only true/false are perimited.";
        }
    } else if (itemMeta.getType().equals("list")) {
        if (itemValue != null && itemMeta.getElement() != null) {
            @SuppressWarnings("unchecked")
            List<Object> valueList = (List<Object>) itemValue;

            for (Object obj : valueList) {
                if (itemMeta.getElementType().equals("object")) {
                    ValidateResult result = iterateCheck(isGridSearch, itemKey, obj);
                    if (!result.getStatus()) {
                        return result.getCauses().get(0);
                    }
                } else {
                    String validateStr = validate(isGridSearch, itemKey + ITEM_KEY_SEPERATOR + DUMMY, obj);
                    if (!validateStr.equals(VALIDATE_OK)) {
                        return validateStr;
                    }
                }
            }
        }
    } else if (itemMeta.getType().equals("map")) {
        if (itemValue != null && itemMeta.getElement() != null) {
            @SuppressWarnings("unchecked")
            Map<String, Object> valueMap = (Map<String, Object>) itemValue;

            Iterator<Entry<String, Object>> iterator = valueMap.entrySet().iterator();
            while (iterator.hasNext()) {
                Entry<String, Object> entry = iterator.next();
                String key = entry.getKey();
                Object value = entry.getValue();

                String validateStr = validate(isGridSearch, itemKey + ITEM_KEY_SEPERATOR + key, value);
                if (!validateStr.equals(VALIDATE_OK)) {
                    return validateStr;
                }
            }
        }
    }

    return VALIDATE_OK;
}
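
A hypothetical usage sketch of the validator above (the item key is made up for illustration; the "OK" string is the VALIDATE_OK value referred to in the Javadoc):

String result = MetaFactory.validate(false, "train#baggingNum", 5);  // hypothetical item key
if (!"OK".equals(result)) {
    // result carries the human-readable cause of the validation failure
}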

From source file:com.intuit.wasabi.analytics.impl.AnalysisToolsImpl.java

/**
 * {@inheritDoc}
 */
@Override
public void generateProgress(ExperimentStatistics experiment) {
    Map<Bucket.Label, BucketStatistics> buckets = experiment.getBuckets();
    Set<Event.Name> actionNames = new HashSet<>();
    //calculate joint action progress
    Set<Bucket.Label> winners = new HashSet<>();
    Set<Bucket.Label> losers = new HashSet<>();
    Set<Bucket.Label> winnersToAdd = new HashSet<>();
    Double fractionOfData = null;
    Map<Bucket.Label, Integer> bucketWins = new HashMap<>();

    for (BucketStatistics bucket : buckets.values()) {
        Bucket.Label bucketName = bucket.getLabel();

        if (bucket.getBucketComparisons() != null) {
            for (BucketComparison comparison : bucket.getBucketComparisons().values()) {
                ComparisonStatistics jointComparison = comparison.getJointActionComparison();

                if (comparison.getJointActionComparison().getClearComparisonWinner() != null) {
                    //count the number of times a bucket is clearly better than another (a "win")
                    CountComparisonWinnerOrAddBucketToLosers(losers, bucketWins, bucketName, jointComparison);
                }

                //keep the smallest fraction of data value
                Double jointFraction = jointComparison.getFractionDataCollected();

                if (jointFraction != null) {
                    if (fractionOfData == null) {
                        fractionOfData = jointFraction;
                    } else {
                        fractionOfData = min(fractionOfData, jointFraction);
                    }
                }

                //create set of all actions comparisons to be used later
                if (comparison.getActionComparisons() != null) {
                    actionNames.addAll(comparison.getActionComparisons().keySet());
                }
            }
        }
    }

    //calculate the maximum number of wins
    int maxWins = 0;
    for (Integer wins : bucketWins.values()) {
        maxWins = max(maxWins, wins);
    }

    //and add the buckets with that number to the set of winners
    if (maxWins > 0) {
        for (Map.Entry<Bucket.Label, Integer> entry : bucketWins.entrySet()) {
            if (entry.getValue() == maxWins) {
                winners.add(entry.getKey());
            }
        }

        //also add any bucket which is not clearly worse than these buckets
        for (Bucket.Label bucketLabel : winners) {
            BucketStatistics bucket = buckets.get(bucketLabel);
            if (bucket.getBucketComparisons() != null) {
                for (BucketComparison comparison : bucket.getBucketComparisons().values()) {
                    Bucket.Label winnerName = comparison.getJointActionComparison().getClearComparisonWinner();
                    Bucket.Label otherBucketName = comparison.getOtherLabel();

                    if (winnerName == null || winnerName.equals(otherBucketName)) {
                        winnersToAdd.add(otherBucketName);
                    }
                }
            }
        }
    }
    winners.addAll(winnersToAdd);

    //convert sets to lists
    List<Bucket.Label> winnersList = new ArrayList<>();
    winnersList.addAll(winners);
    List<Bucket.Label> losersList = new ArrayList<>();
    losersList.addAll(losers);

    boolean sufficientData = false;
    if (fractionOfData != null) {
        sufficientData = DoubleMath.fuzzyEquals(fractionOfData, 1.0, Math.ulp(1.0));
    }

    Progress jointProgress = new Progress.Builder().withWinnersSoFar(winnersList).withLosersSoFar(losersList)
            .withSufficientData(sufficientData).withFractionDataCollected(fractionOfData).build();
    experiment.setJointProgress(jointProgress);

    //loop over actions to calculate progress
    Map<Event.Name, ActionProgress> actionProgresses = new HashMap<>();

    for (Event.Name actionName : actionNames) {
        winners = new HashSet<>();
        losers = new HashSet<>();
        winnersToAdd = new HashSet<>();
        fractionOfData = null;
        bucketWins = new HashMap<>();

        for (BucketStatistics bucket : buckets.values()) {
            Bucket.Label bucketName = bucket.getLabel();

            if (bucket.getBucketComparisons() != null) {
                for (BucketComparison comparison : bucket.getBucketComparisons().values()) {

                    ActionComparisonStatistics action = comparison.getActionComparisons().get(actionName);
                    if (action != null) {
                        if (action.getClearComparisonWinner() != null) {
                            //count the number of times a bucket is clearly better than another (a "win")
                            CountComparisonWinnerOrAddBucketToLosers(losers, bucketWins, bucketName, action);
                        }

                        //keep the smallest fraction of data value
                        Double actionFraction = action.getFractionDataCollected();

                        if (actionFraction != null) {
                            if (fractionOfData == null) {
                                fractionOfData = actionFraction;
                            } else {
                                fractionOfData = min(fractionOfData, actionFraction);
                            }
                        }
                    }
                }
            }
        }

        //calculate the maximum number of wins
        maxWins = 0;
        for (Integer wins : bucketWins.values()) {
            maxWins = max(maxWins, wins);
        }

        //and add the buckets with that number to the set of winners
        if (maxWins > 0) {
            for (Map.Entry<Bucket.Label, Integer> entry : bucketWins.entrySet()) {
                if (entry.getValue() == maxWins) {
                    winners.add(entry.getKey());
                }
            }
            //also add any bucket which is not clearly worse than these buckets
            for (Bucket.Label bucketLabel : winners) {
                BucketStatistics bucket = buckets.get(bucketLabel);
                if (bucket.getBucketComparisons() != null) {
                    for (BucketComparison comparison : bucket.getBucketComparisons().values()) {
                        ActionComparisonStatistics action = comparison.getActionComparisons().get(actionName);
                        if (action != null) {
                            Bucket.Label winnerName = action.getClearComparisonWinner();
                            Bucket.Label otherBucketName = comparison.getOtherLabel();

                            if (winnerName == null || winnerName.equals(otherBucketName)) {
                                winnersToAdd.add(otherBucketName);
                            }
                        }
                    }
                }
            }
        }
        winners.addAll(winnersToAdd);

        //convert sets to lists
        winnersList = new ArrayList<>();
        winnersList.addAll(winners);
        losersList = new ArrayList<>();
        losersList.addAll(losers);

        sufficientData = false;
        if (fractionOfData != null) {
            sufficientData = DoubleMath.fuzzyEquals(fractionOfData, 1.0, Math.ulp(1.0));
        }

        ActionProgress actionProgress = new ActionProgress.Builder().withActionName(actionName)
                .withWinnersSoFarList(winnersList).withLosersSoFarList(losersList)
                .withSufficientData(sufficientData).withFractionDataCollected(fractionOfData).build();

        actionProgresses.put(actionName, actionProgress);
    }

    experiment.setActionProgress(actionProgresses);
    //calculate the experiment-level progress from the action-level progresses
    winners = null;
    losers = null;
    fractionOfData = null;
    int numberActions = 0;

    for (ActionProgress actionProgress : actionProgresses.values()) {
        //take the set intersection for winners and losers
        Set<Bucket.Label> winnersSet = new HashSet<>(actionProgress.getWinnersSoFar());
        if (winners == null) {
            winners = winnersSet;
        } else {
            winners.retainAll(winnersSet);
        }

        Set<Bucket.Label> losersSet = new HashSet<>(actionProgress.getLosersSoFar());
        if (losers == null) {
            losers = losersSet;
        } else {
            losers.retainAll(losersSet);
        }

        //average the fraction of data--here we keep the running sum
        Double actionFraction = actionProgress.getFractionDataCollected();
        if (actionFraction != null) {
            numberActions += 1;

            if (fractionOfData == null) {
                fractionOfData = actionFraction;
            } else {
                fractionOfData += actionFraction;
            }
        }
    }

    //now divide to get the average for fraction of data
    if (fractionOfData != null) {
        fractionOfData /= numberActions;
    }

    //convert sets to lists
    winnersList = new ArrayList<>();
    if (winners != null) {
        winnersList.addAll(winners);
    }
    losersList = new ArrayList<>();
    if (losers != null) {
        losersList.addAll(losers);
    }

    sufficientData = fractionOfData != null
            && (Math.abs(fractionOfData - 1.0f) < FLOAT_POINT_DIFFERENCE_EPSILON);

    experiment.setExperimentProgress(
            new Progress.Builder().withWinnersSoFar(winnersList).withLosersSoFar(losersList)
                    .withSufficientData(sufficientData).withFractionDataCollected(fractionOfData).build());
}

From source file:com.opengamma.strata.pricer.impl.volatility.smile.SabrHaganVolatilityFunctionProvider.java

/**
 * Computes the first and second order derivatives of the Black implied volatility in the SABR model.
 * <p>
 * The first derivative values will be stored in the input array {@code volatilityD}.
 * The array contains: [0] the derivative w.r.t. the forward, [1] the derivative w.r.t. the strike, [2] the derivative w.r.t. alpha,
 * [3] the derivative w.r.t. beta, [4] the derivative w.r.t. rho, and [5] the derivative w.r.t. nu.
 * Thus the length of the array should be 6.
 * <p>
 * The second derivative values will be stored in the input array {@code volatilityD2}. 
 * Only the second order derivatives with respect to the forward and strike are implemented.
 * The array contains [0][0] forward-forward; [0][1] forward-strike; [1][1] strike-strike.
 * Thus the size should be 2 x 2.
 * <p>
 * Around ATM, a first order expansion is used due to a 0/0-type indetermination.
 * The second order derivative produced is poor around ATM.
 * 
 * @param forward  the forward value of the underlying
 * @param strike  the strike value of the option
 * @param timeToExpiry  the time to expiry of the option
 * @param data  the SABR data.
 * @param volatilityD  the array used to return the first order derivative
 * @param volatilityD2  the array of array used to return the second order derivative
 * @return the Black implied volatility
 */
@Override
public double volatilityAdjoint2(double forward, double strike, double timeToExpiry, SabrFormulaData data,
        double[] volatilityD, double[][] volatilityD2) {
    double k = Math.max(strike, 0.000001);
    double alpha = data.getAlpha();
    double beta = data.getBeta();
    double rho = data.getRho();
    double nu = data.getNu();
    // Forward
    double h0 = (1 - beta) / 2;
    double h1 = forward * k;
    double h1h0 = Math.pow(h1, h0);
    double h12 = h1h0 * h1h0;
    double h2 = Math.log(forward / k);
    double h22 = h2 * h2;
    double h23 = h22 * h2;
    double h24 = h23 * h2;
    double f1 = h1h0 * (1 + h0 * h0 / 6.0 * (h22 + h0 * h0 / 20.0 * h24));
    double f2 = nu / alpha * h1h0 * h2;
    double f3 = h0 * h0 / 6.0 * alpha * alpha / h12 + rho * beta * nu * alpha / 4.0 / h1h0
            + (2 - 3 * rho * rho) / 24.0 * nu * nu;
    double sqrtf2 = Math.sqrt(1 - 2 * rho * f2 + f2 * f2);
    double f2x = 0.0;
    double x = 0.0, xp = 0, xpp = 0;
    if (DoubleMath.fuzzyEquals(f2, 0.0, SMALL_Z)) {
        f2x = 1.0 - 0.5 * f2 * rho; //small f2 expansion to f2^2 terms
    } else {
        if (DoubleMath.fuzzyEquals(rho, 1.0, RHO_EPS)) {
            x = f2 < 1.0 ? -Math.log(1.0 - f2) - 0.5 * Math.pow(f2 / (f2 - 1.0), 2) * (1.0 - rho)
                    : Math.log(2.0 * f2 - 2.0) - Math.log(1.0 - rho);
        } else {
            x = Math.log((sqrtf2 + f2 - rho) / (1 - rho));
        }
        xp = 1. / sqrtf2;
        xpp = (rho - f2) / Math.pow(sqrtf2, 3.0);
        f2x = f2 / x;
    }
    double sigma = Math.max(MIN_VOL, alpha / f1 * f2x * (1 + f3 * timeToExpiry));
    // First level
    double h0Dbeta = -0.5;
    double sigmaDf1 = -sigma / f1;
    double sigmaDf2 = 0;
    if (DoubleMath.fuzzyEquals(f2, 0.0, SMALL_Z)) {
        sigmaDf2 = alpha / f1 * (1 + f3 * timeToExpiry) * -0.5 * rho;
    } else {
        sigmaDf2 = alpha / f1 * (1 + f3 * timeToExpiry) * (1.0 / x - f2 * xp / (x * x));
    }
    double sigmaDf3 = alpha / f1 * f2x * timeToExpiry;
    double sigmaDf4 = f2x / f1 * (1 + f3 * timeToExpiry);
    double sigmaDx = -alpha / f1 * f2 / (x * x) * (1 + f3 * timeToExpiry);
    double[][] sigmaD2ff = new double[3][3];
    sigmaD2ff[0][0] = -sigmaDf1 / f1 + sigma / (f1 * f1); //OK
    sigmaD2ff[0][1] = -sigmaDf2 / f1;
    sigmaD2ff[0][2] = -sigmaDf3 / f1;
    if (DoubleMath.fuzzyEquals(f2, 0.0, SMALL_Z)) {
        sigmaD2ff[1][2] = alpha / f1 * -0.5 * rho * timeToExpiry;
    } else {
        sigmaD2ff[1][1] = alpha / f1 * (1 + f3 * timeToExpiry)
                * (-2 * xp / (x * x) - f2 * xpp / (x * x) + 2 * f2 * xp * xp / (x * x * x));
        sigmaD2ff[1][2] = alpha / f1 * timeToExpiry * (1.0 / x - f2 * xp / (x * x));
    }
    sigmaD2ff[2][2] = 0.0;
    //      double sigma = alpha / f1 * f2x * (1 + f3 * theta);
    // Second level
    double[] f1Dh = new double[3];
    double[] f2Dh = new double[3];
    double[] f3Dh = new double[3];
    f1Dh[0] = h1h0 * (h0 * (h22 / 3.0 + h0 * h0 / 40.0 * h24)) + Math.log(h1) * f1;
    f1Dh[1] = h0 * f1 / h1;
    f1Dh[2] = h1h0 * (h0 * h0 / 6.0 * (2.0 * h2 + h0 * h0 / 5.0 * h23));
    f2Dh[0] = Math.log(h1) * f2;
    f2Dh[1] = h0 * f2 / h1;
    f2Dh[2] = nu / alpha * h1h0;
    f3Dh[0] = h0 / 3.0 * alpha * alpha / h12 - 2 * h0 * h0 / 6.0 * alpha * alpha / h12 * Math.log(h1)
            - rho * beta * nu * alpha / 4.0 / h1h0 * Math.log(h1);
    f3Dh[1] = -2 * h0 * h0 / 6.0 * alpha * alpha / h12 * h0 / h1
            - rho * beta * nu * alpha / 4.0 / h1h0 * h0 / h1;
    f3Dh[2] = 0.0;
    double[] f1Dp = new double[4]; // Derivative to sabr parameters
    double[] f2Dp = new double[4];
    double[] f3Dp = new double[4];
    double[] f4Dp = new double[4];
    f1Dp[0] = 0.0;
    f1Dp[1] = f1Dh[0] * h0Dbeta;
    f1Dp[2] = 0.0;
    f1Dp[3] = 0.0;
    f2Dp[0] = -f2 / alpha;
    f2Dp[1] = f2Dh[0] * h0Dbeta;
    f2Dp[2] = 0.0;
    f2Dp[3] = h1h0 * h2 / alpha;
    f3Dp[0] = h0 * h0 / 3.0 * alpha / h12 + rho * beta * nu / 4.0 / h1h0;
    f3Dp[1] = rho * nu * alpha / 4.0 / h1h0 + f3Dh[0] * h0Dbeta;
    f3Dp[2] = beta * nu * alpha / 4.0 / h1h0 - rho / 4.0 * nu * nu;
    f3Dp[3] = rho * beta * alpha / 4.0 / h1h0 + (2 - 3 * rho * rho) / 12.0 * nu;
    f4Dp[0] = 1.0;
    f4Dp[1] = 0.0;
    f4Dp[2] = 0.0;
    f4Dp[3] = 0.0;
    double sigmaDh1 = sigmaDf1 * f1Dh[1] + sigmaDf2 * f2Dh[1] + sigmaDf3 * f3Dh[1];
    double sigmaDh2 = sigmaDf1 * f1Dh[2] + sigmaDf2 * f2Dh[2] + sigmaDf3 * f3Dh[2];
    double[][] f1D2hh = new double[2][2]; // No h0
    double[][] f2D2hh = new double[2][2];
    double[][] f3D2hh = new double[2][2];
    f1D2hh[0][0] = h0 * (h0 - 1) * f1 / (h1 * h1);
    f1D2hh[0][1] = h0 * h1h0 / h1 * h0 * h0 / 6.0 * (2.0 * h2 + 4.0 * h0 * h0 / 20.0 * h23);
    f1D2hh[1][1] = h1h0 * (h0 * h0 / 6.0 * (2.0 + 12.0 * h0 * h0 / 20.0 * h2));
    f2D2hh[0][0] = h0 * (h0 - 1) * f2 / (h1 * h1);
    f2D2hh[0][1] = nu / alpha * h0 * h1h0 / h1;
    f2D2hh[1][1] = 0.0;
    f3D2hh[0][0] = 2 * h0 * (2 * h0 + 1) * h0 * h0 / 6.0 * alpha * alpha / (h12 * h1 * h1)
            + h0 * (h0 + 1) * rho * beta * nu * alpha / 4.0 / (h1h0 * h1 * h1);
    f3D2hh[0][1] = 0.0;
    f3D2hh[1][1] = 0.0;
    double[][] sigmaD2hh = new double[2][2]; // No h0
    for (int loopx = 0; loopx < 2; loopx++) {
        for (int loopy = loopx; loopy < 2; loopy++) {
            sigmaD2hh[loopx][loopy] = (sigmaD2ff[0][0] * f1Dh[loopy + 1] + sigmaD2ff[0][1] * f2Dh[loopy + 1]
                    + sigmaD2ff[0][2] * f3Dh[loopy + 1]) * f1Dh[loopx + 1]
                    + sigmaDf1 * f1D2hh[loopx][loopy]
                    + (sigmaD2ff[0][1] * f1Dh[loopy + 1] + sigmaD2ff[1][1] * f2Dh[loopy + 1]
                            + sigmaD2ff[1][2] * f3Dh[loopy + 1]) * f2Dh[loopx + 1]
                    + sigmaDf2 * f2D2hh[loopx][loopy]
                    + (sigmaD2ff[0][2] * f1Dh[loopy + 1] + sigmaD2ff[1][2] * f2Dh[loopy + 1]
                            + sigmaD2ff[2][2] * f3Dh[loopy + 1]) * f3Dh[loopx + 1]
                    + sigmaDf3 * f3D2hh[loopx][loopy];
        }
    }
    // Third level
    double h1Df = k;
    double h1Dk = forward;
    double h1D2ff = 0.0;
    double h1D2kf = 1.0;
    double h1D2kk = 0.0;
    double h2Df = 1.0 / forward;
    double h2Dk = -1.0 / k;
    double h2D2ff = -1 / (forward * forward);
    double h2D2fk = 0.0;
    double h2D2kk = 1.0 / (k * k);
    volatilityD[0] = sigmaDh1 * h1Df + sigmaDh2 * h2Df;
    volatilityD[1] = sigmaDh1 * h1Dk + sigmaDh2 * h2Dk;
    volatilityD[2] = sigmaDf1 * f1Dp[0] + sigmaDf2 * f2Dp[0] + sigmaDf3 * f3Dp[0] + sigmaDf4 * f4Dp[0];
    volatilityD[3] = sigmaDf1 * f1Dp[1] + sigmaDf2 * f2Dp[1] + sigmaDf3 * f3Dp[1] + sigmaDf4 * f4Dp[1];
    if (DoubleMath.fuzzyEquals(f2, 0.0, SMALL_Z)) {
        volatilityD[4] = -0.5 * f2 + sigmaDf3 * f3Dp[2];
    } else {
        double xDr;
        if (DoubleMath.fuzzyEquals(rho, 1.0, RHO_EPS)) {
            xDr = f2 > 1.0 ? 1.0 / (1.0 - rho) + (0.5 - f2) / (f2 - 1.0) / (f2 - 1.0)
                    : 0.5 * Math.pow(f2 / (1.0 - f2), 2.0)
                            + 0.25 * (f2 - 4.0) * Math.pow(f2 / (f2 - 1.0), 3) / (f2 - 1.0) * (1.0 - rho);
            if (Doubles.isFinite(xDr)) {
                volatilityD[4] = sigmaDf1 * f1Dp[2] + sigmaDx * xDr + sigmaDf3 * f3Dp[2] + sigmaDf4 * f4Dp[2];
            } else {
                volatilityD[4] = Double.NEGATIVE_INFINITY;
            }
        } else {
            xDr = (-f2 / sqrtf2 - 1 + (sqrtf2 + f2 - rho) / (1 - rho)) / (sqrtf2 + f2 - rho);
            volatilityD[4] = sigmaDf1 * f1Dp[2] + sigmaDx * xDr + sigmaDf3 * f3Dp[2] + sigmaDf4 * f4Dp[2];
        }
    }
    volatilityD[5] = sigmaDf1 * f1Dp[3] + sigmaDf2 * f2Dp[3] + sigmaDf3 * f3Dp[3] + sigmaDf4 * f4Dp[3];
    volatilityD2[0][0] = (sigmaD2hh[0][0] * h1Df + sigmaD2hh[0][1] * h2Df) * h1Df + sigmaDh1 * h1D2ff
            + (sigmaD2hh[0][1] * h1Df + sigmaD2hh[1][1] * h2Df) * h2Df + sigmaDh2 * h2D2ff;
    volatilityD2[0][1] = (sigmaD2hh[0][0] * h1Dk + sigmaD2hh[0][1] * h2Dk) * h1Df + sigmaDh1 * h1D2kf
            + (sigmaD2hh[0][1] * h1Dk + sigmaD2hh[1][1] * h2Dk) * h2Df + sigmaDh2 * h2D2fk;
    volatilityD2[1][0] = volatilityD2[0][1];
    volatilityD2[1][1] = (sigmaD2hh[0][0] * h1Dk + sigmaD2hh[0][1] * h2Dk) * h1Dk + sigmaDh1 * h1D2kk
            + (sigmaD2hh[0][1] * h1Dk + sigmaD2hh[1][1] * h2Dk) * h2Dk + sigmaDh2 * h2D2kk;
    return sigma;
}

From source file:com.opengamma.strata.pricer.fxopt.ImpliedTrinomialTreeFxSingleBarrierOptionProductPricer.java

private void validateData(ResolvedFxSingleBarrierOption option, RatesProvider ratesProvider,
        BlackFxOptionVolatilities volatilities, RecombiningTrinomialTreeData data) {

    ResolvedFxVanillaOption underlyingOption = option.getUnderlyingOption();
    ArgChecker.isTrue(
            DoubleMath.fuzzyEquals(data.getTime(data.getNumberOfSteps()),
                    volatilities.relativeTime(underlyingOption.getExpiry()), SMALL),
            "time to expiry mismatch between pricing option and trinomial tree data");
    ArgChecker.isTrue(
            DoubleMath.fuzzyEquals(data.getSpot(),
                    ratesProvider.fxRate(underlyingOption.getUnderlying().getCurrencyPair()), SMALL),
            "today's FX rate mismatch between rates provider and trinomial tree data");
}

From source file:com.opengamma.strata.pricer.impl.volatility.smile.function.SabrHaganVolatilityFunctionProvider.java

private double getZOverChi(double rho, double z) {

    // Implementation comment: To avoid numerical instability (0/0) around ATM the first order approximation is used.
    if (DoubleMath.fuzzyEquals(z, 0.0, SMALL_Z)) {
        return 1.0 - rho * z / 2.0;
    }

    double rhoStar = 1 - rho;
    if (DoubleMath.fuzzyEquals(rhoStar, 0.0, RHO_EPS)) {
        if (z > 1.0) {
            if (rhoStar == 0.0) {
                return 0.0;
            }
            return z / (Math.log(2 * (z - 1)) - Math.log(rhoStar));
        } else if (z < 1.0) {
            return z / (-Math.log(1 - z) - 0.5 * Math.pow(z / (z - 1.0), 2) * rhoStar);
        } else {
            return 0.0;
        }
    }

    double rhoHat = 1 + rho;
    if (DoubleMath.fuzzyEquals(rhoHat, 0.0, RHO_EPS_NEGATIVE)) {
        if (z > -1) {
            return z / Math.log(1 + z);
        } else if (z < -1) {
            if (rhoHat == 0) {
                return 0.0;
            }
            double chi = Math.log(rhoHat) - Math.log(-(1 + z) / rhoStar);
            return z / chi;
        } else {
            return 0.0;
        }
    }

    double arg;
    if (z < LARGE_NEG_Z) {
        arg = (rho * rho - 1) / 2 / z; //the standard formula suffers rounding errors from finely balanced cancellation for very large negative z
    } else if (z > LARGE_POS_Z) {
        arg = 2 * (z - rho);
    } else {
        arg = (Math.sqrt(1 - 2 * rho * z + z * z) + z - rho);
        //Mathematically this cannot be less than zero, but you know what computers are like.
        if (arg <= 0.0) {
            return 0.0;
        }
    }

    double chi = Math.log(arg) - Math.log(rhoStar);
    return z / chi;
}
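
A quick check of the small-z branch above: expanding x(z) = log((sqrt(1 - 2*rho*z + z*z) + z - rho) / (1 - rho)) around z = 0 gives x(z) ≈ z + rho*z*z/2, so z / x(z) ≈ 1 / (1 + rho*z/2) ≈ 1 - rho*z/2, which is exactly the first order approximation returned when z is fuzzily equal to zero.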

From source file:com.linkedin.pinot.integration.tests.ClusterIntegrationTestUtils.java

/**
 * Run equivalent Pinot and H2 query and compare the results.
 * <p>LIMITATIONS:
 * <ul>
 *   <li>Skip comparison for selection and aggregation group-by when H2 results are too large to exhaust.</li>
 *   <li>Do not examine the order of result records.</li>
 * </ul>
 *
 * @param pqlQuery Pinot PQL query
 * @param brokerUrl Pinot broker URL
 * @param pinotConnection Pinot connection
 * @param sqlQueries H2 SQL queries
 * @param h2Connection H2 connection
 * @throws Exception
 */
public static void testQuery(@Nonnull String pqlQuery, @Nonnull String brokerUrl,
        @Nonnull com.linkedin.pinot.client.Connection pinotConnection, @Nullable List<String> sqlQueries,
        @Nullable Connection h2Connection) throws Exception {
    // Use broker response for metadata check, connection response for value check
    JSONObject pinotResponse = ClusterTest.postQuery(pqlQuery, brokerUrl);
    ResultSetGroup pinotResultSetGroup = pinotConnection.execute(pqlQuery);

    // Skip comparison if SQL queries are not specified
    if (sqlQueries == null) {
        return;
    }

    Assert.assertNotNull(h2Connection);
    Statement h2statement = h2Connection.createStatement(ResultSet.TYPE_FORWARD_ONLY,
            ResultSet.CONCUR_READ_ONLY);

    int pinotNumRecordsSelected = pinotResponse.getInt("numDocsScanned");

    // Aggregation results
    if (pinotResponse.has("aggregationResults")) {
        // Check number of aggregation results
        int numAggregationResults = pinotResultSetGroup.getResultSetCount();
        int numSqlQueries = sqlQueries.size();
        if (numAggregationResults != numSqlQueries) {
            String failureMessage = "Number of aggregation results: " + numAggregationResults
                    + " does not match number of SQL queries: " + numSqlQueries;
            failure(pqlQuery, sqlQueries, failureMessage);
        }

        // Get aggregation type
        JSONObject pinotFirstAggregationResult = pinotResponse.getJSONArray("aggregationResults")
                .getJSONObject(0);

        // Aggregation-only results
        if (pinotFirstAggregationResult.has("value")) {
            // Check over all aggregation functions
            for (int aggregationIndex = 0; aggregationIndex < numAggregationResults; aggregationIndex++) {
                // Get expected value for the aggregation
                h2statement.execute(sqlQueries.get(aggregationIndex));
                ResultSet h2ResultSet = h2statement.getResultSet();
                h2ResultSet.first();
                String h2Value = h2ResultSet.getString(1);

                // If H2 value is null, it means no record selected in H2
                if (h2Value == null) {
                    if (pinotNumRecordsSelected != 0) {
                        String failureMessage = "No record selected in H2 but " + pinotNumRecordsSelected
                                + " records selected in Pinot";
                        failure(pqlQuery, sqlQueries, failureMessage);
                    }

                    // Skip further comparison
                    return;
                }

                // Fuzzy compare expected value and actual value
                double expectedValue = Double.parseDouble(h2Value);
                String pinotValue = pinotResultSetGroup.getResultSet(aggregationIndex).getString(0);
                double actualValue = Double.parseDouble(pinotValue);
                if (!DoubleMath.fuzzyEquals(actualValue, expectedValue, 1.0)) {
                    String failureMessage = "Value: " + aggregationIndex + " does not match, expected: "
                            + h2Value + ", got: " + pinotValue;
                    failure(pqlQuery, sqlQueries, failureMessage);
                }
            }

            return;
        }

        // Group-by results
        if (pinotFirstAggregationResult.has("groupByResult")) {
            // Get number of groups
            com.linkedin.pinot.client.ResultSet pinotFirstGroupByResultSet = pinotResultSetGroup
                    .getResultSet(0);
            int pinotNumGroups = pinotFirstGroupByResultSet.getRowCount();

            // Get number of group keys in each group
            // If no group-by result returned by Pinot, set numGroupKeys to 0 since no comparison needed
            int pinotNumGroupKeys;
            if (pinotNumGroups == 0) {
                pinotNumGroupKeys = 0;
            } else {
                pinotNumGroupKeys = pinotFirstGroupByResultSet.getGroupKeyLength();
            }

            // Check over all aggregation functions
            for (int aggregationIndex = 0; aggregationIndex < numAggregationResults; aggregationIndex++) {
                // Construct expected result map from concatenated group keys to value
                h2statement.execute(sqlQueries.get(aggregationIndex));
                ResultSet h2ResultSet = h2statement.getResultSet();
                Map<String, String> expectedValues = new HashMap<>();
                int h2NumGroups;
                for (h2NumGroups = 0; h2ResultSet.next()
                        && h2NumGroups < MAX_NUM_ROWS_TO_COMPARE; h2NumGroups++) {
                    if (pinotNumGroupKeys != 0) {
                        StringBuilder groupKey = new StringBuilder();
                        for (int groupKeyIndex = 1; groupKeyIndex <= pinotNumGroupKeys; groupKeyIndex++) {
                            // Convert boolean value to lower case
                            groupKey.append(convertBooleanToLowerCase(h2ResultSet.getString(groupKeyIndex)))
                                    .append(' ');
                        }
                        expectedValues.put(groupKey.toString(), h2ResultSet.getString(pinotNumGroupKeys + 1));
                    }
                }

                // No record selected in H2
                if (h2NumGroups == 0) {
                    if (pinotNumGroups != 0) {
                        String failureMessage = "No group returned in H2 but " + pinotNumGroups
                                + " groups returned in Pinot";
                        failure(pqlQuery, sqlQueries, failureMessage);
                    }

                    // If the query has a HAVING clause and both H2 and Pinot have no groups, that is expected, so we don't need
                    // to compare the number of docs scanned
                    if (pqlQuery.contains("HAVING")) {
                        return;
                    }

                    if (pinotNumRecordsSelected != 0) {
                        String failureMessage = "No group returned in Pinot but " + pinotNumRecordsSelected
                                + " records selected";
                        failure(pqlQuery, sqlQueries, failureMessage);
                    }

                    // Skip further comparison
                    return;
                }

                // Only compare exhausted results
                if (h2NumGroups < MAX_NUM_ROWS_TO_COMPARE) {
                    // Check if all Pinot results are contained in the H2 results
                    com.linkedin.pinot.client.ResultSet pinotGroupByResultSet = pinotResultSetGroup
                            .getResultSet(aggregationIndex);
                    for (int groupIndex = 0; groupIndex < pinotNumGroups; groupIndex++) {
                        // Concatenate Pinot group keys
                        StringBuilder groupKeyBuilder = new StringBuilder();
                        for (int groupKeyIndex = 0; groupKeyIndex < pinotNumGroupKeys; groupKeyIndex++) {
                            groupKeyBuilder
                                    .append(pinotGroupByResultSet.getGroupKeyString(groupIndex, groupKeyIndex))
                                    .append(' ');
                        }
                        String groupKey = groupKeyBuilder.toString();

                        // Fuzzy compare expected value and actual value
                        String h2Value = expectedValues.get(groupKey);
                        if (h2Value == null) {
                            String failureMessage = "Group returned in Pinot but not in H2: " + groupKey;
                            failure(pqlQuery, sqlQueries, failureMessage);
                            return;
                        }
                        double expectedValue = Double.parseDouble(h2Value);
                        String pinotValue = pinotGroupByResultSet.getString(groupIndex);
                        double actualValue = Double.parseDouble(pinotValue);
                        if (!DoubleMath.fuzzyEquals(actualValue, expectedValue, 1.0)) {
                            String failureMessage = "Value: " + aggregationIndex + " does not match, expected: "
                                    + h2Value + ", got: " + pinotValue + ", for group: " + groupKey;
                            failure(pqlQuery, sqlQueries, failureMessage);
                        }
                    }
                }
            }

            return;
        }

        // Neither aggregation-only nor group-by results
        String failureMessage = "Inside aggregation results, no aggregation-only or group-by results found";
        failure(pqlQuery, sqlQueries, failureMessage);
    }

    // Selection results
    if (pinotResponse.has("selectionResults")) {
        // Construct expected result set
        h2statement.execute(sqlQueries.get(0));
        ResultSet h2ResultSet = h2statement.getResultSet();
        ResultSetMetaData h2MetaData = h2ResultSet.getMetaData();

        Set<String> expectedValues = new HashSet<>();
        Map<String, String> reusableExpectedValueMap = new HashMap<>();
        Map<String, List<String>> reusableMultiValuesMap = new HashMap<>();
        List<String> reusableColumnOrder = new ArrayList<>();
        int h2NumRows;
        for (h2NumRows = 0; h2ResultSet.next() && h2NumRows < MAX_NUM_ROWS_TO_COMPARE; h2NumRows++) {
            reusableExpectedValueMap.clear();
            reusableMultiValuesMap.clear();
            reusableColumnOrder.clear();

            int numColumns = h2MetaData.getColumnCount();
            for (int columnIndex = 1; columnIndex <= numColumns; columnIndex++) {
                String columnName = h2MetaData.getColumnName(columnIndex);

                // Handle null result and convert boolean value to lower case
                String columnValue = h2ResultSet.getString(columnIndex);
                if (columnValue == null) {
                    columnValue = "null";
                } else {
                    columnValue = convertBooleanToLowerCase(columnValue);
                }

                // Handle multi-value columns
                int length = columnName.length();
                if (length > 5 && columnName.substring(length - 5, length - 1).equals("__MV")) {
                    // Multi-value column
                    String multiValueColumnName = columnName.substring(0, length - 5);
                    List<String> multiValue = reusableMultiValuesMap.get(multiValueColumnName);
                    if (multiValue == null) {
                        multiValue = new ArrayList<>();
                        reusableMultiValuesMap.put(multiValueColumnName, multiValue);
                        reusableColumnOrder.add(multiValueColumnName);
                    }
                    multiValue.add(columnValue);
                } else {
                    // Single-value column
                    reusableExpectedValueMap.put(columnName, columnValue);
                    reusableColumnOrder.add(columnName);
                }
            }

            // Add multi-value column results to the expected values
            // The reason for this step is that Pinot does not maintain order of elements in multi-value columns
            for (Map.Entry<String, List<String>> entry : reusableMultiValuesMap.entrySet()) {
                List<String> multiValue = entry.getValue();
                Collections.sort(multiValue);
                reusableExpectedValueMap.put(entry.getKey(), multiValue.toString());
            }

            // Build expected value String
            StringBuilder expectedValue = new StringBuilder();
            for (String column : reusableColumnOrder) {
                expectedValue.append(column).append(':').append(reusableExpectedValueMap.get(column))
                        .append(' ');
            }

            expectedValues.add(expectedValue.toString());
        }

        com.linkedin.pinot.client.ResultSet pinotSelectionResultSet = pinotResultSetGroup.getResultSet(0);
        int pinotNumRows = pinotSelectionResultSet.getRowCount();

        // No record selected in H2
        if (h2NumRows == 0) {
            if (pinotNumRows != 0) {
                String failureMessage = "No record selected in H2 but number of records selected in Pinot: "
                        + pinotNumRows;
                failure(pqlQuery, sqlQueries, failureMessage);
                return;
            }

            if (pinotNumRecordsSelected != 0) {
                String failureMessage = "No selection result returned in Pinot but number of records selected: "
                        + pinotNumRecordsSelected;
                failure(pqlQuery, sqlQueries, failureMessage);
                return;
            }

            // Skip further comparison
            return;
        }

        // Only compare exhausted results
        if (h2NumRows < MAX_NUM_ROWS_TO_COMPARE) {
            // Check that Pinot results are contained in the H2 results
            int numColumns = pinotSelectionResultSet.getColumnCount();

            for (int rowIndex = 0; rowIndex < pinotNumRows; rowIndex++) {
                // Build actual value String.
                StringBuilder actualValueBuilder = new StringBuilder();
                for (int columnIndex = 0; columnIndex < numColumns; columnIndex++) {
                    // Convert column name to all uppercase to make it compatible with H2
                    String columnName = pinotSelectionResultSet.getColumnName(columnIndex).toUpperCase();
                    String columnResult = pinotSelectionResultSet.getString(rowIndex, columnIndex);

                    // TODO: Find a better way to identify multi-value column
                    if (columnResult.charAt(0) == '[') {
                        // Multi-value column
                        JSONArray columnValues = new JSONArray(columnResult);
                        List<String> multiValue = new ArrayList<>();
                        int length = columnValues.length();
                        for (int elementIndex = 0; elementIndex < length; elementIndex++) {
                            multiValue.add(columnValues.getString(elementIndex));
                        }
                        for (int elementIndex = length; elementIndex < MAX_NUM_ELEMENTS_IN_MULTI_VALUE_TO_COMPARE; elementIndex++) {
                            multiValue.add("null");
                        }
                        Collections.sort(multiValue);
                        actualValueBuilder.append(columnName).append(':').append(multiValue.toString())
                                .append(' ');
                    } else {
                        // Single-value column
                        actualValueBuilder.append(columnName).append(':').append(columnResult).append(' ');
                    }
                }
                String actualValue = actualValueBuilder.toString();

                // Check actual value in expected values set
                if (!expectedValues.contains(actualValue)) {
                    String failureMessage = "Selection result returned in Pinot but not in H2: " + actualValue;
                    failure(pqlQuery, sqlQueries, failureMessage);
                    return;
                }
            }
        }
    } else {
        // Neither aggregation nor selection results
        String failureMessage = "No aggregation or selection results found for query: " + pqlQuery;
        failure(pqlQuery, sqlQueries, failureMessage);
    }
}

From source file:com.opengamma.strata.pricer.impl.volatility.smile.SabrHaganVolatilityFunctionProvider.java

private double getZOverChi(double rho, double z) {

    // Implementation comment: To avoid numerical instability (0/0) around ATM the first order approximation is used.
    if (DoubleMath.fuzzyEquals(z, 0.0, SMALL_Z)) {
        return 1.0 - rho * z / 2.0;
    }

    double rhoStar = 1 - rho;
    if (DoubleMath.fuzzyEquals(rhoStar, 0.0, RHO_EPS)) {
        if (z < 1.0) {
            return -z / Math.log(1.0d - z);
        } else {
            throw new IllegalArgumentException("can't handle z>=1, rho=1");
        }
    }

    double rhoHat = 1 + rho;
    if (DoubleMath.fuzzyEquals(rhoHat, 0.0, RHO_EPS_NEGATIVE)) {
        if (z > -1) {
            return z / Math.log(1 + z);
        } else if (z < -1) {
            if (rhoHat == 0) {
                return 0.0;
            }
            double chi = Math.log(rhoHat) - Math.log(-(1 + z) / rhoStar);
            return z / chi;
        } else {
            return 0.0;
        }
    }

    double arg;
    if (z < LARGE_NEG_Z) {
        arg = (rho * rho - 1) / 2 / z; //the standard formula suffers rounding errors from finely balanced cancellation for very large negative z
    } else if (z > LARGE_POS_Z) {
        arg = 2 * (z - rho);
    } else {
        arg = (Math.sqrt(1 - 2 * rho * z + z * z) + z - rho);
        //Mathematically this cannot be less than zero, but you know what computers are like.
        if (arg <= 0.0) {
            return 0.0;
        }
    }

    double chi = Math.log(arg) - Math.log(rhoStar);
    return z / chi;
}