Example usage for java.lang Double POSITIVE_INFINITY

Introduction

On this page you can find example usages of the java.lang.Double.POSITIVE_INFINITY field, collected from open-source projects.

Prototype

double POSITIVE_INFINITY

Document

A constant holding the positive infinity of type double. It is equal to the value returned by Double.longBitsToDouble(0x7ff0000000000000L).
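
A minimal, self-contained sketch of the two idioms that recur in the examples below: seeding a running minimum with POSITIVE_INFINITY and using it as a sentinel for an unbounded limit. The class name and values are illustrative only.

public class PositiveInfinityDemo {
    public static void main(String[] args) {
        // 1. Seed a running minimum: every finite value is smaller than +Infinity,
        //    so the first candidate always replaces the initial value.
        double[] values = { 3.5, -1.2, 7.0 };
        double min = Double.POSITIVE_INFINITY;
        for (double v : values) {
            if (v < min) {
                min = v;
            }
        }
        System.out.println("min = " + min); // min = -1.2

        // 2. Sentinel for "no upper bound": every finite value passes the test.
        double cutoff = Double.POSITIVE_INFINITY;
        System.out.println(100.0 < cutoff);            // true
        System.out.println(Double.isInfinite(cutoff)); // true

        // Arithmetic follows IEEE 754: infinity propagates, and the difference
        // of two positive infinities is NaN.
        System.out.println(1.0 / 0.0 == Double.POSITIVE_INFINITY);               // true
        System.out.println(Double.POSITIVE_INFINITY - Double.POSITIVE_INFINITY); // NaN
    }
}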

Usage

From source file:com.opengamma.analytics.financial.model.volatility.BlackScholesFormulaRepository.java

/**
* The dual delta (first derivative of option price with respect to strike)
* @param spot The spot value of the underlying
* @param strike The strike
* @param timeToExpiry The time-to-expiry
* @param lognormalVol The log-normal volatility
* @param interestRate The interest rate 
* @param costOfCarry The cost-of-carry  rate
* @param isCall true for call
* @return The dual delta
*/
@ExternalFunction
public static double dualDelta(final double spot, final double strike, final double timeToExpiry,
        final double lognormalVol, final double interestRate, final double costOfCarry, final boolean isCall) {
    ArgumentChecker.isTrue(spot >= 0.0, "negative/NaN spot; have {}", spot);
    ArgumentChecker.isTrue(strike >= 0.0, "negative/NaN strike; have {}", strike);
    ArgumentChecker.isTrue(timeToExpiry >= 0.0, "negative/NaN timeToExpiry; have {}", timeToExpiry);
    ArgumentChecker.isTrue(lognormalVol >= 0.0, "negative/NaN lognormalVol; have {}", lognormalVol);
    ArgumentChecker.isFalse(Double.isNaN(interestRate), "interestRate is NaN");
    ArgumentChecker.isFalse(Double.isNaN(costOfCarry), "costOfCarry is NaN");

    double discount = 0.;
    if (-interestRate > LARGE) {
        return isCall ? Double.NEGATIVE_INFINITY : (costOfCarry > LARGE ? 0. : Double.POSITIVE_INFINITY);
    }
    if (interestRate > LARGE) {
        return 0.;
    }
    discount = (Math.abs(interestRate) < SMALL && timeToExpiry > LARGE) ? 1.
            : Math.exp(-interestRate * timeToExpiry);

    if (spot > LARGE * strike) {
        return isCall ? -discount : 0.;
    }
    if (spot < SMALL * strike) {
        return isCall ? 0. : discount;
    }

    final int sign = isCall ? 1 : -1;
    final double rootT = Math.sqrt(timeToExpiry);
    double sigmaRootT = lognormalVol * rootT;
    if (Double.isNaN(sigmaRootT)) {
        sigmaRootT = 1.; //ref value is returned
    }

    double factor = Math.exp(costOfCarry * timeToExpiry);
    if (Double.isNaN(factor)) {
        factor = 1.; //ref value is returned
    }
    double rescaledSpot = spot * factor;

    double d2 = 0.;
    if (Math.abs(spot - strike) < SMALL || sigmaRootT > LARGE || (spot > LARGE && strike > LARGE)) {
        final double coefD2 = (costOfCarry / lognormalVol - 0.5 * lognormalVol);
        final double tmp = coefD2 * rootT;
        d2 = Double.isNaN(tmp) ? 0. : tmp;
    } else {
        if (sigmaRootT < SMALL) {
            return isCall ? (rescaledSpot > strike ? -discount : 0.) : (rescaledSpot < strike ? discount : 0.);
        }
        final double tmp = costOfCarry * rootT / lognormalVol;
        final double sig = (costOfCarry >= 0.) ? 1. : -1.;
        final double scnd = Double.isNaN(tmp)
                ? ((lognormalVol < LARGE && lognormalVol > SMALL) ? sig / lognormalVol : sig * rootT)
                : tmp;
        d2 = Math.log(spot / strike) / sigmaRootT + scnd - 0.5 * sigmaRootT;
    }
    //    if (Double.isNaN(d2)) {
    //      throw new IllegalArgumentException("NaN found");
    //    }
    final double norm = NORMAL.getCDF(sign * d2);

    return norm < SMALL ? 0. : -sign * discount * norm;
}
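
A hedged usage sketch for dualDelta, assuming the OpenGamma analytics library is on the classpath; the numeric inputs are illustrative and not taken from the project. Double.POSITIVE_INFINITY only appears in the limiting branch for an extremely negative interest rate (governed by the class's internal LARGE constant), so ordinary inputs produce finite values.

import com.opengamma.analytics.financial.model.volatility.BlackScholesFormulaRepository;

public class DualDeltaDemo {
    public static void main(String[] args) {
        double spot = 100.0;
        double strike = 100.0;
        double timeToExpiry = 1.0;
        double lognormalVol = 0.2;
        double interestRate = 0.05;
        double costOfCarry = 0.05;

        double callDualDelta = BlackScholesFormulaRepository.dualDelta(
                spot, strike, timeToExpiry, lognormalVol, interestRate, costOfCarry, true);
        double putDualDelta = BlackScholesFormulaRepository.dualDelta(
                spot, strike, timeToExpiry, lognormalVol, interestRate, costOfCarry, false);

        // The dual delta of a call is negative and that of a put is positive.
        System.out.println(callDualDelta < 0.0); // expected: true
        System.out.println(putDualDelta > 0.0);  // expected: true
    }
}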

From source file:com.opengamma.analytics.financial.model.finitedifference.CrankNicolsonFiniteDifferenceSOR.java

@SuppressWarnings("deprecation")
public PDEResults1D solve(final ZZConvectionDiffusionPDEDataBundle pdeData, final double[] timeGrid,
        final double[] spaceGrid, final BoundaryCondition lowerBoundary, final BoundaryCondition upperBoundary,
        final Surface<Double, Double, Double> freeBoundary) {
    Validate.notNull(pdeData, "pde data");

    final PDEGrid1D grid = new PDEGrid1D(timeGrid, spaceGrid);

    final int tNodes = timeGrid.length;
    final int xNodes = spaceGrid.length;
    Validate.isTrue(tNodes > 1, "need at least 2 time nodes");
    Validate.isTrue(xNodes > 2, "need at least 3 space nodes");

    // TODO would like more sophistication than simply checking that the grid is consistent with the boundary level
    Validate.isTrue(Math.abs(spaceGrid[0] - lowerBoundary.getLevel()) < 1e-7,
            "space grid not consistent with boundary level");
    Validate.isTrue(Math.abs(spaceGrid[xNodes - 1] - upperBoundary.getLevel()) < 1e-7,
            "space grid not consistent with boundary level");

    final double[] dt = new double[tNodes - 1];
    for (int n = 0; n < tNodes - 1; n++) {
        dt[n] = timeGrid[n + 1] - timeGrid[n];
        Validate.isTrue(dt[n] > 0, "time steps must be increasing");
    }

    final double[] dx = new double[xNodes - 1];
    for (int i = 0; i < xNodes - 1; i++) {
        dx[i] = spaceGrid[i + 1] - spaceGrid[i];
        Validate.isTrue(dx[i] > 0, "space steps must be increasing");
    }

    final double[] f = new double[xNodes];
    final double[] q = new double[xNodes];
    final double[][] m = new double[xNodes][xNodes];

    double a, b, c, aa, bb, cc;

    for (int i = 0; i < xNodes; i++) {
        f[i] = pdeData.getInitialValue(spaceGrid[i]);
    }

    for (int n = 1; n < tNodes; n++) {

        for (int i = 1; i < xNodes - 1; i++) {
            a = pdeData.getA(timeGrid[n - 1], spaceGrid[i]);
            b = pdeData.getB(timeGrid[n - 1], spaceGrid[i]);
            c = pdeData.getC(timeGrid[n - 1], spaceGrid[i]);

            aa = (1 - _theta) * dt[n - 1]
                    * (-2 / dx[i - 1] / (dx[i - 1] + dx[i]) * a + dx[i] / dx[i - 1] / (dx[i - 1] + dx[i]) * b);
            bb = 1 + (1 - _theta) * dt[n - 1]
                    * (2 / dx[i - 1] / dx[i] * a - (dx[i] - dx[i - 1]) / dx[i - 1] / dx[i] * b - c);
            cc = (1 - _theta) * dt[n - 1]
                    * (-2 / dx[i] / (dx[i - 1] + dx[i]) * a - dx[i - 1] / dx[i] / (dx[i - 1] + dx[i]) * b);
            q[i] = aa * f[i - 1] + bb * f[i] + cc * f[i + 1];

            // TODO could store these
            a = pdeData.getA(timeGrid[n], spaceGrid[i]);
            b = pdeData.getB(timeGrid[n], spaceGrid[i]);
            c = pdeData.getC(timeGrid[n], spaceGrid[i]);
            aa = dt[n - 1]
                    * (-2 / dx[i - 1] / (dx[i - 1] + dx[i]) * a + dx[i] / dx[i - 1] / (dx[i - 1] + dx[i]) * b);
            bb = dt[n - 1] * (2 / dx[i - 1] / dx[i] * a - (dx[i] - dx[i - 1]) / dx[i - 1] / dx[i] * b - c);
            cc = dt[n - 1]
                    * (-2 / dx[i] / (dx[i - 1] + dx[i]) * a - dx[i - 1] / dx[i] / (dx[i - 1] + dx[i]) * b);
            m[i][i - 1] = -_theta * aa;
            m[i][i] = 1 - _theta * bb;
            m[i][i + 1] = -_theta * cc;
        }

        double[] temp = lowerBoundary.getLeftMatrixCondition(pdeData.getCoefficients(), grid, timeGrid[n]);
        for (int k = 0; k < temp.length; k++) {
            m[0][k] = temp[k];
        }

        temp = upperBoundary.getLeftMatrixCondition(pdeData.getCoefficients(), grid, timeGrid[n]);
        for (int k = 0; k < temp.length; k++) {
            m[xNodes - 1][xNodes - 1 - k] = temp[k];
        }
        // debug
        // m[xNodes - 1][xNodes - 3] = 2 / dx[xNodes - 3] / (dx[xNodes - 3] + dx[xNodes - 2]);
        // m[xNodes - 1][xNodes - 2] = -2 / dx[xNodes - 3] / dx[xNodes - 2];
        // m[xNodes - 1][xNodes - 1] = 2 / dx[xNodes - 2] / (dx[xNodes - 3] + dx[xNodes - 2]);

        temp = lowerBoundary.getRightMatrixCondition(pdeData.getCoefficients(), grid, timeGrid[n]);
        double sum = 0;
        for (int k = 0; k < temp.length; k++) {
            sum += temp[k] * f[k];
        }
        q[0] = sum + lowerBoundary.getConstant(pdeData.getCoefficients(), timeGrid[n]); // TODO need to change how boundary are calculated - dx[0] wrong for non-constant grid

        temp = upperBoundary.getRightMatrixCondition(pdeData.getCoefficients(), grid, timeGrid[n]);
        sum = 0;
        for (int k = 0; k < temp.length; k++) {
            sum += temp[k] * f[xNodes - 1 - k];
        }

        q[xNodes - 1] = sum + upperBoundary.getConstant(pdeData.getCoefficients(), timeGrid[n]);

        // SOR
        final double omega = 1.0;
        double scale = 1.0;
        double errorSqr = Double.POSITIVE_INFINITY;
        while (errorSqr / (scale + 1e-10) > 1e-18) {
            errorSqr = 0.0;
            scale = 0.0;
            for (int j = 0; j < xNodes; j++) {
                sum = 0;
                for (int k = 0; k < xNodes; k++) {
                    sum += m[j][k] * f[k];
                }
                double correction = omega / m[j][j] * (q[j] - sum);
                if (freeBoundary != null) {
                    correction = Math.max(correction, freeBoundary.getZValue(timeGrid[n], spaceGrid[j]) - f[j]);
                }
                errorSqr += correction * correction;
                f[j] += correction;
                scale += f[j] * f[j];
            }
        }

    }

    return new PDETerminalResults1D(grid, f);

}
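
The POSITIVE_INFINITY usage above is the initialisation of errorSqr before the SOR loop, which guarantees the while condition holds on the first pass. The same pattern in a self-contained sketch on a small diagonally dominant system (matrix, right-hand side and tolerances are illustrative):

public class SorDemo {
    public static void main(String[] args) {
        // Solve m.f = q; errorSqr starts at +Infinity so the loop always runs at least once.
        double[][] m = { { 4, 1, 0 }, { 1, 4, 1 }, { 0, 1, 4 } };
        double[] q = { 5, 6, 5 };
        double[] f = new double[3];   // initial guess: all zeros
        final double omega = 1.0;     // omega = 1 reduces SOR to Gauss-Seidel

        double scale = 1.0;
        double errorSqr = Double.POSITIVE_INFINITY;
        while (errorSqr / (scale + 1e-10) > 1e-18) {
            errorSqr = 0.0;
            scale = 0.0;
            for (int j = 0; j < f.length; j++) {
                double sum = 0;
                for (int k = 0; k < f.length; k++) {
                    sum += m[j][k] * f[k];
                }
                double correction = omega / m[j][j] * (q[j] - sum);
                errorSqr += correction * correction;
                f[j] += correction;
                scale += f[j] * f[j];
            }
        }
        System.out.println(java.util.Arrays.toString(f)); // approximately [1.0, 1.0, 1.0]
    }
}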

From source file:ml.shifu.shifu.core.pmml.PMMLTranslator.java

/**
 * Create ContStats for a numerical variable
 * @param columnConfig - ColumnConfig to generate ContStats
 * @return ContStats for the variable
 */
private ContStats createConStats(ColumnConfig columnConfig) {
    ContStats conStats = new ContStats();

    List<Interval> intervals = new ArrayList<Interval>();
    for (int i = 0; i < columnConfig.getBinBoundary().size(); i++) {
        Interval interval = new Interval();
        interval.setClosure(Interval.Closure.OPEN_CLOSED);
        interval.setLeftMargin(columnConfig.getBinBoundary().get(i));

        if (i == columnConfig.getBinBoundary().size() - 1) {
            interval.setRightMargin(Double.POSITIVE_INFINITY);
        } else {
            interval.setRightMargin(columnConfig.getBinBoundary().get(i + 1));
        }

        intervals.add(interval);
    }
    conStats.withIntervals(intervals);

    Map<String, String> extensionMap = new HashMap<String, String>();

    extensionMap.put("BinCountPos", columnConfig.getBinCountPos().toString());
    extensionMap.put("BinCountNeg", columnConfig.getBinCountNeg().toString());
    extensionMap.put("BinWeightedCountPos", columnConfig.getBinWeightedPos().toString());
    extensionMap.put("BinWeightedCountNeg", columnConfig.getBinWeightedNeg().toString());
    extensionMap.put("BinPosRate", columnConfig.getBinPosRate().toString());
    extensionMap.put("BinWOE",
            calculateWoe(columnConfig.getBinCountPos(), columnConfig.getBinCountNeg()).toString());
    extensionMap.put("KS", Double.toString(columnConfig.getKs()));
    extensionMap.put("IV", Double.toString(columnConfig.getIv()));
    conStats.withExtensions(createExtensions(extensionMap));

    return conStats;
}
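
In createConStats, Double.POSITIVE_INFINITY is the right margin of the last interval, making the final bin open-ended. A self-contained sketch of the same binning convention; the lookup helper is illustrative and not part of the Shifu API.

public class BinLookup {
    // Bin i covers (boundary[i], boundary[i + 1]]; the last bin has no right margin,
    // mirroring the Interval built with Double.POSITIVE_INFINITY above.
    static int findBin(double[] boundary, double value) {
        for (int i = 0; i < boundary.length; i++) {
            double right = (i == boundary.length - 1)
                    ? Double.POSITIVE_INFINITY      // last interval is unbounded on the right
                    : boundary[i + 1];
            if (value > boundary[i] && value <= right) {
                return i;
            }
        }
        return -1; // value lies at or below the first boundary
    }

    public static void main(String[] args) {
        double[] boundary = { 0.0, 10.0, 100.0 };
        System.out.println(findBin(boundary, 5.0)); // 0
        System.out.println(findBin(boundary, 1e9)); // 2 (falls in the unbounded last bin)
    }
}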

From source file:org.jfree.data.time.TimeSeriesCollectionTest.java

/**
 * A test to cover bug 3445507.
 */
@Test
public void testBug3445507() {
    TimeSeries s1 = new TimeSeries("S1");
    s1.add(new Year(2011), null);
    s1.add(new Year(2012), null);

    TimeSeries s2 = new TimeSeries("S2");
    s2.add(new Year(2011), 5.0);
    s2.add(new Year(2012), 6.0);

    TimeSeriesCollection dataset = new TimeSeriesCollection();
    dataset.addSeries(s1);
    dataset.addSeries(s2);

    List keys = new ArrayList();
    keys.add("S1");
    keys.add("S2");
    Range r = dataset.getRangeBounds(keys, new Range(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY),
            false);
    assertEquals(5.0, r.getLowerBound(), EPSILON);
    assertEquals(6.0, r.getUpperBound(), EPSILON);
}
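
The test passes new Range(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY) to getRangeBounds as an "all values" range. A tiny hedged sketch of that idiom, assuming JFreeChart's org.jfree.data.Range is on the classpath:

import org.jfree.data.Range;

public class UnboundedRangeDemo {
    public static void main(String[] args) {
        // Both endpoints are infinite, so the range excludes nothing.
        Range all = new Range(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
        System.out.println(Double.isInfinite(all.getLowerBound())); // true
        System.out.println(Double.isInfinite(all.getUpperBound())); // true
    }
}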

From source file:edu.cornell.med.icb.clustering.QTClusterer.java

/**
 * Groups instances into clusters. Returns the indices of the instances
 * that belong to a cluster as an int array in the list result.
 *
 * @param calculator       The
 *                         {@link edu.cornell.med.icb.clustering.SimilarityDistanceCalculator}
 *                         that should be used when clustering
 * @param qualityThreshold The QT clustering algorithm quality threshold (d)
 * @return The list of clusters.
 */
public List<int[]> cluster(final SimilarityDistanceCalculator calculator, final double qualityThreshold) {
    final ProgressLogger clusterProgressLogger = new ProgressLogger(LOGGER, logInterval, "instances clustered");
    clusterProgressLogger.displayFreeMemory = true;
    clusterProgressLogger.expectedUpdates = instanceCount;
    clusterProgressLogger.start("Starting to cluster " + instanceCount + " instances using "
            + parallelTeam.getThreadCount() + " threads.");

    // reset cluster results
    clusterCount = 0;
    // instanceList is the set "G" to cluster
    final LinkedList<Integer> instanceList = new LinkedList<Integer>();
    for (int i = 0; i < instanceCount; i++) {
        clusters[i].clear();

        // set each node in the instance list to it's
        // original position in the source data array
        instanceList.add(i);
    }

    final double ignoreDistance = calculator.getIgnoreDistance();

    // eliminate any instances that will never cluster with anything else
    final IntList singletonClusters = identifySingletonClusters(calculator, qualityThreshold, instanceList,
            clusterProgressLogger);

    final ProgressLogger innerLoopProgressLogger = new ProgressLogger(LOGGER, logInterval,
            "inner loop iterations");
    innerLoopProgressLogger.displayFreeMemory = false;

    final ProgressLogger outerLoopProgressLogger = new ProgressLogger(LOGGER, logInterval,
            "outer loop iterations");
    outerLoopProgressLogger.displayFreeMemory = true;

    try {
        // loop over instances until they have all been added to a cluster
        while (!instanceList.isEmpty()) {
            // cluster remaining instances to find the maximum cardinality
            for (int i = 0; i < instanceList.size(); i++) {
                candidateClusters[i].clear();
            }

            if (logOuterLoopProgress) {
                outerLoopProgressLogger.expectedUpdates = instanceList.size();
                outerLoopProgressLogger.start("Entering outer loop for " + instanceList.size() + " iterations");
            }

            // for each i in G (instance list)
            // find instance j such that distance i,j minimum
            parallelTeam.execute(new ParallelRegion() { // NOPMD

                @Override
                public void run() throws Exception { // NOPMD
                    // each thread will populate a different portion of the "candidateCluster"
                    // array so we shouldn't need to worry about concurrent access
                    execute(0, instanceList.size() - 1, new IntegerForLoop() {
                        @Override
                        public void run(final int first, final int last) {
                            if (LOGGER.isDebugEnabled()) {
                                LOGGER.debug("first = " + first + ", last = " + last);
                            }
                            for (int i = first; i <= last; i++) {
                                @SuppressWarnings("unchecked")
                                final LinkedList<Integer> notClustered = (LinkedList<Integer>) instanceList
                                        .clone();

                                // add the first instance to the next candidate cluster
                                final IntArrayList candidateCluster = candidateClusters[i];
                                candidateCluster.add(notClustered.remove(i));

                                if (logInnerLoopProgress) {
                                    innerLoopProgressLogger.expectedUpdates = notClustered.size();
                                    innerLoopProgressLogger.start(
                                            "Entering inner loop for " + notClustered.size() + " iterations");
                                }

                                // cluster the remaining instances to find the maximum
                                // cardinality find instance j such that distance i,j minimum
                                boolean done = false;
                                while (!done && !notClustered.isEmpty()) {
                                    // find the node that has minimum distance between the
                                    // current cluster and the instances that have not yet
                                    // been clustered.
                                    double minDistance = Double.POSITIVE_INFINITY;
                                    int minDistanceInstanceIndex = 0;
                                    int instanceIndex = 0;
                                    for (final int instance : notClustered) {
                                        double newDistance = ignoreDistance;

                                        final int[] cluster = candidateCluster.elements();
                                        for (int instanceInCluster = 0; instanceInCluster < candidateCluster
                                                .size(); instanceInCluster++) {
                                            final double a = calculator.distance(cluster[instanceInCluster],
                                                    instance);
                                            // if the distance of the instance will force the candidate cluster
                                            // to be larger than the cutoff value, we can stop here
                                            // because we know that this candidate cluster will be too large
                                            if (a >= minDistance) {
                                                newDistance = ignoreDistance;
                                                break;
                                            }
                                            final double b = newDistance;

                                            // This code is inlined from java.lang.Math.max(a, b)
                                            if (a != a) { // a is NaN
                                                newDistance = a;
                                            } else if (a == 0.0d && b == 0.0d
                                                    && Double.doubleToLongBits(a) == negativeZeroDoubleBits) {
                                                newDistance = b;
                                            } else if (a >= b) {
                                                newDistance = a;
                                            } else {
                                                newDistance = b;
                                            }
                                        }

                                        if (newDistance != ignoreDistance && newDistance < minDistance) {
                                            minDistance = newDistance;
                                            minDistanceInstanceIndex = instanceIndex;
                                        }
                                        instanceIndex++;
                                    }
                                    // grow clusters until min distance between new instance
                                    // and cluster reaches quality threshold
                                    // if (diameter(Ai U {j}) > d)
                                    if (minDistance > qualityThreshold) {
                                        done = true;
                                    } else {
                                        // remove the instance from the ones to be considered
                                        final int instance = notClustered.remove(minDistanceInstanceIndex);
                                        // and add it to the newly formed cluster
                                        candidateCluster.add(instance);
                                    }
                                    if (logInnerLoopProgress) {
                                        innerLoopProgressLogger.update();
                                    }
                                }
                                if (logInnerLoopProgress) {
                                    innerLoopProgressLogger.stop("Inner loop completed.");
                                }
                                if (logOuterLoopProgress) {
                                    outerLoopProgressLogger.update();
                                }
                            }
                        }
                    });
                }
            });

            if (logOuterLoopProgress) {
                outerLoopProgressLogger.stop("Outer loop completed.");
            }

            // identify cluster (set C) with maximum cardinality
            int maxCardinality = 0;
            int selectedClusterIndex = -1;
            for (int i = 0; i < instanceList.size(); i++) {
                final int size = candidateClusters[i].size();
                if (LOGGER.isTraceEnabled() && size > 0) {
                    LOGGER.trace("potential cluster " + i + ": " + ArrayUtils.toString(candidateClusters[i]));
                }
                if (size > maxCardinality) {
                    maxCardinality = size;
                    selectedClusterIndex = i;
                }
            }

            final IntArrayList selectedCluster = candidateClusters[selectedClusterIndex];

            if (LOGGER.isTraceEnabled()) {
                LOGGER.trace("adding " + selectedCluster.size() + " instances to cluster " + clusterCount);
            }
            // and add that cluster to the final result
            clusters[clusterCount].addAll(selectedCluster);

            // remove instances in cluster C so they are no longer considered
            instanceList.removeAll(selectedCluster);

            if (logClusterProgress) {
                final int selectedClusterSize = selectedCluster.size();
                int i = 0;
                while (i < selectedClusterSize - 1) {
                    clusterProgressLogger.lightUpdate();
                    i++;
                }
                // make sure there is at least one "full" update per loop
                if (i < selectedClusterSize) {
                    clusterProgressLogger.update();
                }
            }

            // we just created a new cluster
            clusterCount++;

            // next iteration is over (G - C)
        }
    } catch (RuntimeException e) {
        LOGGER.error("Caught runtime exception - rethrowing", e);
        throw e;
    } catch (Exception e) {
        LOGGER.error("Caught exception - rethrowing as ClusteringException", e);
        throw new ClusteringException(e);
    }

    // add singleton clusters to the end so the largest clusters are at the start of the list
    for (final int singleton : singletonClusters) {
        clusters[clusterCount].add(singleton);
        clusterCount++;
    }

    clusterProgressLogger.stop("Clustering completed.");
    return getClusters();
}

From source file:fri.cbw.ThermodynamicSimulationEngine.DDE23.java

/**
 *
 * @param ddes delay differential equation
 * @param lags array of time delay values, does not have to be sorted
 * @param history values of variables for t < 0
 * @param t0 time to start the integration, it should be 0
 * @param tfinal the end time
 * @return Class representing the result of the integration
 */
public static IntegrationResult integrate(FirstOrderDelayDifferentialEquations ddes, double[] lags,
        double[] history, double t0, double tfinal) {
    IntegrationResult sol = new IntegrationResult();

    //% Initialize statistics.
    int nsteps = 0;
    int nfailed = 0;
    int nfevals = 0;

    if (tfinal <= t0) {
        throw new IllegalArgumentException("Must have t0 < tfinal.");
    }

    double[] temp;
    temp = new double[history.length];
    System.arraycopy(history, 0, temp, 0, history.length);

    double[] y0;
    y0 = new double[temp.length];
    System.arraycopy(temp, 0, y0, 0, temp.length);
    int maxlevel = 4;

    double t = t0;
    double[] y;
    y = new double[y0.length];
    System.arraycopy(y0, 0, y, 0, y0.length);
    int neq = ddes.getDimension();

    //% If solving a DDE, locate potential discontinuities. We need to step to
    //% each of the points of potential lack of smoothness. Because we start at
    //% t0, we can remove it from discont.  The solver always steps to tfinal,
    //% so it is convenient to add it to discont.
    List<Double> discont = new ArrayList<Double>();
    double minlag = Double.POSITIVE_INFINITY;
    if (lags.length == 0) {
        discont.add(tfinal);
    } else {
        for (int i = 0; i < lags.length; i++) {
            if (lags[i] < minlag) {
                minlag = lags[i];
            }
        }
        if (minlag <= 0) {
            throw new IllegalArgumentException("The lags must all be positive.");
        }
        List<Double> vl = new ArrayList<Double>();
        vl.add(t0);

        for (int level = 1; level < maxlevel; level++) { // it is not <=, comparing discont with matlab
            List<Double> vlp1 = new ArrayList<Double>();
            for (int i = 0; i < vl.size(); i++) {
                for (int x = 0; x < lags.length; x++) { // should probably be from 1 // probably not
                    vlp1.add(vl.get(i) + lags[x]);
                }
            }
            //% Restrict to tspan.
            vl.clear(); // FIXME: this looks like a mistake
            for (double x : vlp1) {
                if (x <= tfinal) {
                    vl.add(x);
                }
                if (vl.isEmpty()) {
                    break;
                }
                int nvl = vl.size();
                if (nvl > 1) //% Purge duplicates in vl.
                {
                    vl = purgeDuplicates(vl);
                }
            }
            discont.addAll(vl); // FIXME: make sure this is where it should be
        }
        if (discont.size() > 1) {
            //% Purge duplicates.
            discont = purgeDuplicates(discont);
        }
    }

    //% Add tfinal to the list of discontinuities if it is not already included.
    int end = discont.size() - 1;
    if (Math.abs(tfinal - discont.get(end)) <= 10 * eps * Math.abs(tfinal)) {
        discont.set(end, tfinal);
    } else {
        discont.add(tfinal);
    }
    sol.setDiscont(discont);
    //% Discard t0 and discontinuities in the history.
    List<Integer> indices = new ArrayList<Integer>();
    for (int i = 0; i < discont.size(); i++) {
        if (discont.get(i) <= t0) {
            indices.add(i);
        }
    }
    discont.removeAll(indices);

    int nextdsc = 0; // zero based
    List<Integer> ndxdsc = new ArrayList<Integer>();
    ndxdsc.add(1);

    //% Get options, and set defaults.
    double rtol = 1e-3;

    //        if (rtol < 100 * eps) {
    //           rtol = 100 * eps;
    //           warning(['RelTol has been increased to ' num2str(rtol) '.']);
    //        }
    double atol = 1e-6;
    double threshold = atol / rtol;

    //% By default, hmax is 1/10 of the interval of integration.
    double hmax = Math.min((tfinal - t), Math.abs(0.1 * (tfinal - t)));
    if (hmax <= 0) {
        throw new IllegalArgumentException("MaxStep must be greater than zero.");
    }
    double minstep = 0;
    boolean rept_minh = false;

    boolean printstats = true; // FIXME: for debugging

    //% Allocate storage for output arrays and initialize them.
    int chunk = Math.min(100, (int) Math.floor((2 ^ 13) / neq));

    double[] tout = new double[chunk];
    double[][] yout = new double[chunk][neq]; // flip to C style
    double[][] ypout = new double[chunk][neq];
    int nout = 1; // FIXME: should be 0-based? // probably not
    tout[nout] = t;
    for (int k = 0; k < neq; k++) {
        for (int l = 0; l < chunk; l++) {
            yout[l][k] = y[k]; // FIXME: think about this
        }
    }

    //% Initialize method parameters.
    double pow = 1.0 / 3;
    double[][] B = { // transposed
            { 1.0 / 2, 0, 0, 0 }, { 0, 3.0 / 4, 0, 0, 0 }, { 2.0 / 9, 1.0 / 3, 4.0 / 9.0, 0 } };
    double[] E = { -5.0 / 72, 1.0 / 12, 1.0 / 9, -1.0 / 8 };

    double[][] f = new double[4][neq]; // flipped to C indexing

    //% Evaluate initial history at t0 - lags.
    double[][] emptyarr = new double[0][];
    double[] fakeX = { t }; // FIXME: try to understand what is happening here with t/X
    double[][] fakeY = { y }; // FIXME: DTTO
    double[][] Z = lagvals(t, lags, history, fakeX, fakeY, emptyarr);

    double[] f0 = new double[neq];
    ddes.computeDerivatives(t, y, Z, f0);
    for (int k = 0; k < neq; k++) {
        ypout[nout][k] = f0[k]; // FIXME: think about this // should be correct now
    }
    nfevals = nfevals + 1; //% stats
    int m = f0.length;
    if (m != neq) {
        // FIXME: find better class to throw?
        throw new IllegalArgumentException("returned derivative and history vectors of different lengths.");
    }

    double hmin = 16 * eps * t;

    //% Compute an initial step size h using y'(t).
    double h = Math.min(hmax, tfinal - t0);
    double n = Double.NEGATIVE_INFINITY; // compute norm(xk, inf)
    for (int k = 0; k < f0.length; k++) {
        double xk = Math.abs(f0[k] / Math.max(Math.abs(y[k]), threshold));
        if (xk > n) {
            n = xk;
        }
    }
    double rh = n / (0.8 * Math.pow(rtol, pow));
    if (h * rh > 1) {
        h = 1.0 / rh; // NOTE: used to have 1 / rh there, did not work :(
    }
    h = Math.max(h, hmin);

    //% Make sure that the first step is explicit so that the code can
    //% properly initialize the interpolant.
    h = Math.min(h, 0.5 * minlag);

    System.arraycopy(f0, 0, f[0], 0, neq);

    //% THE MAIN LOOP
    double[] last_y = new double[neq];
    boolean done = false;
    while (!done) {
        //% By default, hmin is a small number such that t+hmin is only slightly
        //% different than t.  It might be 0 if t is 0.
        hmin = 16 * eps * t;
        h = Math.min(hmax, Math.max(hmin, h)); //% couldn't limit h until new hmin

        //% Adjust step size to hit discontinuity. tfinal = discont(end).
        boolean hitdsc = false;
        double distance = discont.get(nextdsc) - t;
        if (Math.min(1.1 * h, hmax) >= distance) { //% stretch
            h = distance;
            hitdsc = true;
        } else if (2 * h >= distance) { //% look-ahead
            h = distance / 2;
        }
        if (!hitdsc && (minlag < h) && (h < 2 * minlag)) {
            h = minlag;
        }

        //% LOOP FOR ADVANCING ONE STEP.
        double tnew;
        double ynew[] = new double[neq];
        boolean itfail;
        boolean nofailed = true; //% no failed attempts
        double err;
        while (true) {
            double[][] hB = new double[3][4]; // dimensions of B
            for (int k = 0; k < 3; k++) {
                for (int l = 0; l < 4; l++) {
                    hB[k][l] = h * B[k][l];
                }
            }
            double t1 = t + 0.5 * h;
            double t2 = t + 0.75 * h;
            tnew = t + h;

            //% If a lagged argument falls in the current step, we evaluate the
            //% formula by iteration. Extrapolation is used for the evaluation
            //% of the history terms in the first iteration and the tnew,ynew,
            //% ypnew of the current iteration are used in the evaluation of
            //% these terms in the next iteration.
            int maxit;
            if (minlag < h) {
                maxit = 5;
            } else {
                maxit = 1;
            }
            // FIXME: maybe it should be implemented as appending to a List
            double[] X = new double[nout + 1]; // looks like +1 is unavoidable. nout is the largest index, it is 1 based
            System.arraycopy(tout, 0, X, 0, nout + 1); // so need nout+1 length
            double[][] Y = new double[nout + 1][neq];
            System.arraycopy(yout, 0, Y, 0, nout + 1);
            double[][] YP = new double[nout + 1][neq];
            System.arraycopy(ypout, 0, YP, 0, nout + 1);

            itfail = false;
            for (int iter = 1; iter <= maxit; iter++) { //FIXME: 0-based? // no, from 1, maybe <= // try <=
                double[] vecmul1 = new double[neq];
                for (int k = 0; k < neq; k++) { // FIXME: merge the loops, it should be possible // it is not
                    for (int l = 0; l < 4; l++) {
                        vecmul1[k] += f[l][k] * hB[0][l];
                    }
                }
                double yt1[] = new double[neq];
                for (int k = 0; k < f[0].length; k++) { // changed from f.length
                    yt1[k] = y[k] + vecmul1[k]; // FIXME: indices?
                }

                Z = lagvals(t1, lags, history, X, Y, YP);
                ddes.computeDerivatives(t1, yt1, Z, f[1]);

                double[] vecmul2 = new double[neq];
                for (int k = 0; k < neq; k++) { // FIXME: merge the loops, it should be possible // it is not
                    for (int l = 0; l < 4; l++) {
                        vecmul2[k] += f[l][k] * hB[1][l];
                    }
                }
                double yt2[] = new double[neq];
                for (int k = 0; k < f[0].length; k++) { // changed from f.length
                    yt2[k] = y[k] + vecmul2[k];
                }

                Z = lagvals(t2, lags, history, X, Y, YP);
                ddes.computeDerivatives(t2, yt2, Z, f[2]);

                double[] vecmul3 = new double[neq];
                for (int k = 0; k < neq; k++) {
                    for (int l = 0; l < 4; l++) {
                        vecmul3[k] += f[l][k] * hB[2][l];
                    }
                }
                for (int k = 0; k < f[0].length; k++) {
                    ynew[k] = y[k] + vecmul3[k];
                }

                Z = lagvals(tnew, lags, history, X, Y, YP);
                ddes.computeDerivatives(tnew, ynew, Z, f[3]);

                nfevals = nfevals + 3;
                if (maxit > 1) {
                    if (iter > 1) {
                        double errit = Double.NEGATIVE_INFINITY;
                        for (int k = 0; k < neq; k++) { // another norm(erritk, inf)
                            // missed this norm the first time
                            double erritk = Math.abs((ynew[k] - last_y[k])
                                    / Math.max(Math.max(Math.abs(y[k]), Math.abs(ynew[k])), threshold));
                            if (erritk > errit) {
                                errit = erritk;
                            }
                        }

                        if (errit <= 0.1 * rtol) {
                            break;
                        }
                    }
                    //% Use the tentative solution at tnew in the evaluation of the
                    //% history terms of the next iteration.
                    X = Arrays.copyOf(X, X.length + 1);
                    X[X.length - 1] = tnew;

                    Y = Arrays.copyOf(Y, Y.length + 1);
                    Y[Y.length - 1] = ynew;

                    YP = Arrays.copyOf(YP, YP.length + 1);
                    YP[YP.length - 1] = f[3];

                    System.arraycopy(ynew, 0, last_y, 0, ynew.length); // had switched last_y and ynew
                    itfail = (iter == maxit);
                }
            }

            // FIXME: matrix multiplication?
            // had bug: first compute n, then do h*n, not the other way
            double[] vecmul = new double[neq];
            for (int l = 0; l < neq; l++) {
                for (int o = 0; o < E.length; o++) {
                    vecmul[l] += (f[o][l] * E[o]);
                }
            }
            n = Double.NEGATIVE_INFINITY;
            for (int k = 0; k < neq; k++) {
                // had bug: infty norm is in abs value
                // another one: had multiplication instead of division. took me two hours to realize :(
                double nk = Math
                        .abs(vecmul[k] / Math.max(Math.max(Math.abs(y[k]), Math.abs(ynew[k])), threshold)); // FIXME: indexing of f
                if (nk > n) {
                    n = nk;
                }
            }

            //% Estimate the error.
            err = h * n;

            //% If h <= minstep, adjust err so that the step will be accepted.
            //% Note that minstep < minlag, so maxit = 1 and itfail = false.  Report
            //% once that a step of minimum size was taken.
            if (h <= minstep) {
                err = Math.min(err, rtol);
                if (rept_minh) {
                    Logger.getGlobal().log(Level.INFO, "Steps of size MinStep were taken.");
                    rept_minh = false;
                }
            }

            //% Accept the solution only if the weighted error is no more than the
            //% tolerance rtol.  Estimate an h that will yield an error of rtol on
            //% the next step or the next try at taking this step, as the case may be,
            //% and use 0.8 of this value to avoid failures.
            if ((err > rtol) || itfail) { //% Failed step
                nfailed = nfailed + 1;
                Logger logger = Logger.getGlobal();
                if (h <= hmin) {
                    String msg = String.format("Failure at t=%e.  Unable to meet integration "
                            + "tolerances without reducing the step size below "
                            + "the smallest value allowed (%e) at time t.\n", t, hmin);
                    logger.log(Level.WARNING, msg);
                    if (printstats) {
                        logger.log(Level.INFO, "%g successful steps", nsteps);
                        logger.log(Level.INFO, "%g failed attempts", nfailed);
                        logger.log(Level.INFO, "%g function evaluations", nfevals);
                    }
                    //% Trim output arrays, place in solution structure, and return.
                    sol = trim(sol, history, nout, tout, yout, ypout, ndxdsc);
                    return sol;
                }

                if (itfail) {
                    h = 0.5 * h;
                    if (h < 2 * minlag) {
                        h = minlag;
                    }
                } else if (nofailed) {
                    nofailed = false;
                    h = Math.max(hmin, h * Math.max(0.5, 0.8 * Math.pow(rtol / err, pow)));
                } else {
                    h = Math.max(hmin, 0.5 * h);
                }
                hitdsc = false;
            } else { //% Successful step
                break;
            }
        }
        nsteps = nsteps + 1; //% stats

        //% Advance the integration one step.
        t = tnew;
        y = ynew;
        System.arraycopy(f[3], 0, f[0], 0, neq); //% BS(2,3) is FSAL.
        nout = nout + 1;

        // reallocate arrays
        if (nout > tout.length - 1) {
            tout = Arrays.copyOf(tout, tout.length + chunk);

            yout = Arrays.copyOf(yout, yout.length + chunk);
            ypout = Arrays.copyOf(ypout, ypout.length + chunk);
            // allocate the second dimensions
            int upto = yout.length;
            for (int k = yout.length - chunk; k < upto; k++) {
                yout[k] = new double[neq];
            }
            upto = ypout.length;
            for (int k = ypout.length - chunk; k < upto; k++) {
                ypout[k] = new double[neq];
            }
        }
        tout[nout] = t;
        for (int k = 0; k < neq; k++) {
            yout[nout][k] = y[k];
            ypout[nout][k] = f[0][k];
        }

        //% If there were no failures, compute a new h.
        if (nofailed && !itfail) {
            //% Note that h may shrink by 0.8, and that err may be 0.
            double temp2 = 1.25 * Math.pow(err / rtol, pow);
            if (temp2 > 0.2) {
                h = h / temp2;
            } else {
                h = 5 * h;
            }
            h = Math.max(h, minstep); //FIXME: NetBeans says value never used
        }

        //% Have we hit tfinal = discont(end)?
        if (hitdsc) {
            nextdsc = nextdsc + 1;
            done = nextdsc > discont.size() - 1;
            if (!done) {
                ndxdsc.add(nout);
            }
        }
    }
    //% Successful integration:
    if (printstats) {
        if (printstats) {
            Logger logger = Logger.getGlobal();
            logger.log(Level.INFO, "%g successful steps", nsteps);
            logger.log(Level.INFO, "%g failed attempts", nfailed);
            logger.log(Level.INFO, "%g function evaluations", nfevals);
        }
    }

    //% Trim output arrays, place in solution structure, and return.
    sol = trim(sol, history, nout, tout, yout, ypout, ndxdsc);
    return sol;
    //% End of function dde23.
}

From source file:egat.replicatordynamics.SymmetricConstrainedFeasibleAmoebaSearch.java

public StrategySearchResult runForResult(Strategy initialStrategy, Set<Action> restrictedActions,
        boolean randomize, Action[] actions, PayoffMatrix pm) {

    boolean[] mask = new boolean[actions.length];

    int restrictedCount = 0;

    for (int i = 0; i < actions.length; i++) {

        mask[i] = restrictedActions.contains(actions[i]);

        if (mask[i]) {

            restrictedCount++;

        }

    }

    int[] restricted = new int[restrictedCount];

    for (int i = 0, restrictedIndex = 0; i < actions.length; i++) {

        if (mask[i]) {

            restricted[restrictedIndex++] = i;

        }
    }

    // Normalize to a uniform distribution
    double[] distribution = new double[actions.length];

    double[] bestDist = new double[actions.length];

    if (initialStrategy == null) {
        double sum = 0.0;
        for (int i = 0; i < bestDist.length; i++) {
            if (mask[i]) {
                bestDist[i] = Math.random();
                sum += bestDist[i];
            }
        }

        for (int i = 0; i < bestDist.length; i++) {
            if (mask[i]) {
                bestDist[i] /= sum;
            }
        }

    } else {
        for (int i = 0; i < actions.length; i++) {
            if (mask[i]) {
                bestDist[i] = initialStrategy.getProbability(actions[i]).doubleValue();
            }
        }
    }

    double bestRegret = pm.regret(bestDist);

    // Initialize current strategy
    Strategy currentStrategy = buildStrategy(actions, bestDist, restricted);

    fireUpdatedStrategy(currentStrategy, 0, Double.NaN);

    int SIMPLICES = restricted.length;
    double[][] X = new double[SIMPLICES][SIMPLICES - 1];
    double[] centroid = new double[SIMPLICES - 1];
    double[] reflect = new double[SIMPLICES - 1];
    double[] expand = new double[SIMPLICES - 1];
    double[] contract = new double[SIMPLICES - 1];
    double[] V = new double[SIMPLICES];
    double[] cur = distribution.clone();

    if (!randomize) {
        for (int r = 0, n = SIMPLICES - 1; r < n; r++) {
            X[0][r] = 0.0;
        }

        V[0] = calculateObjective(pm, X[0], restricted, distribution);

        for (int s = 1; s < SIMPLICES; s++) {
            System.arraycopy(X[0], 0, X[s], 0, SIMPLICES - 1);

            X[s][s - 1] = 1.0;

            V[s] = calculateObjective(pm, X[s], restricted, distribution);
        }
    } else {

        for (int s = 0; s < SIMPLICES; s++) {
            generateSimplex(X[s]);
            V[s] = calculateObjective(pm, X[s], restricted, distribution);
        }
    }

    int highest = -1;
    int lowest = -1;
    int second = -1;

    for (int s = 0; s < SIMPLICES; s++) {
        if (lowest < 0 || V[s] < V[lowest]) {
            lowest = s;
        }

        if (second < 0 || V[s] > V[second]) {

            if (highest < 0 || V[s] > V[highest]) {
                second = highest;
                highest = s;
            } else {
                second = s;
            }

        }
    }

    double curRegret = Double.POSITIVE_INFINITY;

    int iteration = 0;
    double norm = Double.POSITIVE_INFINITY;

    loop: for (iteration = 1; SIMPLICES > 1; iteration++) {

        generateCentroid(centroid, X, SIMPLICES, highest);

        generateFeasibleTowards(reflect, centroid, X[highest], 1.0, pm, restricted, distribution);

        double reflectV = calculateObjective(pm, reflect, restricted, distribution);

        // Reflect
        if (V[lowest] <= reflectV && reflectV < V[second]) {
            replaceHighest(reflect, reflectV, X, V, highest);

            // Expand
        } else if (reflectV < V[lowest]) {

            generateFeasibleTowards(expand, centroid, X[highest], 2.0, pm, restricted, distribution);

            double expandV = calculateObjective(pm, expand, restricted, distribution);

            if (expandV < reflectV) {
                replaceHighest(expand, expandV, X, V, highest);
            } else {
                replaceHighest(reflect, reflectV, X, V, highest);
            }

            // Contract
        } else {

            generateAway(contract, X[highest], centroid, 0.5);

            double contractV = calculateObjective(pm, contract, restricted, distribution);

            if (contractV < V[highest]) {
                replaceHighest(contract, contractV, X, V, highest);

                // Reduction
            } else {
                for (int s = 0; s < SIMPLICES; s++) {
                    if (s != lowest) {

                        generateAway(X[s], X[lowest], X[s], 0.5);

                        V[s] = calculateObjective(pm, X[s], restricted, distribution);
                    }
                }
            }
        }

        highest = -1;
        lowest = -1;
        second = -1;

        for (int s = 0; s < SIMPLICES; s++) {
            if (lowest < 0 || V[s] < V[lowest]) {
                lowest = s;
            }

            if (second < 0 || V[s] > V[second]) {

                if (highest < 0 || V[s] > V[highest]) {
                    second = highest;
                    highest = s;
                } else {
                    second = s;
                }

            }
        }

        Arrays.fill(cur, 0.0);
        double sum = 0.0;
        for (int r = 0; r < restricted.length - 1; r++) {
            cur[restricted[r]] = Math.max(0, X[lowest][r]);
            sum += cur[restricted[r]];
        }

        cur[restricted[restricted.length - 1]] = Math.max(0.0, 1 - sum);
        sum += cur[restricted[restricted.length - 1]];

        for (int r = 0; r < restricted.length; r++) {
            cur[restricted[r]] /= sum;
        }

        curRegret = pm.regret(cur);

        if (curRegret < bestRegret) {
            System.arraycopy(cur, 0, bestDist, 0, cur.length);
            bestRegret = curRegret;
        }

        // Build the new strategy
        currentStrategy = buildStrategy(actions, bestDist, restricted);

        fireUpdatedStrategy(currentStrategy, iteration, bestRegret);

        // Calculate the norm
        norm = V[highest] - V[lowest];

        // Check termination condition
        if (terminate(norm, iteration))
            break;
    }

    return new StrategySearchResult(currentStrategy, bestRegret, iteration, norm);
}

From source file:de._13ducks.cor.game.server.movement.SubSectorPathfinder.java

private static SubSectorEdge shortestCommonEdge(SubSectorNode from, SubSectorNode to) {
    ArrayList<SubSectorEdge> toEdges = to.getMyEdges();
    SubSectorEdge shortesCommon = null;
    double minLength = Double.POSITIVE_INFINITY;
    for (SubSectorEdge edge : from.getMyEdges()) {
        if (toEdges.contains(edge)) {
            if (edge.getLength() < minLength) {
                shortesCommon = edge;
                minLength = edge.getLength();
            }
        }
    }
    return shortesCommon;
}
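
shortestCommonEdge is the standard argmin pattern: the running minimum starts at Double.POSITIVE_INFINITY so the first edge with a finite length immediately becomes the current best, and null is returned only when no common edge exists. A generic, self-contained sketch of the same pattern (class and method names are illustrative):

import java.util.Arrays;
import java.util.List;
import java.util.function.ToDoubleFunction;

public class ArgMin {
    // Returns the element with the smallest measure, or null for an empty list.
    // Seeding the running minimum with +Infinity means the first candidate always wins.
    static <T> T argMin(List<T> items, ToDoubleFunction<T> measure) {
        T best = null;
        double bestValue = Double.POSITIVE_INFINITY;
        for (T item : items) {
            double value = measure.applyAsDouble(item);
            if (value < bestValue) {
                best = item;
                bestValue = value;
            }
        }
        return best;
    }

    public static void main(String[] args) {
        List<String> words = Arrays.asList("integration", "pde", "cluster");
        System.out.println(argMin(words, String::length)); // pde
    }
}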

From source file:etomica.potential.EwaldSummation.java

public double getRange() {
    return Double.POSITIVE_INFINITY;
}
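
Returning Double.POSITIVE_INFINITY from getRange is a sentinel for "no cutoff radius": every finite pair separation compares as smaller than the range, so no pair is ever excluded. A self-contained sketch of how a caller can rely on that (names and distances are illustrative):

public class CutoffDemo {
    // A range of +Infinity is the conventional "no cutoff" sentinel: every
    // finite separation r satisfies r < range, so nothing is ever skipped.
    static boolean withinRange(double r, double range) {
        return r < range;
    }

    public static void main(String[] args) {
        double noCutoff = Double.POSITIVE_INFINITY; // as returned by getRange() above
        System.out.println(withinRange(12.5, noCutoff));    // true
        System.out.println(withinRange(1.0e300, noCutoff)); // true
        System.out.println(withinRange(5.0, 3.0));          // false with a finite cutoff
    }
}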

From source file:com.joptimizer.optimizers.LPStandardConverterTest.java

/**
 * Standardization of a problem on the form:
 * min(c) s.t.
 * G.x < h
 * A.x = b
 */
public void testCGhAb2() throws Exception {
    log.debug("testCGhAb2");

    String problemId = "2";

    double[] c = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "c" + problemId + ".txt");
    double[][] G = Utils.loadDoubleMatrixFromFile(
            "lp" + File.separator + "standardization" + File.separator + "G" + problemId + ".csv",
            ",".charAt(0));
    double[] h = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "h" + problemId + ".txt");
    ;
    double[][] A = Utils.loadDoubleMatrixFromFile(
            "lp" + File.separator + "standardization" + File.separator + "A" + problemId + ".csv",
            ",".charAt(0));
    double[] b = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "b" + problemId + ".txt");
    double[] expectedSol = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "sol" + problemId + ".txt");
    double expectedValue = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "value" + problemId + ".txt")[0];
    double expectedTolerance = MatrixUtils.createRealMatrix(A)
            .operate(MatrixUtils.createRealVector(expectedSol)).subtract(MatrixUtils.createRealVector(b))
            .getNorm();

    //standard form conversion
    double unboundedLBValue = Double.NEGATIVE_INFINITY;
    double unboundedUBValue = Double.POSITIVE_INFINITY;
    LPStandardConverter lpConverter = new LPStandardConverter(unboundedLBValue, unboundedUBValue);
    lpConverter.toStandardForm(c, G, h, A, b, null, null);

    int n = lpConverter.getStandardN();
    int s = lpConverter.getStandardS();
    c = lpConverter.getStandardC().toArray();
    A = lpConverter.getStandardA().toArray();
    b = lpConverter.getStandardB().toArray();
    double[] lb = lpConverter.getStandardLB().toArray();
    double[] ub = lpConverter.getStandardUB().toArray();
    log.debug("n : " + n);
    log.debug("s : " + s);
    log.debug("c : " + ArrayUtils.toString(c));
    log.debug("A : " + ArrayUtils.toString(A));
    log.debug("b : " + ArrayUtils.toString(b));
    log.debug("lb : " + ArrayUtils.toString(lb));
    log.debug("ub : " + ArrayUtils.toString(ub));

    //check consistency
    assertEquals(G.length, s);
    assertEquals(A[0].length, n);
    assertEquals(s + lpConverter.getOriginalN(), n);
    assertEquals(lb.length, n);
    assertEquals(ub.length, n);

    //check constraints
    RealMatrix GOrig = new Array2DRowRealMatrix(G);
    RealVector hOrig = new ArrayRealVector(h);
    RealMatrix AStandard = new Array2DRowRealMatrix(A);
    RealVector bStandard = new ArrayRealVector(b);
    RealVector expectedSolVector = new ArrayRealVector(expectedSol);
    RealVector Gxh = GOrig.operate(expectedSolVector).subtract(hOrig);//G.x - h
    RealVector slackVariables = new ArrayRealVector(s);
    for (int i = 0; i < s; i++) {
        slackVariables.setEntry(i, 0. - Gxh.getEntry(i));//the difference from 0
        assertTrue(slackVariables.getEntry(i) >= 0.);
    }
    RealVector sol = slackVariables.append(expectedSolVector);
    RealVector Axmb = AStandard.operate(sol).subtract(bStandard);
    assertEquals(0., Axmb.getNorm(), expectedTolerance);

    //      Utils.writeDoubleArrayToFile(new double[]{s}, "target" + File.separator   + "standardS"+problemId+".txt");
    //      Utils.writeDoubleArrayToFile(c, "target" + File.separator   + "standardC"+problemId+".txt");
    //      Utils.writeDoubleMatrixToFile(A, "target" + File.separator   + "standardA"+problemId+".txt");
    //      Utils.writeDoubleArrayToFile(b, "target" + File.separator   + "standardB"+problemId+".txt");
    //      Utils.writeDoubleArrayToFile(lb, "target" + File.separator   + "standardLB"+problemId+".txt");
    //      Utils.writeDoubleArrayToFile(ub, "target" + File.separator   + "standardUB"+problemId+".txt");
}