Example usage for java.io StreamTokenizer TT_EOF

List of usage examples for java.io StreamTokenizer TT_EOF

Introduction

This page collects example usages of the java.io.StreamTokenizer field TT_EOF.

Prototype

public static final int TT_EOF = -1

Document

A constant indicating that the end of the stream has been read.
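
A minimal sketch of the usual pattern (the class name and input here are illustrative, not taken from the examples below): call nextToken() in a loop until it returns TT_EOF, which has the value -1.

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

public class TTEOFDemo {
    public static void main(String[] args) throws IOException {
        StreamTokenizer tokenizer = new StreamTokenizer(new StringReader("alpha 42 beta"));
        // nextToken() returns TT_EOF once the underlying reader is exhausted
        while (tokenizer.nextToken() != StreamTokenizer.TT_EOF) {
            if (tokenizer.ttype == StreamTokenizer.TT_WORD) {
                System.out.println("word: " + tokenizer.sval);
            } else if (tokenizer.ttype == StreamTokenizer.TT_NUMBER) {
                System.out.println("number: " + tokenizer.nval);
            }
        }
    }
}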

Usage

From source file:com.redskyit.scriptDriver.RunTests.java

private boolean runString(String source, File file, String cmd) throws Exception {
    StreamTokenizer tokenizer = openString(source);
    while (tokenizer.nextToken() != StreamTokenizer.TT_EOF) {
        if (tokenizer.ttype == StreamTokenizer.TT_WORD) {
            runCommand(tokenizer, file, cmd, null);
        }
    }
    return true;
}

From source file:net.sf.jftp.net.WebdavConnection.java

private void work(String file, String outfile) {
    Log.out("transfer started\nfile: " + file + "\noutfile: " + outfile);

    BufferedInputStream in = null;
    BufferedOutputStream out = null;

    try {
        if (outfile.startsWith("http://")) {
            //out = new BufferedOutputStream(new FileOutputStream(new WebdavResource(new HttpURL(file)).getMethodData());
            //new WebdavFile(new URL(outfile), user, pass)));
            //in = new BufferedInputStream(new FileInputStream(file));
            String resPath = outfile.substring(0, outfile.lastIndexOf("/") + 1);
            String name = outfile.substring(outfile.lastIndexOf("/") + 1);

            Log.debug("Uploading " + file + " to " + resPath + " as " + name);

            //HttpURL url = getURL(resPath);
            WebdavResource res = getResource(resPath);

            //new WebdavResource(url);

            /*
            if(res.checkinMethod()) Log.debug("Checkin OK");
            else Log.debug("Checkin FAILED");
                    
            Enumeration e = res.getAllowedMethods();
            while(e != null && e.hasMoreElements())
            {
                Log.debug("Method: " + e.nextElement().toString());
            }
            */
            if (res.putMethod(new File(file))) {
                fireProgressUpdate(file, DataConnection.FINISHED, -1);
            } else {
                Log.debug("Upload failed.");
                fireProgressUpdate(file, DataConnection.FAILED, -1);
            }

            return;
        }

        Log.debug("Downloading " + file + " to " + outfile);

        out = new BufferedOutputStream(new FileOutputStream(outfile));
        in = new BufferedInputStream(getResource(file).getMethodData());

        //new WebdavResource(getURL(file)).getMethodData());
        byte[] buf = new byte[webdavBuffer];
        int len = 0;
        int reallen = 0;

        //System.out.println(file+":"+getLocalPath()+outfile);
        while (true) {
            len = in.read(buf);

            //System.out.print(".");
            // StreamTokenizer.TT_EOF is -1, the same sentinel value that
            // InputStream.read returns at end of stream, which is the only
            // reason this comparison works; (len == -1) would be clearer.
            if (len == StreamTokenizer.TT_EOF) {
                break;
            }

            out.write(buf, 0, len);

            reallen += len;
            fireProgressUpdate(StringUtils.getFile(file), DataConnection.GET, reallen);
        }

        fireProgressUpdate(file, DataConnection.FINISHED, -1);
    } catch (IOException ex) {
        Log.debug("Error with file IO (" + ex + ")!");
        ex.printStackTrace();
        fireProgressUpdate(file, DataConnection.FAILED, -1);
    } finally {
        try {
            // Guard against null streams: the upload branch returns
            // before these are assigned.
            if (out != null) {
                out.flush();
                out.close();
            }
            if (in != null) {
                in.close();
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}

From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java

public void writeAggregateStatisticsForOptimisationConstraints_ISARHP_ISARCEP(String a_OutputDir_String)
        throws Exception {
    HashMap a_ID_RecordID_HashMap = _ISARDataHandler.get_ID_RecordID_HashMap();
    File optimisationConstraints_SARs = new File(a_OutputDir_String, "OptimisationConstraints_SARs.csv");
    FileOutputStream a_FileOutputStream = new FileOutputStream(optimisationConstraints_SARs);
    OutputDataHandler_OptimisationConstraints.writeHSARHP_ISARCEPHeader(a_FileOutputStream);
    a_FileOutputStream.flush();
    Object[] fitnessCounts;
    HashMap<String, Integer> a_SARCounts = null;
    TreeSet<String> a_LADCodes_TreeSet = _CASDataHandler.getLADCodes_TreeSet();
    String s2;
    String s1;
    Iterator<String> a_Iterator_String = a_LADCodes_TreeSet.iterator();
    while (a_Iterator_String.hasNext()) {
        // Need to reorder data for each LAD as OAs not necessarily returned
        // in any order and an ordered result is wanted
        TreeMap<String, HashMap<String, Integer>> resultsForLAD = new TreeMap<String, HashMap<String, Integer>>();
        boolean setPrevious_OA_String = true;
        s1 = a_Iterator_String.next();
        s2 = s1.substring(0, 3);
        File resultsFile = new File(a_OutputDir_String + s2 + "/" + s1 + "/population.csv");
        // A few results are missing
        if (resultsFile.exists()) {
            System.out.println(resultsFile.toString() + " exists");
            String previous_OA_String = "";
            BufferedReader aBufferedReader = new BufferedReader(
                    new InputStreamReader(new FileInputStream(resultsFile)));
            StreamTokenizer aStreamTokenizer = new StreamTokenizer(aBufferedReader);
            Generic_StaticIO.setStreamTokenizerSyntax1(aStreamTokenizer);
            String line = "";
            int tokenType = aStreamTokenizer.nextToken();
            while (tokenType != StreamTokenizer.TT_EOF) {
                switch (tokenType) {
                case StreamTokenizer.TT_EOL:
                    //System.out.println(line);
                    String[] lineFields = line.split(",");
                    String a_OA_String = lineFields[0];
                    if (previous_OA_String.equalsIgnoreCase(a_OA_String)) {
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            //System.out.println("HP");
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsHP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                            //System.out.println(a_HSARDataRecord.toString());
                        } else {
                            //System.out.println("CEP");
                            // From the id of the ISARDataRecord get the
                            // ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                        }
                    } else {
                        // Store result
                        if (setPrevious_OA_String) {
                            previous_OA_String = a_OA_String;
                            setPrevious_OA_String = false;
                        } else {
                            // Store
                            resultsForLAD.put(previous_OA_String, a_SARCounts);
                        }
                        // Initialise/Re-initialise
                        CASDataRecord a_CASDataRecord = (CASDataRecord) _CASDataHandler
                                .getDataRecord(a_OA_String);
                        fitnessCounts = GeneticAlgorithm_ISARHP_ISARCEP.getFitnessCounts(a_CASDataRecord);
                        a_SARCounts = (HashMap<String, Integer>) fitnessCounts[1];
                        // Start a new aggregation
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            //System.out.println("HP");
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsHP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                            //System.out.println(a_HSARDataRecord.toString());
                        } else {
                            //System.out.println("CEP");
                            // From the id of the ISARDataRecord get the
                            // ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_ISARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                            //System.out.println(a_ISARDataRecord.toString());
                        }
                        //a_OA_String = lineFields[0];
                    }
                    previous_OA_String = a_OA_String;
                    break;
                case StreamTokenizer.TT_WORD:
                    line = aStreamTokenizer.sval;
                    break;
                }
                tokenType = aStreamTokenizer.nextToken();
            }
        } else {
            System.out.println(resultsFile.toString() + " !exists");
        }
        Iterator<String> string_Iterator = resultsForLAD.keySet().iterator();
        while (string_Iterator.hasNext()) {
            String oa_Code = string_Iterator.next();
            a_SARCounts = resultsForLAD.get(oa_Code);
            //GeneticAlgorithm_ISARHP_ISARCEP.addToCountsCEP(null, a_ID_RecordID_HashMap, _Random)
            OutputDataHandler_OptimisationConstraints.writeISARHP_ISARCEP(a_SARCounts, oa_Code,
                    a_FileOutputStream);
        }
    }
    a_FileOutputStream.close();
}
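
A note on the pattern above: Generic_StaticIO.setStreamTokenizerSyntax1 is a project-specific helper, and judging from the loop it configures the tokenizer so that each CSV line arrives as a single TT_WORD token. The TT_WORD case buffers the line, the TT_EOL case parses it, and TT_EOF ends the read loop; the same pattern recurs in the examples below.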

From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.Comparison.java

/**
 * Aim is to produce an aggregated data set totalling males and females
 * by MSOA for comparison with CASUV003DataRecord.
 */
private void run3() throws IOException {
    boolean aggregateToMSOA = true;
    // boolean aggregateToMSOA = false;
    ToyModelDataHandler tToyModelDataHandler = new ToyModelDataHandler();
    String startOfFilename = "C:/Work/Projects/MoSeS/Workspace/Leeds/ToyModel_SWR_OA_HSARHP_ISARCEP_0_5_5000_3_30_12_20";
    // String startOfFilename = new String(
    // "C:/Work/Projects/MoSeS/Workspace/Leeds/ToyModel_SWR_OA_HSARHP_ISARCEP_0_5_1000_3_30_12_20"
    // );
    // String startOfFilename = new String(
    // "C:/Work/Projects/MoSeS/Workspace/Leeds/ToyModel_SWR_OA_ISARHP_ISARCEP_0_5_200_3_30_12_20"
    // );
    File tToyModelDataRecord2CSVFile = new File(startOfFilename + ".csv");
    File tToyModelDataRecordMaleFemaleComparisonFile;
    if (aggregateToMSOA) {
        tToyModelDataRecordMaleFemaleComparisonFile = new File(
                startOfFilename + "_MSOAMaleFemaleComparison.csv");
    } else {
        tToyModelDataRecordMaleFemaleComparisonFile = new File(startOfFilename + "_OAMaleFemaleComparison.csv");
    }
    if (!tToyModelDataRecordMaleFemaleComparisonFile.exists()) {
        tToyModelDataRecordMaleFemaleComparisonFile.createNewFile();
    }
    PrintWriter tToyModelDataRecordMaleFemaleComparisonFilePrintWriter = new PrintWriter(
            tToyModelDataRecordMaleFemaleComparisonFile);
    // CASUV003DataHandler tCASUV003DataHandler = new CASUV003DataHandler(
    // new File(
    // "C:/Work/Projects/MoSeS/Workspace/Leeds/CASUV003DataRecordsMSOA.dat"
    // ) );
    CASUV003DataHandler tCASUV003DataHandler;
    CAS001DataHandler tCAS001DataHandler;
    if (aggregateToMSOA) {
        tCASUV003DataHandler = new CASUV003DataHandler(
                new File("C:/Work/Projects/MoSeS/Workspace/Leeds/CASUV003DataRecordsMSOA.dat"));
        tCAS001DataHandler = new CAS001DataHandler(
                new File("C:/Work/Projects/MoSeS/Workspace/Leeds/CAS001DataRecordsMSOA.dat"));
    } else {
        tCASUV003DataHandler = new CASUV003DataHandler(
                new File("C:/Work/Projects/MoSeS/Workspace/CASUV003DataRecords.dat"));
        tCAS001DataHandler = new CAS001DataHandler(
                new File("C:/Work/Projects/MoSeS/Workspace/CAS001DataRecords.dat"));
    }
    CASUV003DataRecord aCASUV003DataRecord;
    CAS001DataRecord aCAS001DataRecord;
    BufferedReader tBufferedReader = new BufferedReader(
            new InputStreamReader(new FileInputStream(tToyModelDataRecord2CSVFile)));
    StreamTokenizer tStreamTokenizer = new StreamTokenizer(tBufferedReader);
    Generic_StaticIO.setStreamTokenizerSyntax1(tStreamTokenizer);
    // Initialise
    int tMaleCount;
    int tFemaleCount;
    int tMaleCEPCount;
    int tMaleHPCount;
    int tFemaleCEPCount;
    int tFemaleHPCount;
    int tokenType = tStreamTokenizer.nextToken();
    ToyModelDataRecord_2 aToyModelDataRecord2;
    String aZoneCode;
    HashMap tLookUpMSOAfromOAHashMap = null;
    CASDataHandler tCASDataHandler = new CASDataHandler();
    if (aggregateToMSOA) {
        tLookUpMSOAfromOAHashMap = tCASDataHandler.get_LookUpMSOAfromOAHashMap();
    }
    Counts aCounts;
    tToyModelDataRecordMaleFemaleComparisonFilePrintWriter.println(
            "ZoneCode,CAS001HPFemales,CAS001CEPFemales,CAS001Females,CASUV003Females,ToyModelFemales,ToyModelHPFemales,ToyModelCEPFemales,CAS001HPMales,CAS001CEPMales,CAS001Males,CASUV003Males,ToyModelMales,ToyModelHPMales,ToyModelCEPMales");
    TreeMap result = new TreeMap();
    while (tokenType != StreamTokenizer.TT_EOF) {
        switch (tokenType) {
        case StreamTokenizer.TT_WORD:
            aToyModelDataRecord2 = new ToyModelDataRecord_2(tToyModelDataHandler, tStreamTokenizer.sval);
            if (aggregateToMSOA) {
                aZoneCode = (String) tLookUpMSOAfromOAHashMap
                        .get(new String(aToyModelDataRecord2.getZone_Code()));
            } else {
                aZoneCode = String.valueOf(aToyModelDataRecord2.getZone_Code());
            }
            if (aToyModelDataRecord2.SEX == 0) {
                tFemaleCount = 1;
                if (aToyModelDataRecord2.tHouseholdID != -9) {
                    tFemaleHPCount = 1;
                    tFemaleCEPCount = 0;
                } else {
                    tFemaleHPCount = 0;
                    tFemaleCEPCount = 1;
                }
                tMaleCount = 0;
                tMaleHPCount = 0;
                tMaleCEPCount = 0;
            } else {
                tMaleCount = 1;
                if (aToyModelDataRecord2.tHouseholdID != -9) {
                    tMaleHPCount = 1;
                    tMaleCEPCount = 0;
                } else {
                    tMaleHPCount = 0;
                    tMaleCEPCount = 1;
                }
                tFemaleCount = 0;
                tFemaleHPCount = 0;
                tFemaleCEPCount = 0;
            }
            if (result.containsKey(aZoneCode)) {
                aCounts = (Counts) result.get(aZoneCode);
                result.remove(aZoneCode);
                aCounts.addToCounts(tMaleCount, tMaleCEPCount, tMaleHPCount, tFemaleCount, tFemaleCEPCount,
                        tFemaleHPCount);
                result.put(aZoneCode, aCounts);
            } else {
                aCounts = new Counts();
                aCounts.addToCounts(tMaleCount, tMaleCEPCount, tMaleHPCount, tFemaleCount, tFemaleCEPCount,
                        tFemaleHPCount);
                result.put(aZoneCode, aCounts);
            }
        }
        tokenType = tStreamTokenizer.nextToken();
    }
    Iterator aIterator = result.keySet().iterator();
    Object key;
    while (aIterator.hasNext()) {
        key = aIterator.next();
        aCounts = (Counts) result.get(key);
        aZoneCode = (String) key;
        aCASUV003DataRecord = (CASUV003DataRecord) tCASUV003DataHandler.getDataRecord(aZoneCode);
        aCAS001DataRecord = (CAS001DataRecord) tCAS001DataHandler.getDataRecord(aZoneCode);
        tToyModelDataRecordMaleFemaleComparisonFilePrintWriter.println("" + aZoneCode + ", "
                + aCAS001DataRecord.getHouseholdResidentsFemales() + ", "
                + aCAS001DataRecord.getCommunalEstablishmentResidentsFemales() + ", "
                + (aCAS001DataRecord.getHouseholdResidentsFemales()
                        + aCAS001DataRecord.getCommunalEstablishmentResidentsFemales())
                + ", " + aCASUV003DataRecord.getFemales() + ", " + aCounts.tFemaleCount + ", "
                + aCounts.tFemaleHPCount + ", " + aCounts.tFemaleCEPCount + ", "
                + aCAS001DataRecord.getHouseholdResidentsMales() + ", "
                + aCAS001DataRecord.getCommunalEstablishmentResidentsMales() + ", "
                + (aCAS001DataRecord.getHouseholdResidentsMales()
                        + aCAS001DataRecord.getCommunalEstablishmentResidentsMales())
                + ", " + aCASUV003DataRecord.getMales() + ", " + aCounts.tMaleCount + ", "
                + aCounts.tMaleHPCount + ", " + aCounts.tMaleCEPCount);
    }
    tBufferedReader.close();
    tToyModelDataRecordMaleFemaleComparisonFilePrintWriter.close();
}

From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport.java

protected static Object[] loadData(File _SARExpectedFile, File _CASObservedFile) throws IOException {
    Object[] result = new Object[3];
    BufferedReader _SARExpectedBufferedReader = new BufferedReader(
            new InputStreamReader(new FileInputStream(_SARExpectedFile)));
    StreamTokenizer _SARExpectedStreamTokenizer = new StreamTokenizer(_SARExpectedBufferedReader);
    Generic_StaticIO.setStreamTokenizerSyntax3(_SARExpectedStreamTokenizer);
    int _SARExpectedTokenType = _SARExpectedStreamTokenizer.nextToken();
    BufferedReader _CASObservedBufferedReader = new BufferedReader(
            new InputStreamReader(new FileInputStream(_CASObservedFile)));
    StreamTokenizer _CASObservedStreamTokenizer = new StreamTokenizer(_CASObservedBufferedReader);
    Generic_StaticIO.setStreamTokenizerSyntax3(_CASObservedStreamTokenizer);
    int _CASObservedTokenType = _CASObservedStreamTokenizer.nextToken();
    // Read Headers
    String a_SARExpectedLine = _SARExpectedStreamTokenizer.sval;
    String[] _SARExpectedVariables = a_SARExpectedLine.split(",");
    String a_CASObservedLine = _CASObservedStreamTokenizer.sval;
    String[] _CASObservedVariables = a_CASObservedLine.split(",");
    int _NumberNumericalVariables = 0;
    // Check variables names the same
    if (_SARExpectedVariables.length != _CASObservedVariables.length) {
        System.out.println("t_SARExpectedVariables.length != _CASObservedVariables.length");
    } else {//from w w w  .j  a va 2s  .  c  om
        _NumberNumericalVariables = _SARExpectedVariables.length - 1;
        for (int i = 0; i < _SARExpectedVariables.length; i++) {
            if (!_CASObservedVariables[i].equalsIgnoreCase(_SARExpectedVariables[i])) {
                System.out.print(_CASObservedVariables[i] + " != " + _SARExpectedVariables[i]);
            }
        }
    }
    result[0] = _SARExpectedVariables; // Variable Names
    // Read Data
    double[] a_SARExpectedRow = new double[_NumberNumericalVariables];
    ArrayList<double[]> _SARExpectedRows = new ArrayList<double[]>();
    double[] a_CASObservedRow = new double[_NumberNumericalVariables];
    ArrayList<double[]> _CASObservedRows = new ArrayList<double[]>();
    _SARExpectedTokenType = _SARExpectedStreamTokenizer.nextToken();
    _CASObservedTokenType = _CASObservedStreamTokenizer.nextToken();
    //ArrayList<String> _ZoneCodes = new ArrayList<String>();

    int _NumberOfAreas = 0;
    while (_SARExpectedTokenType != StreamTokenizer.TT_EOF && _CASObservedTokenType != StreamTokenizer.TT_EOF) {
        if (_SARExpectedTokenType != _CASObservedTokenType) {
            System.out.println("t_SARExpectedTokenType != _CASObservedTokenType");
        } else {
            switch (_SARExpectedTokenType) {
            case StreamTokenizer.TT_WORD:
                _NumberOfAreas++;
                a_SARExpectedRow = new double[_NumberNumericalVariables];
                a_SARExpectedLine = _SARExpectedStreamTokenizer.sval;
                _SARExpectedVariables = a_SARExpectedLine.split(",");
                a_CASObservedLine = _CASObservedStreamTokenizer.sval;
                a_CASObservedRow = new double[_NumberNumericalVariables];
                _CASObservedVariables = a_CASObservedLine.split(",");
                if (_SARExpectedVariables.length != _CASObservedVariables.length) {
                    System.out.println("_SARExpectedVariables.length != _CASObservedVariables.length");
                }
                if (_NumberNumericalVariables != _SARExpectedVariables.length - 1) {
                    System.out.println("_NumberNumericalVariables != _SARExpectedVariables.length - 1");
                }
                // if ( _CASObservedVariables[ 0 ].startsWith(
                // _SARExpectedVariables[ 0 ] ) ) {
                //_ZoneCodes.add(_CASObservedVariables[0]);
                for (int i = 0; i < _NumberNumericalVariables; i++) {
                    a_SARExpectedRow[i] = Double.valueOf(_SARExpectedVariables[i + 1]);
                    a_CASObservedRow[i] = Double.valueOf(_CASObservedVariables[i + 1]);
                    if (i == 1 && (a_SARExpectedRow[i] != a_CASObservedRow[i])) {
                        System.out.println("Warning ! constraint that allHouseholds observed ( "
                                + a_CASObservedRow[i] + ") = allHouseholds expected ( " + a_SARExpectedRow[i]
                                + " ) not met for " + _CASObservedVariables[0]);
                    }
                }
                _SARExpectedRows.add(a_SARExpectedRow);
                _CASObservedRows.add(a_CASObservedRow);
                // } else {
                // System.out.println( _CASObservedVariables[ 0 ] + " != " +
                // _SARExpectedVariables[ 0 ] );
                // }
            }
        }
        _SARExpectedTokenType = _SARExpectedStreamTokenizer.nextToken();
        _CASObservedTokenType = _CASObservedStreamTokenizer.nextToken();
    }
    if (_SARExpectedRows.size() != _CASObservedRows.size()) {
        System.out.println("_SARExpectedRows.size() != _CASObservedRows.size()");
    }
    if (_NumberOfAreas != _SARExpectedRows.size()) {
        System.out.println("_NumberOfAreas != _SARExpectedRows.size()");
    }
    // Format (Flip) data
    double[][] _SARExpectedData = new double[_NumberNumericalVariables][_NumberOfAreas];
    double[][] _CASObservedData = new double[_NumberNumericalVariables][_NumberOfAreas];
    for (int j = 0; j < _NumberOfAreas; j++) {
        a_SARExpectedRow = (double[]) _SARExpectedRows.get(j);
        a_CASObservedRow = (double[]) _CASObservedRows.get(j);
        for (int i = 0; i < _NumberNumericalVariables; i++) {
            _SARExpectedData[i][j] = a_SARExpectedRow[i];
            _CASObservedData[i][j] = a_CASObservedRow[i];
        }
    }
    result[1] = _SARExpectedData;
    result[2] = _CASObservedData;
    return result;
}

From source file:com.fluffypeople.managesieve.ManageSieveClient.java

private String tokenToString(final int c) {
    if (c > 0) {
        return new String(Character.toChars(c));
    } else {
        switch (c) {
        case StreamTokenizer.TT_EOF:
            return "EOF";
        case StreamTokenizer.TT_NUMBER:
            return "NUMBER";
        case StreamTokenizer.TT_EOL:
            return "EOL";
        case StreamTokenizer.TT_WORD:
            return ("WORD [" + in.sval + "]");
        default:
            return "UNKNOWN";
        }
    }
}
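
Design note: TT_EOF, TT_NUMBER and TT_WORD are negative constants (-1, -2 and -3 respectively) while ordinary character tokens are non-negative, which is what allows the c > 0 test to separate literal characters from the special token types. TT_EOL, however, is defined as '\n' (value 10), so a significant end-of-line token would be rendered by the c > 0 branch rather than reaching the "EOL" case.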

From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java

public void writeAggregateStatisticsForOptimisationConstraints_HSARHP_ISARCEP(String a_OutputDir_String)
        throws Exception {
    HashMap a_HID_HSARDataRecordVector_HashMap = _HSARDataHandler.get_HID_HSARDataRecordVector_HashMap();
    HashMap a_ID_RecordID_HashMap = _ISARDataHandler.get_ID_RecordID_HashMap();
    File optimisationConstraints_SARs = new File(a_OutputDir_String, "OptimisationConstraints_SARs.csv");
    FileOutputStream a_FileOutputStream = new FileOutputStream(optimisationConstraints_SARs);
    OutputDataHandler_OptimisationConstraints.writeHSARHP_ISARCEPHeader(a_FileOutputStream);
    a_FileOutputStream.flush();
    HashMap<String, Integer> a_SARCounts = null;
    CASDataRecord a_CASDataRecord;
    TreeSet<String> a_LADCodes_TreeSet = _CASDataHandler.getLADCodes_TreeSet();
    String s2;
    String s1;
    Iterator<String> a_Iterator_String = a_LADCodes_TreeSet.iterator();
    while (a_Iterator_String.hasNext()) {
        // Need to reorder data for each LAD as OAs not necessarily returned
        // in any order and an ordered result is wanted
        TreeMap<String, HashMap<String, Integer>> resultsForLAD = new TreeMap<String, HashMap<String, Integer>>();
        boolean setPrevious_OA_String = true;
        s1 = a_Iterator_String.next();
        s2 = s1.substring(0, 3);
        File resultsFile = new File(a_OutputDir_String + s2 + "/" + s1 + "/population.csv");
        // A few results are missing
        if (resultsFile.exists()) {
            System.out.println(resultsFile.toString() + " exists");
            String previous_OA_String = "";
            BufferedReader aBufferedReader = new BufferedReader(
                    new InputStreamReader(new FileInputStream(resultsFile)));
            StreamTokenizer aStreamTokenizer = new StreamTokenizer(aBufferedReader);
            Generic_StaticIO.setStreamTokenizerSyntax1(aStreamTokenizer);
            String line = "";
            int tokenType = aStreamTokenizer.nextToken();
            while (tokenType != StreamTokenizer.TT_EOF) {
                switch (tokenType) {
                case StreamTokenizer.TT_EOL:
                    //System.out.println(line);
                    String[] lineFields = line.split(",");
                    String a_OA_String = lineFields[0];
                    if (previous_OA_String.equalsIgnoreCase(a_OA_String)) {
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            //System.out.println("HP");
                            // From the id of a household get a Vector 
                            // of HSARDataRecords
                            Vector household = (Vector) a_HID_HSARDataRecordVector_HashMap
                                    .get(new Integer(lineFields[2]));
                            HSARDataRecord a_HSARDataRecord;
                            for (int i = 0; i < household.size(); i++) {
                                a_HSARDataRecord = (HSARDataRecord) household.elementAt(i);
                                GeneticAlgorithm_HSARHP_ISARCEP.addToCounts(a_HSARDataRecord, a_SARCounts,
                                        _Random);
                            }
                            //System.out.println(a_HSARDataRecord.toString());
                        } else {
                            //System.out.println("CEP");
                            // From the id of the ISARDataRecord get the
                            // ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_HSARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                        }
                    } else {
                        // Store result
                        if (setPrevious_OA_String) {
                            previous_OA_String = a_OA_String;
                            setPrevious_OA_String = false;
                        } else {
                            // Store
                            resultsForLAD.put(previous_OA_String, a_SARCounts);
                        }
                        // Initialise/Re-initialise
                        a_CASDataRecord = (CASDataRecord) _CASDataHandler.getDataRecord(a_OA_String);
                        Object[] fitnessCounts = GeneticAlgorithm_HSARHP_ISARCEP
                                .getFitnessCounts(a_CASDataRecord);
                        a_SARCounts = (HashMap<String, Integer>) fitnessCounts[1];
                        // Start a new aggregation
                        if (lineFields[1].equalsIgnoreCase("HP")) {
                            //System.out.println("HP");
                            // From the id of a household get a Vector
                            // of HSARDataRecords
                            Vector household = (Vector) a_HID_HSARDataRecordVector_HashMap
                                    .get(new Integer(lineFields[2]));
                            HSARDataRecord a_HSARDataRecord;
                            for (int i = 0; i < household.size(); i++) {
                                a_HSARDataRecord = (HSARDataRecord) household.elementAt(i);
                                GeneticAlgorithm_HSARHP_ISARCEP.addToCounts(a_HSARDataRecord, a_SARCounts,
                                        _Random);
                            }
                            //System.out.println(a_HSARDataRecord.toString());
                        } else {
                            //System.out.println("CEP");
                            // From the id of the ISARDataRecord get the
                            // ISARRecordID.
                            long a_ISARRecordID = (Long) a_ID_RecordID_HashMap.get(new Long(lineFields[2]));
                            ISARDataRecord a_ISARDataRecord = _ISARDataHandler
                                    .getISARDataRecord(a_ISARRecordID);
                            GeneticAlgorithm_HSARHP_ISARCEP.addToCountsCEP(a_ISARDataRecord, a_SARCounts,
                                    _Random);
                            //System.out.println(a_ISARDataRecord.toString());
                        }
                        //a_OA_String = lineFields[0];
                    }
                    previous_OA_String = a_OA_String;
                    break;
                case StreamTokenizer.TT_WORD:
                    line = aStreamTokenizer.sval;
                    break;
                }
                tokenType = aStreamTokenizer.nextToken();
            }
        } else {
            System.out.println(resultsFile.toString() + " !exists");
        }
        Iterator<String> string_Iterator = resultsForLAD.keySet().iterator();
        while (string_Iterator.hasNext()) {
            String oa_Code = string_Iterator.next();
            OutputDataHandler_OptimisationConstraints.writeHSARHP_ISARCEP(resultsForLAD.get(oa_Code), oa_Code,
                    a_FileOutputStream);
        }
    }
    a_FileOutputStream.close();
}

From source file:Matrix.java

/**
 * Read a matrix from a stream. The format is the same as that produced by the
 * print method, so printed matrices can be read back in (provided they were
 * printed using US Locale). Elements are separated by whitespace, all the
 * elements for each row appear on a single line, and the last row is followed
 * by a blank line.
 *
 * @param input
 *            the input stream.
 */

public static Matrix read(BufferedReader input) throws java.io.IOException {
    StreamTokenizer tokenizer = new StreamTokenizer(input);

    // Although StreamTokenizer will parse numbers, it doesn't recognize
    // scientific notation (E or D); however, Double.valueOf does.
    // The strategy here is to disable StreamTokenizer's number parsing.
    // We'll only get whitespace delimited words, EOL's and EOF's.
    // These words should all be numbers, for Double.valueOf to parse.

    tokenizer.resetSyntax();
    tokenizer.wordChars(0, 255);
    tokenizer.whitespaceChars(0, ' ');
    tokenizer.eolIsSignificant(true);
    java.util.Vector v = new java.util.Vector();

    // Ignore initial empty lines
    while (tokenizer.nextToken() == StreamTokenizer.TT_EOL)
        ;
    if (tokenizer.ttype == StreamTokenizer.TT_EOF)
        throw new java.io.IOException("Unexpected EOF on matrix read.");
    do {
        v.addElement(Double.valueOf(tokenizer.sval)); // Read & store 1st
        // row.
    } while (tokenizer.nextToken() == StreamTokenizer.TT_WORD);

    int n = v.size(); // Now we've got the number of columns!
    double row[] = new double[n];
    for (int j = 0; j < n; j++)
        // extract the elements of the 1st row.
        row[j] = ((Double) v.elementAt(j)).doubleValue();
    v.removeAllElements();
    v.addElement(row); // Start storing rows instead of columns.
    while (tokenizer.nextToken() == StreamTokenizer.TT_WORD) {
        // While non-empty lines
        v.addElement(row = new double[n]);
        int j = 0;
        do {
            if (j >= n)
                throw new java.io.IOException("Row " + v.size() + " is too long.");
            row[j++] = Double.valueOf(tokenizer.sval).doubleValue();
        } while (tokenizer.nextToken() == StreamTokenizer.TT_WORD);
        if (j < n)
            throw new java.io.IOException("Row " + v.size() + " is too short.");
    }
    int m = v.size(); // Now we've got the number of rows.
    double[][] A = new double[m][];
    v.copyInto(A); // copy the rows out of the vector
    return new Matrix(A);
}
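
A minimal usage sketch (the file name and wrapper class are hypothetical; it assumes the Matrix class above with its Matrix(double[][]) constructor):

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

public class MatrixReadDemo {
    public static void main(String[] args) throws IOException {
        // Hypothetical whitespace-delimited input in the format described above
        try (BufferedReader input = new BufferedReader(new FileReader("matrix.txt"))) {
            Matrix m = Matrix.read(input); // consumes rows until a blank line or TT_EOF
        }
    }
}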

From source file:com.zimbra.common.calendar.ZoneInfo2iCalendar.java

private static void readExtraData(Reader reader) throws IOException, ParseException {
    char dquote = '"';
    StreamTokenizer tokenizer = new StreamTokenizer(reader);
    tokenizer.resetSyntax();
    tokenizer.wordChars(32, 126);
    tokenizer.whitespaceChars(' ', ' ');
    tokenizer.whitespaceChars('\t', '\t');
    tokenizer.whitespaceChars(0, 20);
    tokenizer.commentChar('#');
    tokenizer.quoteChar(dquote);
    tokenizer.eolIsSignificant(true);

    List<String> tokenList = new ArrayList<String>();
    LineType lineType = LineType.UNKNOWN;
    boolean atLineStart = true;

    int ttype;
    int prevTtype = StreamTokenizer.TT_EOL; // used for empty line detection
    while ((ttype = tokenizer.nextToken()) != StreamTokenizer.TT_EOF) {
        int lineNum = tokenizer.lineno();
        if (ttype == StreamTokenizer.TT_WORD || ttype == dquote) {
            String token = tokenizer.sval;
            if (atLineStart) {
                lineType = LineType.lookUp(token);
                if (LineType.UNKNOWN.equals(lineType))
                    throw new ParseException("Invalid line type", lineNum);
            } else {
                tokenList.add(token);
            }
            atLineStart = false;
        } else if (ttype == StreamTokenizer.TT_EOL) {
            if (prevTtype == StreamTokenizer.TT_EOL) {
                prevTtype = ttype;
                continue;
            }
            atLineStart = true;
            switch (lineType) {
            case PRIMARYZONE:
                if (tokenList.size() < 1)
                    throw new ParseException("Not enough fields in a PrimaryZone line", lineNum);
                String primaryTZID = tokenList.get(0);
                sPrimaryTZIDs.add(primaryTZID);
                break;
            case ZONEMATCHSCORE:
                if (tokenList.size() < 2)
                    throw new ParseException("Not enough fields in a ZoneMatchScore line", lineNum);
                String zoneName = tokenList.get(0);
                String zoneMatchScoreStr = tokenList.get(1);
                int zoneMatchScore = 0;
                try {
                    zoneMatchScore = Integer.parseInt(zoneMatchScoreStr);
                } catch (NumberFormatException e) {
                    throw new ParseException("Zone match score must be an integer: " + zoneMatchScoreStr,
                            lineNum);
                }
                sMatchScores.put(zoneName, zoneMatchScore);
                break;
            }
            if (atLineStart) {
                tokenList.clear();
                lineType = LineType.UNKNOWN;
            }
        } else if (ttype == StreamTokenizer.TT_NUMBER) {
            // shouldn't happen
            throw new ParseException("Invalid parser state: TT_NUMBER found", lineNum);
        }
        prevTtype = ttype;
    }
}

From source file:com.rapidminer.tools.Tools.java

/** Delivers the next token and checks if it's the end of the line. */
public static void getLastToken(StreamTokenizer tokenizer, boolean endOfFileOk) throws IOException {
    if (tokenizer.nextToken() != StreamTokenizer.TT_EOL
            && (tokenizer.ttype != StreamTokenizer.TT_EOF || !endOfFileOk)) {
        throw new IOException("expected the end of the line " + tokenizer.lineno());
    }
}
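
A usage sketch for getLastToken (the surrounding class and input are hypothetical; note that eolIsSignificant(true) is required for the tokenizer to return TT_EOL at all):

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

public class GetLastTokenDemo {
    public static void main(String[] args) throws IOException {
        StreamTokenizer tokenizer = new StreamTokenizer(new StringReader("alpha\nbeta\n"));
        tokenizer.eolIsSignificant(true); // without this, TT_EOL is never returned
        while (tokenizer.nextToken() == StreamTokenizer.TT_WORD) {
            System.out.println(tokenizer.sval);
            Tools.getLastToken(tokenizer, true); // throws if the line continues
        }
    }
}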