Example usage for java.io FileReader close

List of usage examples for java.io FileReader close

Introduction

On this page you can find example usages of java.io.FileReader.close().

Prototype

public void close() throws IOException 
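Below is a minimal sketch of calling close() on a FileReader, assuming the usual java.io imports; the file name "example.txt" is an illustrative placeholder, not taken from the examples below. It shows the classic close-in-finally pattern; on Java 7+ the same effect can be obtained with try-with-resources, which calls close() automatically.

private void readAndClose() throws IOException {
    FileReader reader = null;
    try {
        reader = new FileReader("example.txt"); // placeholder path
        int c;
        while ((c = reader.read()) != -1) {
            System.out.print((char) c);
        }
    } finally {
        if (reader != null) {
            reader.close(); // release the underlying file handle even if reading failed
        }
    }
}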

Usage

From source file:skoa.helpers.Graficos.java

private void compararFicheros(String freferencia, String fcomparar) {
    System.out.println("min=" + rangoMinutos + ".");
    File fr = new File(ruta + freferencia); // The first file is the reference, and the one the increments are taken from.
    File fc = new File(ruta + fcomparar);
    FileReader fr1 = null, fr2 = null;
    BufferedReader linea1 = null, linea2 = null;
    String L1 = null, L2 = null;
    SimpleDateFormat formatoDelTexto = new SimpleDateFormat("yyyy-MM-dd HH:mm");
    // Open both files at once, reading line by line and comparing.
    try {
        File temp = new File(ruta + "temp.txt");
        if (!temp.exists())
            temp.createNewFile(); // Create the file
        // READ THE FIRST FILE
        fr1 = new FileReader(fr);
        linea1 = new BufferedReader(fr1);
        Vector<String> fechas = new Vector<String>(); // vector that will hold all the dates
        Date d1 = null, dini = null, dfin = null;
        boolean primeravez = true, nuevo = true;
        float media1 = 0;
        int nelem1 = 0, segd1 = 0;
        String fecha = "", aux = "", lant = "";
        while ((L1 = linea1.readLine()) != null) {
            aux = L1.substring(0, L1.indexOf("\t")); // Date of this line.
            d1 = formatoDelTexto.parse(aux);
            String Ssegd1 = aux.substring(aux.indexOf(" ") + 7, aux.indexOf(" ") + 9);
            segd1 = Integer.parseInt(Ssegd1);
            d1.setSeconds(segd1);
            if (nuevo) { // If the interval is finished, move on to the next one.
                Calendar c = Calendar.getInstance();
                if (!primeravez) {// If it is not the first iteration, save before reinitializing.
                    //System.out.println(media1+"  "+nelem1);
                    media1 = media1 / nelem1;
                    BufferedWriter bw = new BufferedWriter(new FileWriter(ruta + "temp.txt", true)); // For writing.
                    String x1 = "" + media1;
                    if (!x1.contentEquals("NaN"))
                        bw.write("" + fecha + "\t" + media1 + " " + unidad + "\n");
                    bw.close();
                    media1 = nelem1 = 0;
                    String v = lant.substring(lant.indexOf("\t") + 1);
                    v = v.substring(0, v.indexOf(" "));
                    media1 = Float.parseFloat(v);// Initialize with the values from the previous line.
                    nelem1 = 1;
                } else {
                    String u = L1.substring(L1.indexOf("\t") + 1);
                    unidad = u.substring(u.indexOf(" ") + 1);
                    lant = L1;
                    // On the first pass, the initial date is also initialized.
                }
                primeravez = false;
                fecha = lant.substring(0, lant.indexOf("\t"));
                fechas.add(fecha);
                // Insert the date into the dates vector, to be used later for the second file.
                Date diniaux = formatoDelTexto.parse(fecha);
                String Ssegd2 = fecha.substring(fecha.indexOf(" ") + 7, fecha.indexOf(" ") + 9);
                int segd2 = Integer.parseInt(Ssegd2);
                c.setTime(diniaux);
                dini = c.getTime();
                dini.setSeconds(segd2);
                //System.out.println("Ini="+dini);
                c.add(Calendar.MINUTE, Integer.parseInt(rangoMinutos));
                dfin = c.getTime();
                dfin.setSeconds(segd2);
                //System.out.println("Fin="+dfin);
                nuevo = false;// This variable is used to start a new interval again
            }
            if (d1.compareTo(dini) == 0) { // Equal dates
                aux = L1.substring(L1.indexOf("\t") + 1);
                aux = aux.substring(0, aux.indexOf(" "));
                media1 = media1 + Float.parseFloat(aux);
                nelem1++;
            } else if (d1.compareTo(dini) > 0 && d1.compareTo(dfin) < 0) { // It is within the interval
                aux = L1.substring(L1.indexOf("\t") + 1);
                aux = aux.substring(0, aux.indexOf(" "));
                media1 = media1 + Float.parseFloat(aux);
                nelem1++;
            } else {// If the date is before the start date or after the end date, switch to a new interval.
                nuevo = true;
            }
            lant = L1;
        }
        // save the last and second-to-last values, if any
        media1 = media1 / nelem1;
        BufferedWriter bw = new BufferedWriter(new FileWriter(ruta + "temp.txt", true)); // For writing.
        String x1 = "" + media1;
        String auxi = dini.toGMTString(); //d mon yyyy hh:mm:ss
        String d = auxi.substring(0, auxi.indexOf(" "));
        if (Integer.parseInt(d) < 10)
            d = "0" + d;
        auxi = auxi.substring(auxi.indexOf(" ") + 1);
        String m = auxi.substring(0, auxi.indexOf(" "));
        if (m.contentEquals("Jan"))
            m = "01";
        if (m.contentEquals("Feb"))
            m = "02";
        if (m.contentEquals("Mar"))
            m = "03";
        if (m.contentEquals("Apr"))
            m = "04";
        if (m.contentEquals("May"))
            m = "05";
        if (m.contentEquals("Jun"))
            m = "06";
        if (m.contentEquals("Jul"))
            m = "07";
        if (m.contentEquals("Aug"))
            m = "08";
        if (m.contentEquals("Sep"))
            m = "09";
        if (m.contentEquals("Oct"))
            m = "10";
        if (m.contentEquals("Nov"))
            m = "11";
        if (m.contentEquals("Dec"))
            m = "12";
        auxi = auxi.substring(auxi.indexOf(" ") + 1);
        String y = auxi.substring(0, auxi.indexOf(" "));
        auxi = auxi.substring(auxi.indexOf(" ") + 1);
        String h = auxi.substring(0, auxi.indexOf(" "));
        //System.out.println(y+"-"+m+"-"+d+" "+h);
        if (!x1.contentEquals("NaN"))
            bw.write("" + y + "-" + m + "-" + d + " " + h + "\t" + media1 + " " + unidad + "\n");
        bw.close();
        fechas.add(y + "-" + m + "-" + d + " " + h);
        fecha = lant.substring(0, lant.indexOf("\t"));
        if (!fecha.isEmpty()) {
            String auxr = lant.substring(lant.indexOf("\t") + 1);
            auxr = auxr.substring(0, auxr.indexOf(" "));
            media1 = Float.parseFloat(auxr);
            bw = new BufferedWriter(new FileWriter(ruta + "temp.txt", true)); // For writing.
            x1 = "" + media1;
            if (!x1.contentEquals("NaN"))
                bw.write("" + fecha + "\t" + media1 + " " + unidad + "\n");
            bw.close();
            fechas.add(fecha);
        }
        fr1.close();
        //for (int i=0;i<fechas.size();i++) System.out.println("*"+fechas.elementAt(i));

        // First file read; now read the second one.
        File temp2 = new File(ruta + "temp2.txt");
        if (!temp2.exists())
            temp2.createNewFile(); // Create the file
        fr2 = new FileReader(fc);
        linea2 = new BufferedReader(fr2);
        int pos = 0;
        String fechaf = "";
        media1 = nelem1 = 0;
        nuevo = true;
        primeravez = true;
        while ((L2 = linea2.readLine()) != null) {
            aux = L2.substring(0, L2.indexOf("\t")); // Date of this line.
            d1 = formatoDelTexto.parse(aux);
            String Ssegd1 = aux.substring(aux.indexOf(" ") + 7, aux.indexOf(" ") + 9);
            segd1 = Integer.parseInt(Ssegd1);
            d1.setSeconds(segd1);
            if (nuevo) { // If the interval is finished, move on to the next one.
                Calendar c = Calendar.getInstance();
                if (!primeravez) {// If it is not the first iteration, save before reinitializing.
                    media1 = media1 / nelem1;
                    BufferedWriter bw1 = new BufferedWriter(new FileWriter(ruta + "temp2.txt", true)); // For writing.
                    x1 = "" + media1;
                    if (!x1.contentEquals("NaN"))
                        bw1.write("" + /*fechas.elementAt(pos)+"\t"+*/media1 + " " + unidad + "\n");
                    bw1.close();
                    pos++;
                    //media1=nelem1=0;
                    String v = lant.substring(lant.indexOf("\t") + 1);
                    v = v.substring(0, v.indexOf(" "));
                    media1 = Float.parseFloat(v);// Initialize with the values from the previous line.
                    nelem1 = 1;
                } else {
                    String u = L2.substring(L2.indexOf("\t") + 1);
                    unidad = u.substring(u.indexOf(" ") + 1);
                    lant = L1;
                    pos = 0;
                    // On the first pass, the initial date is also initialized.
                }
                //System.out.println(fechas.elementAt(pos));
                primeravez = false;
                fecha = fechas.elementAt(pos);
                Date diniaux = formatoDelTexto.parse(fecha);
                String Ssegd2 = fecha.substring(fecha.indexOf(" ") + 7, fecha.indexOf(" ") + 9);
                int segd2 = Integer.parseInt(Ssegd2);
                c.setTime(diniaux);
                dini = c.getTime(); // START DATE.
                dini.setSeconds(segd2);

                if (pos + 1 >= fechas.size())
                    break;
                fechaf = fechas.elementAt(pos + 1);
                Date dfinaux = formatoDelTexto.parse(fechaf);
                Ssegd2 = fecha.substring(fechaf.indexOf(" ") + 7, fechaf.indexOf(" ") + 9);
                segd2 = Integer.parseInt(Ssegd2);
                c.setTime(dfinaux);
                dfin = c.getTime(); // END DATE
                dfin.setSeconds(segd2);
                //System.out.println("INI="+dini);
                //System.out.println("FIN="+dfin);
                nuevo = false;// This variable is used to start a new interval again
            }
            if (d1.compareTo(dini) == 0) { // Equal dates
                aux = L2.substring(L2.indexOf("\t") + 1);
                aux = aux.substring(0, aux.indexOf(" "));
                media1 = media1 + Float.parseFloat(aux);
                nelem1++;
            } else if (d1.compareTo(dini) > 0 && d1.compareTo(dfin) < 0) { // It is within the interval
                aux = L2.substring(L2.indexOf("\t") + 1);
                aux = aux.substring(0, aux.indexOf(" "));
                media1 = media1 + Float.parseFloat(aux);
                nelem1++;
            } else {// If the date is before the start date or after the end date, switch to a new interval.
                nuevo = true;
            }
            lant = L2;
        }
        // save the last value, if any
        fecha = lant.substring(0, lant.indexOf("\t"));
        if (!fecha.isEmpty()) {
            String auxr = lant.substring(lant.indexOf("\t") + 1);
            auxr = auxr.substring(0, auxr.indexOf(" "));
            media1 = Float.parseFloat(auxr);
            BufferedWriter bw2 = new BufferedWriter(new FileWriter(ruta + "temp2.txt", true)); // For writing.
            x1 = "" + media1;
            if (!x1.contentEquals("NaN"))
                bw2.write("" + /*fechas.elementAt(pos+1)+"\t"+*/media1 + " " + unidad + "\n");
            bw2.close();
        }
        fr2.close();
        // CREATE THE UNIFIED FILE
        File unificado = new File(ruta + "unificado.txt");
        if (!unificado.exists())
            unificado.createNewFile(); // Create the file
        fr1 = new FileReader(temp);
        linea1 = new BufferedReader(fr1);
        fr2 = new FileReader(temp2);
        linea2 = new BufferedReader(fr2);
        L1 = L2 = "";
        BufferedWriter bwf = new BufferedWriter(new FileWriter(ruta + "unificado.txt", true)); // For writing.
        while ((L1 = linea1.readLine()) != null && (L2 = linea2.readLine()) != null) {
            bwf.write(L1 + "\t" + L2 + "\n");
        }
        bwf.close();
        fechas.removeAllElements();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        try {
            if (null != fr1)
                fr1.close(); // close the first file.
            if (null != fr2)
                fr2.close(); // close the second file.
        } catch (Exception e2) { // In case closing throws an exception.
            e2.printStackTrace();
        }
    }
    File temp1 = new File(ruta + "temp.txt");
    File temp2 = new File(ruta + "temp2.txt");
    temp1.delete();
    temp2.delete();
}
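The example above closes fr1 and fr2 in a finally block so both file handles are released even when parsing fails. As an alternative sketch (the method name is illustrative, and the ruta field is reused from the example only for context), the same closing behaviour can be expressed with try-with-resources, which closes the readers automatically in reverse declaration order:

private void compararConTryWithResources(String freferencia, String fcomparar) throws IOException {
    try (FileReader fr1 = new FileReader(ruta + freferencia);
            BufferedReader linea1 = new BufferedReader(fr1);
            FileReader fr2 = new FileReader(ruta + fcomparar);
            BufferedReader linea2 = new BufferedReader(fr2)) {
        String l1, l2;
        while ((l1 = linea1.readLine()) != null && (l2 = linea2.readLine()) != null) {
            // ... compare l1 and l2 here ...
        }
    } // all four readers are closed here automatically, even on exception
}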

From source file:GestoSAT.GestoSAT.java

public boolean actualizarConfiguracion(Vector<String> mySQL, Vector<String> confSeg, int iva, String logo)
        throws Exception {
    FileReader file;
    try {
        this.iva = Math.abs(iva);

        BufferedImage image = null;
        byte[] imageByte;

        BASE64Decoder decoder = new BASE64Decoder();
        imageByte = decoder.decodeBuffer(logo.split(",")[1]);
        ByteArrayInputStream bis = new ByteArrayInputStream(imageByte);
        image = ImageIO.read(bis);
        bis.close();

        // write the image to a file
        File outputfile = new File("logo");
        String formato = logo.split("/")[1].split(";")[0];
        ImageIO.write(image, formato, outputfile);

        // MySQL
        if (mySQL.elementAt(0).equals(this.mySQL.elementAt(0))) {
            if (!mySQL.elementAt(1).equals(this.mySQL.elementAt(1))
                    && !mySQL.elementAt(2).equals(this.mySQL.elementAt(2))
                    && (!mySQL.elementAt(3).equals(this.mySQL.elementAt(3))
                            || !mySQL.elementAt(0).equals(""))) {
                Class.forName("com.mysql.jdbc.Driver");
                this.con.close();
                this.con = DriverManager.getConnection("jdbc:mysql://" + mySQL.elementAt(0) + ":"
                        + Math.abs(Integer.parseInt(mySQL.elementAt(1))) + "/gestosat?user="
                        + mySQL.elementAt(2) + "&password=" + mySQL.elementAt(3));

                this.mySQL.set(0, mySQL.elementAt(0));
                this.mySQL.set(1, Math.abs(Integer.parseInt(mySQL.elementAt(1))) + "");
                this.mySQL.set(2, mySQL.elementAt(2));
                this.mySQL.set(3, mySQL.elementAt(3));
            }
        } else {
            // Check that pass != ""
            Process pGet = Runtime.getRuntime()
                    .exec("mysqldump -u " + this.mySQL.elementAt(2) + " -p" + this.mySQL.elementAt(3) + " -h "
                            + this.mySQL.elementAt(0) + " -P " + this.mySQL.elementAt(1) + " gestosat");

            InputStream is = pGet.getInputStream();
            FileOutputStream fos = new FileOutputStream("backupGestoSAT.sql");
            byte[] bufferOut = new byte[1000];

            int leido = is.read(bufferOut);
            while (leido > 0) {
                fos.write(bufferOut, 0, leido);
                leido = is.read(bufferOut);
            }
            fos.close();

            Class.forName("com.mysql.jdbc.Driver");
            this.con.close();
            this.con = DriverManager.getConnection(
                    "jdbc:mysql://" + mySQL.elementAt(0) + ":" + Math.abs(Integer.parseInt(mySQL.elementAt(1)))
                            + "/gestosat?user=" + mySQL.elementAt(2) + "&password=" + mySQL.elementAt(3));

            this.mySQL.set(0, mySQL.elementAt(0));
            this.mySQL.set(1, Math.abs(Integer.parseInt(mySQL.elementAt(1))) + "");
            this.mySQL.set(2, mySQL.elementAt(2));
            this.mySQL.set(3, mySQL.elementAt(3));

            Process pPut = Runtime.getRuntime()
                    .exec("mysql -u " + mySQL.elementAt(2) + " -p" + mySQL.elementAt(3) + " -h "
                            + mySQL.elementAt(0) + " -P " + Math.abs(Integer.parseInt(mySQL.elementAt(1)))
                            + " gestosat");

            OutputStream os = pPut.getOutputStream();
            FileInputStream fis = new FileInputStream("backupGestoSAT.sql");
            byte[] bufferIn = new byte[1000];

            int escrito = fis.read(bufferIn);
            while (escrito > 0) {
                os.write(bufferIn, 0, escrito); // write the number of bytes actually read in this iteration
                escrito = fis.read(bufferIn);
            }

            os.flush();
            os.close();
            fis.close();
        }

        // FTP

        FTPClient cliente = new FTPClient();
        if (!confSeg.elementAt(3).equals("")) {
            cliente.connect(confSeg.elementAt(0), Integer.parseInt(confSeg.elementAt(1)));

            if (cliente.login(confSeg.elementAt(2), confSeg.elementAt(3))) {
                cliente.setFileType(FTP.BINARY_FILE_TYPE);
                BufferedInputStream buffIn = new BufferedInputStream(new FileInputStream("backupGestoSAT.sql"));
                cliente.enterLocalPassiveMode();
                cliente.storeFile("backupGestoSAT.sql", buffIn);
                buffIn.close();
                cliente.logout();
                cliente.disconnect();

                this.confSeg = confSeg;
            } else
                return false;
        }

        File archConf = new File("confGestoSAT");
        BufferedWriter bw = new BufferedWriter(new FileWriter(archConf));
        bw.write(this.mySQL.elementAt(0) + ";" + Math.abs(Integer.parseInt(this.mySQL.elementAt(1))) + ";"
                + this.mySQL.elementAt(2) + ";" + this.mySQL.elementAt(3) + ";" + this.confSeg.elementAt(0)
                + ";" + Math.abs(Integer.parseInt(this.confSeg.elementAt(1))) + ";" + this.confSeg.elementAt(2)
                + ";" + this.confSeg.elementAt(3) + ";" + Math.abs(iva));
        bw.close();

        return true;
    } catch (Exception ex) {
        file = new FileReader("confGestoSAT");
        BufferedReader b = new BufferedReader(file);
        String cadena;
        cadena = b.readLine();
        String[] valores = cadena.split(";");

        this.mySQL.add(valores[0]);
        this.mySQL.add(Math.abs(Integer.parseInt(valores[1])) + "");
        this.mySQL.add(valores[2]);
        this.mySQL.add(valores[3]);
        con.close();
        Class.forName("com.mysql.jdbc.Driver");
        con = DriverManager
                .getConnection("jdbc:mysql://" + this.mySQL.elementAt(0) + ":" + this.mySQL.elementAt(1)
                        + "/gestosat?user=" + this.mySQL.elementAt(2) + "&password=" + this.mySQL.elementAt(3));

        this.confSeg.add(valores[4]);
        this.confSeg.add(Math.abs(Integer.parseInt(valores[5])) + "");
        this.confSeg.add(valores[6]);
        this.confSeg.add(valores[7]);

        file.close();
        Logger.getLogger(GestoSAT.class.getName()).log(Level.SEVERE, null, ex);
        return false;
    }
}

From source file:org.kuali.kfs.module.ld.batch.service.impl.LaborScrubberProcess.java

/**
 * This will process a group of origin entries. The COBOL code was refactored heavily to produce this, so there is no
 * one-to-one correspondence between sections of the COBOL and this code.
 *
 * @param originEntryGroup Group to process
 */
protected void processGroup() {
    LaborOriginEntry lastEntry = null;
    scrubCostShareAmount = KualiDecimal.ZERO;
    unitOfWork = new UnitOfWorkInfo();
    FileReader INPUT_GLE_FILE = null;
    String GLEN_RECORD;
    BufferedReader INPUT_GLE_FILE_br;
    PrintStream OUTPUT_GLE_FILE_ps;
    PrintStream OUTPUT_ERR_FILE_ps;
    PrintStream OUTPUT_EXP_FILE_ps;
    try {
        INPUT_GLE_FILE = new FileReader(inputFile);
    } catch (FileNotFoundException e) {
        throw new RuntimeException("Unable to find input file: " + inputFile, e);
    }
    try {
        OUTPUT_GLE_FILE_ps = new PrintStream(validFile);
        OUTPUT_ERR_FILE_ps = new PrintStream(errorFile);
        OUTPUT_EXP_FILE_ps = new PrintStream(expiredFile);
    } catch (IOException e) {
        throw new RuntimeException("Problem opening output files", e);
    }

    INPUT_GLE_FILE_br = new BufferedReader(INPUT_GLE_FILE);
    LOG.debug("Starting Scrubber Process process group...");

    int lineNumber = 0;
    int loadedCount = 0;
    boolean errorsLoading = false;

    LedgerSummaryReport laborLedgerSummaryReport = new LedgerSummaryReport();
    LaborOriginEntry unscrubbedEntry = new LaborOriginEntry();
    List<Message> tmperrors = new ArrayList<Message>();
    try {
        String currentLine = INPUT_GLE_FILE_br.readLine();

        while (currentLine != null) {
            boolean saveErrorTransaction = false;
            boolean saveValidTransaction = false;
            LaborOriginEntry scrubbedEntry = new LaborOriginEntry();
            try {
                lineNumber++;

                if (!StringUtils.isEmpty(currentLine) && !StringUtils.isBlank(currentLine.trim())) {
                    unscrubbedEntry = new LaborOriginEntry();
                    tmperrors = unscrubbedEntry.setFromTextFileForBatch(currentLine, lineNumber);
                    loadedCount++;

                    // just test entry with the entry loaded above
                    scrubberReport.incrementUnscrubbedRecordsRead();
                    List<Message> transactionErrors = new ArrayList<Message>();

                    // This is done so if the code modifies this row, then saves it, it will be an insert,
                    // and it won't touch the original. The Scrubber never modifies input rows/groups.
                    unscrubbedEntry.setGroup(null);
                    unscrubbedEntry.setVersionNumber(null);
                    unscrubbedEntry.setEntryId(null);
                    saveErrorTransaction = false;
                    saveValidTransaction = false;

                    // Build a scrubbed entry
                    // Labor has more fields
                    buildScrubbedEntry(unscrubbedEntry, scrubbedEntry);

                    // For Labor Scrubber
                    boolean laborIndicator = true;
                    laborLedgerSummaryReport.summarizeEntry(unscrubbedEntry);

                    try {
                        tmperrors.addAll(scrubberValidator.validateTransaction(unscrubbedEntry, scrubbedEntry,
                                universityRunDate, laborIndicator, laborAccountingCycleCachingService));
                    } catch (Exception e) {
                        transactionErrors.add(
                                new Message(e.toString() + " occurred for this record.", Message.TYPE_FATAL));
                        saveValidTransaction = false;
                    }
                    transactionErrors.addAll(tmperrors);

                    // Expired account?
                    Account unscrubbedEntryAccount = laborAccountingCycleCachingService.getAccount(
                            unscrubbedEntry.getChartOfAccountsCode(), unscrubbedEntry.getAccountNumber());
                    if (ObjectUtils.isNotNull(unscrubbedEntry.getAccount())
                            && (scrubberValidator.isAccountExpired(unscrubbedEntryAccount, universityRunDate)
                                    || unscrubbedEntryAccount.isClosed())) {
                        // Make a copy of it so OJB doesn't just update the row in the original
                        // group. It needs to make a new one in the expired group
                        LaborOriginEntry expiredEntry = new LaborOriginEntry(scrubbedEntry);

                        createOutputEntry(expiredEntry, OUTPUT_EXP_FILE_ps);
                        scrubberReport.incrementExpiredAccountFound();
                    }

                    if (!isFatal(transactionErrors)) {
                        saveValidTransaction = true;

                        // See if unit of work has changed
                        if (!unitOfWork.isSameUnitOfWork(scrubbedEntry)) {
                            // Generate offset for last unit of work
                            unitOfWork = new UnitOfWorkInfo(scrubbedEntry);
                        }
                        KualiDecimal transactionAmount = scrubbedEntry.getTransactionLedgerEntryAmount();
                        ParameterEvaluator offsetFiscalPeriods = /*REFACTORME*/SpringContext
                                .getBean(ParameterEvaluatorService.class)
                                .getParameterEvaluator(ScrubberStep.class,
                                        GeneralLedgerConstants.GlScrubberGroupRules.OFFSET_FISCAL_PERIOD_CODES,
                                        scrubbedEntry.getUniversityFiscalPeriodCode());
                        BalanceType scrubbedEntryBalanceType = laborAccountingCycleCachingService
                                .getBalanceType(scrubbedEntry.getFinancialBalanceTypeCode());
                        if (scrubbedEntryBalanceType.isFinancialOffsetGenerationIndicator()
                                && offsetFiscalPeriods.evaluationSucceeds()) {
                            if (scrubbedEntry.isDebit()) {
                                unitOfWork.offsetAmount = unitOfWork.offsetAmount.add(transactionAmount);
                            } else {
                                unitOfWork.offsetAmount = unitOfWork.offsetAmount.subtract(transactionAmount);
                            }
                        }

                        // The sub account type code will only exist if there is a valid sub account
                        // TODO: GLConstants.getSpaceSubAccountTypeCode();
                        String subAccountTypeCode = "  ";

                        A21SubAccount scrubbedEntryA21SubAccount = laborAccountingCycleCachingService
                                .getA21SubAccount(scrubbedEntry.getChartOfAccountsCode(),
                                        scrubbedEntry.getAccountNumber(), scrubbedEntry.getSubAccountNumber());
                        if (ObjectUtils.isNotNull(scrubbedEntryA21SubAccount)) {
                            subAccountTypeCode = scrubbedEntryA21SubAccount.getSubAccountTypeCode();
                        }

                        if (transactionErrors.size() > 0) {
                            this.laborMainReportWriterService.writeError(unscrubbedEntry, transactionErrors);
                        }

                        lastEntry = scrubbedEntry;
                    } else {
                        // Error transaction
                        saveErrorTransaction = true;
                        this.laborMainReportWriterService.writeError(unscrubbedEntry, transactionErrors);
                    }

                    if (saveValidTransaction) {
                        scrubbedEntry.setTransactionScrubberOffsetGenerationIndicator(false);
                        createOutputEntry(scrubbedEntry, OUTPUT_GLE_FILE_ps);
                        scrubberReport.incrementScrubbedRecordWritten();
                    }

                    if (saveErrorTransaction) {
                        // Make a copy of it so OJB doesn't just update the row in the original
                        // group. It needs to make a new one in the error group
                        LaborOriginEntry errorEntry = new LaborOriginEntry(unscrubbedEntry);
                        errorEntry.setTransactionScrubberOffsetGenerationIndicator(false);
                        createOutputEntry(currentLine, OUTPUT_ERR_FILE_ps);
                        scrubberReport.incrementErrorRecordWritten();
                    }
                }
                currentLine = INPUT_GLE_FILE_br.readLine();

            } catch (IOException ioe) {
                // catch here again, it should be from postSingleEntryIntoLaborLedger
                LOG.error("processGroup() stopped due to: " + ioe.getMessage() + " on line number : "
                        + loadedCount, ioe);
                throw new RuntimeException("processGroup() stopped due to: " + ioe.getMessage()
                        + " on line number : " + loadedCount, ioe);
            }
        }
        INPUT_GLE_FILE_br.close();
        INPUT_GLE_FILE.close();
        OUTPUT_GLE_FILE_ps.close();
        OUTPUT_ERR_FILE_ps.close();
        OUTPUT_EXP_FILE_ps.close();

        this.laborMainReportWriterService.writeStatisticLine("UNSCRUBBED RECORDS READ              %,9d",
                scrubberReport.getNumberOfUnscrubbedRecordsRead());
        this.laborMainReportWriterService.writeStatisticLine("SCRUBBED RECORDS WRITTEN             %,9d",
                scrubberReport.getNumberOfScrubbedRecordsWritten());
        this.laborMainReportWriterService.writeStatisticLine("ERROR RECORDS WRITTEN                %,9d",
                scrubberReport.getNumberOfErrorRecordsWritten());
        this.laborMainReportWriterService.writeStatisticLine("TOTAL OUTPUT RECORDS WRITTEN         %,9d",
                scrubberReport.getTotalNumberOfRecordsWritten());
        this.laborMainReportWriterService.writeStatisticLine("EXPIRED ACCOUNTS FOUND               %,9d",
                scrubberReport.getNumberOfExpiredAccountsFound());

        laborLedgerSummaryReport.writeReport(this.laborLedgerReportWriterService);
    } catch (IOException ioe) {
        LOG.error("processGroup() stopped due to: " + ioe.getMessage(), ioe);
        throw new RuntimeException("processGroup() stopped due to: " + ioe.getMessage(), ioe);
    }
}

From source file:base.BasePlayer.FileRead.java

static void setBedTrack(BedTrack addTrack) {
    try {
        /* if(!addTrack.file.getName().endsWith(".gz")) {
            return;
         }*/
        if (addTrack.getBBfileReader() != null) {
            return;
        }
        String name = "";
        if (addTrack.file != null) {
            name = addTrack.file.getName().toLowerCase();
        } else {
            name = addTrack.url.toString().toLowerCase();
        }
        if (name.endsWith(".bw") || name.endsWith(".bigwig") || name.endsWith(".bb")
                || name.endsWith(".bigbed")) {
            return;
        }
        InputStream in = null;
        String[] split;
        BufferedReader reader = null;
        GZIPInputStream gzip = null;
        FileReader freader = null;
        if (addTrack.file != null) {
            if (addTrack.file.getName().endsWith(".gz") || addTrack.file.getName().endsWith(".bgz")) {
                gzip = new GZIPInputStream(new FileInputStream(addTrack.file));
                reader = new BufferedReader(new InputStreamReader(gzip));
            } else {
                freader = new FileReader(addTrack.file);
                reader = new BufferedReader(freader);
            }
        } else {
            in = addTrack.url.openStream();
            gzip = new GZIPInputStream(in);
            reader = new BufferedReader(new InputStreamReader(gzip));
        }

        int count = 0;
        if (name.endsWith(".gff.gz") || name.endsWith(".gff3.gz")) {
            addTrack.iszerobased = 1;
            addTrack.getZerobased().setSelected(false);
            while (count < 10) {
                if (reader.readLine().startsWith("#")) {
                    continue;
                }
                split = reader.readLine().split("\\t");

                if (split.length > 5) {
                    if (!Double.isNaN(Double.parseDouble(split[5]))) {
                        addTrack.hasvalues = true;
                    }

                }
                if (Main.SELEXhash.containsKey(split[2].replace(".pfm", ""))) {
                    addTrack.selex = true;
                    addTrack.getAffinityBox().setVisible(true);

                }
                count++;
            }
        } else if (name.endsWith(".bed.gz") || name.endsWith(".bed")) {
            while (count < 10) {
                if (reader.readLine().startsWith("#") || reader.readLine().startsWith("track")) {
                    continue;
                }
                split = reader.readLine().split("\\t");

                if (split.length > 4) {
                    try {
                        if (!Double.isNaN(Double.parseDouble(split[4]))) {
                            addTrack.hasvalues = true;
                        }
                    } catch (Exception e) {

                    }

                }
                if (split.length > 3 && Main.SELEXhash.containsKey(split[3])) {
                    addTrack.selex = true;
                    addTrack.getAffinityBox().setVisible(true);
                }
                count++;
            }

        } else if (name.endsWith(".tsv.gz") || name.endsWith(".tsv.bgz")) {
            if (addTrack.valuecolumn != null) {
                while (count < 10) {
                    if (reader.readLine().startsWith("#")) {
                        continue;
                    }

                    split = reader.readLine().split("\\t");

                    if (!Double.isNaN(Double.parseDouble(split[addTrack.valuecolumn]))) {

                        addTrack.hasvalues = true;
                        break;
                    }
                    count++;
                }
            }
        }

        if (addTrack.getBBfileReader() == null && !addTrack.hasvalues) {

            addTrack.getLimitField().setVisible(false);
        } else {
            addTrack.getLimitField().setVisible(true);
        }
        if (gzip != null) {
            gzip.close();
        }

        if (freader != null) {
            freader.close();
        }
        if (in != null) {
            in.close();
        }
        reader.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:base.BasePlayer.FileRead.java

static void readGFF(File infile, String outfile, SAMSequenceDictionary dict) {
    BufferedReader reader = null;
    GZIPInputStream gzip = null;
    FileReader freader = null;
    String line = "", chrom = "-1";
    HashMap<String, String> lineHash;
    HashMap<String, Gene> genes = new HashMap<String, Gene>();
    HashMap<String, Transcript> transcripts = new HashMap<String, Transcript>();
    Gene addgene;
    Transcript addtranscript;

    try {

        if (infile.getName().endsWith(".gz")) {
            gzip = new GZIPInputStream(new FileInputStream(infile));
            reader = new BufferedReader(new InputStreamReader(gzip));
        } else {
            freader = new FileReader(infile);
            reader = new BufferedReader(freader);
        }
        //   line = reader.readLine();
        while ((line = reader.readLine()) != null) {

            if (line.startsWith("#")) {
                continue;
            }

            lineHash = makeHash(line.split("\t"));
            if (lineHash.get("type").startsWith("region")) {

                if (line.contains("unlocalized")) {
                    chrom = "unlocalized";
                } else if (lineHash.get("chromosome") != null) {
                    chrom = lineHash.get("chromosome").replace("chr", "");
                } else if (lineHash.get("name") != null) {
                    chrom = lineHash.get("name").replace("chr", "");
                }

                continue;
            }

            if (!lineHash.containsKey("parent")) {
                /*if(!lineHash.get("type").contains("gene")) {
                        
                   continue;
                }*/

                Gene gene = new Gene(chrom, lineHash);

                genes.put(getInfoValue(lineHash, "id"), gene);

                continue;
            }
            if (genes.containsKey(lineHash.get("parent"))) {

                addgene = genes.get(lineHash.get("parent"));
                transcripts.put(getInfoValue(lineHash, "id"), new Transcript(lineHash, addgene));
                if (lineHash.get("type").equals("exon")) {
                    addtranscript = transcripts.get(getInfoValue(lineHash, "id"));
                    addtranscript.addExon(lineHash, addtranscript);
                }
                if (addgene.getDescription().equals("-")) {
                    if (lineHash.containsKey("product")) {
                        addgene.setDescription(lineHash.get("product"));
                    }
                }
                continue;
            }
            if (transcripts.containsKey(lineHash.get("parent"))) {

                addtranscript = transcripts.get(lineHash.get("parent"));
                addtranscript.addExon(lineHash, addtranscript);
                continue;
            }

        }

    } catch (Exception e) {
        System.out.println(line);
        e.printStackTrace();
        System.exit(0);
    }
    try {

        Transcript transcript;
        Gene gene;
        StringBuffer exStarts, exEnds, exPhases;
        Iterator<Map.Entry<String, Gene>> it = genes.entrySet().iterator();
        ArrayList<String[]> geneArray = new ArrayList<String[]>();

        while (it.hasNext()) {
            Map.Entry<String, Gene> pair = (Map.Entry<String, Gene>) it.next();
            gene = pair.getValue();

            for (int i = 0; i < gene.getTranscripts().size(); i++) {
                transcript = gene.getTranscripts().get(i);
                exStarts = new StringBuffer("");
                exEnds = new StringBuffer("");
                exPhases = new StringBuffer("");
                for (int e = 0; e < transcript.exonArray.size(); e++) {
                    exStarts.append(transcript.exonArray.get(e).getStart() + ",");
                    exEnds.append(transcript.exonArray.get(e).getEnd() + ",");
                    exPhases.append(transcript.exonArray.get(e).getStartPhase() + ",");
                }

                String[] row = { gene.getChrom(), "" + transcript.getStart(), "" + transcript.getEnd(),
                        gene.getName(), "" + transcript.exonArray.size(),
                        MethodLibrary.getStrand(gene.getStrand()), gene.getID(), transcript.getENST(),
                        transcript.getUniprot(), "-", transcript.getBiotype(), "" + transcript.getCodingStart(),
                        "" + transcript.getCodingEnd(), exStarts.toString(), exEnds.toString(),
                        exPhases.toString(), transcript.getDescription() };
                geneArray.add(row);
            }

            it.remove();
        }

        gffSorter gffsorter = new gffSorter();
        Collections.sort(geneArray, gffsorter);

        if (outfile != null) {
            MethodLibrary.blockCompressAndIndex(geneArray, outfile, false, dict);
        }

        geneArray.clear();
        if (freader != null) {
            freader.close();
        }
        reader.close();

        if (gzip != null) {
            gzip.close();
        }

    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:base.BasePlayer.FileRead.java

static void readGTF(File infile, String outfile, SAMSequenceDictionary dict) {
    BufferedReader reader = null;
    GZIPInputStream gzip = null;
    FileReader freader = null;
    String line = "", chrom = "-1";
    HashMap<String, String> lineHash;
    HashMap<String, Gene> genes = new HashMap<String, Gene>();
    HashMap<String, Transcript> transcripts = new HashMap<String, Transcript>();
    Gene addgene;
    //Boolean found = false;
    Transcript addtranscript;

    try {

        if (infile.getName().endsWith(".gz")) {
            gzip = new GZIPInputStream(new FileInputStream(infile));
            reader = new BufferedReader(new InputStreamReader(gzip));
        } else {
            freader = new FileReader(infile);
            reader = new BufferedReader(freader);
        }
        //   line = reader.readLine();
        while ((line = reader.readLine()) != null) {

            if (line.startsWith("#")) {
                continue;
            }
            /*
            if(!line.contains("Rp1h")) {
               if(found) {
                  break;
               }
               continue;
            }
            found = true;
            */

            lineHash = makeHash(line.split("\t"));
            chrom = lineHash.get("seqid");

            if (!genes.containsKey(lineHash.get("gene_id"))) {
                /*if(genes.size() > 1) {
                   break;
                }*/
                Gene gene = new Gene(chrom, lineHash, true);

                genes.put(lineHash.get("gene_id"), gene);
                if (lineHash.get("transcript_id") == null) {
                    continue;
                }
                //continue;
            }
            if (!transcripts.containsKey(lineHash.get("transcript_id"))) {

                addgene = genes.get(lineHash.get("gene_id"));
                transcripts.put(getInfoValue(lineHash, "transcript_id"), new Transcript(lineHash, addgene));
                if (lineHash.get("type").equals("exon")) {
                    addtranscript = transcripts.get(getInfoValue(lineHash, "transcript_id"));
                    addtranscript.addExon(lineHash, addtranscript);
                }
                if (addgene.getDescription().equals("-")) {
                    if (lineHash.containsKey("gene_symbol")) {
                        addgene.setDescription(lineHash.get("gene_symbol"));
                    }
                }
                continue;
            }
            if (transcripts.containsKey(lineHash.get("transcript_id"))) {
                if (lineHash.get("type").contains("UTR")) {
                    continue;
                }
                addtranscript = transcripts.get(lineHash.get("transcript_id"));
                addtranscript.addExon(lineHash, addtranscript);

                continue;
            }

        }

    } catch (Exception e) {
        System.out.println(line);
        e.printStackTrace();
        System.exit(0);
    }
    try {

        Transcript transcript;
        Gene gene;
        StringBuffer exStarts, exEnds, exPhases;
        Iterator<Map.Entry<String, Gene>> it = genes.entrySet().iterator();
        ArrayList<String[]> geneArray = new ArrayList<String[]>();

        while (it.hasNext()) {
            Map.Entry<String, Gene> pair = (Map.Entry<String, Gene>) it.next();
            gene = pair.getValue();

            for (int i = 0; i < gene.getTranscripts().size(); i++) {
                transcript = gene.getTranscripts().get(i);
                exStarts = new StringBuffer("");
                exEnds = new StringBuffer("");
                exPhases = new StringBuffer("");
                for (int e = 0; e < transcript.exonArray.size(); e++) {
                    exStarts.append(transcript.exonArray.get(e).getStart() + ",");
                    exEnds.append(transcript.exonArray.get(e).getEnd() + ",");
                    exPhases.append(transcript.exonArray.get(e).getStartPhase() + ",");
                }

                String[] row = { gene.getChrom(), "" + transcript.getStart(), "" + transcript.getEnd(),
                        gene.getName(), "" + transcript.exonArray.size(),
                        MethodLibrary.getStrand(gene.getStrand()), gene.getID(), transcript.getENST(),
                        transcript.getUniprot(), "-", transcript.getBiotype(), "" + transcript.getCodingStart(),
                        "" + transcript.getCodingEnd(), exStarts.toString(), exEnds.toString(),
                        exPhases.toString(), transcript.getDescription() };
                if (transcript.getCodingEnd() == -1) {
                    row[11] = "" + gene.getEnd();
                    row[12] = "" + gene.getStart();
                }

                geneArray.add(row);
            }

            it.remove();
        }

        gffSorter gffsorter = new gffSorter();
        Collections.sort(geneArray, gffsorter);

        if (outfile != null) {
            MethodLibrary.blockCompressAndIndex(geneArray, outfile, false, dict);
        }

        geneArray.clear();
        if (freader != null) {
            freader.close();
        }
        reader.close();

        if (gzip != null) {
            gzip.close();
        }

    } catch (Exception e) {
        e.printStackTrace();
    }

}

From source file:org.executequery.gui.importexport.ImportDelimitedWorker.java

private Object doWork() {

    // the process result
    String processResult = null;

    // are we halting on any error
    int onError = getParent().getOnError();
    haltOnError = (onError == ImportExportProcess.STOP_TRANSFER);

    boolean isBatch = getParent().runAsBatchProcess();

    appendProgressText("Beginning import from delimited file process...");
    appendProgressText("Using connection: " + getParent().getDatabaseConnection().getName());

    // ---------------------------------------
    // table specific counters

    // the table statement result
    int tableInsertCount = 0;

    // the records processed for this table
    int tableRowCount = 0;

    // the table commit count
    int tableCommitCount = 0;

    // ---------------------------------------
    // total import process counters

    // the current commit block size
    int commitCount = 0;

    // the total records inserted
    int totalInsertCount = 0;

    // the total records processed
    int totalRecordCount = 0;

    // the error count
    int errorCount = 0;

    // the current line number
    int lineNumber = 0;

    int rollbackSize = getParent().getRollbackSize();
    int rollbackCount = 0;

    FileReader fileReader = null;
    BufferedReader reader = null;
    DateFormat dateFormat = null;

    try {
        // retrieve the import files
        Vector files = getParent().getDataFileVector();
        int fileCount = files.size();

        // whether to trim whitespace
        boolean trimWhitespace = getParent().trimWhitespace();

        // whether this table has a date/time field
        boolean hasDate = false;

        // whether we are parsing date formats
        boolean parsingDates = parseDateValues();

        // column names are first row
        boolean hasColumnNames = getParent().includeColumnNames();

        // currently bound variables in the prepared statement
        Map<ColumnData, String> boundVariables = null;

        // ignored indexes of columns from the file
        List<Integer> ignoredIndexes = null;

        if (hasColumnNames) {
            boundVariables = new HashMap<ColumnData, String>();
            ignoredIndexes = new ArrayList<Integer>();
            appendProgressText("Using column names from input file's first row.");
        }

        // columns to be imported that are in the file
        Map<ColumnData, String> fileImportedColumns = new HashMap<ColumnData, String>();

        // whether the data format failed (switch structure)
        boolean failed = false;

        // define the delimiter
        String delim = getParent().getDelimiter();

        // ---------------------------
        // --- initialise counters ---
        // ---------------------------

        // the table's column count
        int columnCount = -1;

        // the length of each line in the file
        int rowLength = -1;

        // progress bar values
        int progressStatus = -1;

        // ongoing progress value
        int progressCheck = -1;

        // the import file size
        long fileSize = -1;

        // set the date format

        if (parseDateValues()) {

            try {

                dateFormat = createDateFormatter();

            } catch (IllegalArgumentException e) {

                errorCount++;
                outputExceptionError("Error applying date mask", e);

                return FAILED;
            }

        }

        // record the start time
        start();

        // setup the regex matcher for delims

        // ----------------------------------------------------------------
        // below was the original pattern from oreilly book.
        // discovered issues when parsing values with quotes
        // in them - not only around them.
        /*
        String regex =
            "(?:^|\\" +
            delim +
            ") (?: \" ( (?> [^\"]*+ ) (?> \"\" [^\"]*+ )*+ ) \" | ( [^\"\\" +
            delim + "]*+ ) )";
        Matcher matcher = Pattern.compile(regex, Pattern.COMMENTS).matcher("");
        Matcher qMatcher = Pattern.compile("\"\"", Pattern.COMMENTS).matcher("");
        */
        // ----------------------------------------------------------------

        // modified above to regex below
        // added the look-ahead after the close quote
        // and removed the quote from the last regex pattern

        String escapedDelim = escapeDelim(delim);

        String regex = "(?:^|" + escapedDelim + ") (?: \" ( (?> [^\"]*+ ) (?> \"\" [^\"]*+ )*+ ) \"(?="
                + escapedDelim + "?) | ( [^" + escapedDelim + "]*+ ) )";

        // ----------------------------------------------------------------
        // changed above to the following - seems to work for now
        // regex pattern in full - where <delim> is the delimiter to use
        //      \"([^\"]+?)\"<delim>?|([^<delim>]+)<delim>?|<delim>
        //
        // fixed oreilly one - not running this one
        // ----------------------------------------------------------------

        Matcher matcher = Pattern.compile(regex, Pattern.COMMENTS).matcher("");
        Matcher qMatcher = Pattern.compile("\"\"", Pattern.COMMENTS).matcher("");

        // ----------------------------------------
        // --- begin looping through the tables ---
        // ----------------------------------------

        // ensure the connection has auto-commit to false
        conn = getConnection();
        conn.setAutoCommit(false);

        int currentRowLength = 0;
        boolean insertLine = false;

        // the number of columns actually available in the file
        int filesColumnCount = 0;

        for (int i = 0; i < fileCount; i++) {

            lineNumber = 0;
            tableInsertCount = 0;
            tableCommitCount = 0;
            rollbackCount = 0;
            tableRowCount = 0;
            rowLength = 0;

            if (Thread.interrupted()) {
                setProgressStatus(100);
                throw new InterruptedException();
            }

            tableCount++;

            DataTransferObject dto = (DataTransferObject) files.elementAt(i);

            // initialise the file object
            File inputFile = new File(dto.getFileName());

            outputBuffer.append("---------------------------\nTable: ");
            outputBuffer.append(dto.getTableName());
            outputBuffer.append("\nImport File: ");
            outputBuffer.append(inputFile.getName());
            appendProgressText(outputBuffer);

            // setup the reader objects
            fileReader = new FileReader(inputFile);
            reader = new BufferedReader(fileReader);

            // retrieve the columns to be imported (or all)
            Vector<ColumnData> columns = getColumns(dto.getTableName());
            columnCount = columns.size();
            filesColumnCount = columnCount;

            // the entire row read
            String row = null;

            // the current delimited value
            String value = null;

            // the ignored column count
            int ignoredCount = 0;

            // clear the file columns cache
            fileImportedColumns.clear();

            // if the first row in the file has the column
            // names compare these with the columns selected
            if (hasColumnNames) {

                // init the bound vars cache with the selected columns
                boundVariables.clear();

                for (int k = 0; k < columnCount; k++) {

                    boundVariables.put(columns.get(k), VARIABLE_NOT_BOUND);
                }

                row = reader.readLine();
                lineNumber++;

                String[] _columns = MiscUtils.splitSeparatedValues(row, delim);
                if (_columns != null && _columns.length > 0) {

                    filesColumnCount = _columns.length;

                    // --------------------------------------
                    // first determine if we have any columns in the
                    // input file that were not selected for import

                    // reset the ignored columns
                    ignoredIndexes.clear();

                    // set up another list to re-add the columns in
                    // the order in which they appear in the file.
                    // all other columns will be added to the end
                    Vector<ColumnData> temp = new Vector<ColumnData>(columnCount);

                    ColumnData cd = null;
                    int ignoredIndex = -1;
                    for (int j = 0; j < _columns.length; j++) {
                        ignoredIndex = j;
                        String column = _columns[j];

                        for (int k = 0; k < columnCount; k++) {
                            cd = columns.get(k);
                            String _column = cd.getColumnName();

                            if (_column.equalsIgnoreCase(column)) {
                                temp.add(cd);
                                fileImportedColumns.put(cd, INCLUDED_COLUMN);
                                ignoredIndex = -1;
                                break;
                            }

                        }

                        if (ignoredIndex != -1) {

                            ignoredIndexes.add(Integer.valueOf(ignoredIndex));
                        }

                    }
                    ignoredCount = ignoredIndexes.size();

                    // if we didn't find any columns at all, show warning
                    if (temp.isEmpty()) {

                        String message = "No matching column names were "
                                + "found within the specified file's first line.\n"
                                + "The current file will be ignored.";

                        outputBuffer.append(message);
                        appendProgressWarningText(outputBuffer);

                        int yesNo = GUIUtilities.displayYesNoDialog(message + "\nDo you wish to continue?",
                                "Warning");

                        if (yesNo == JOptionPane.YES_OPTION) {
                            continue;
                        } else {
                            throw new InterruptedException();
                        }

                    } else {

                        // add any other selected columns to the
                        // end of the temp list with the columns
                        // available in the file
                        boolean addColumn = false;
                        for (int k = 0; k < columnCount; k++) {
                            addColumn = false;
                            cd = columns.get(k);
                            for (int j = 0, n = temp.size(); j < n; j++) {
                                addColumn = true;
                                if (temp.get(j) == cd) {
                                    addColumn = false;
                                    break;
                                }
                            }

                            if (addColumn) {
                                temp.add(cd);
                            }

                        }
                        columns = temp; // note: size should not have changed
                    }

                }
            }
            // otherwise just populate the columns in the file
            // with all the selected columns
            else {

                for (int j = 0; j < columnCount; j++) {

                    fileImportedColumns.put(columns.get(j), INCLUDED_COLUMN);
                }

            }

            /*
            Log.debug("ignored count: " + ignoredCount);
            for (int j = 0; j < columnCount; j++) {
            Log.debug("column: " + columns.get(j));
            }
            */

            fileSize = inputFile.length();
            progressStatus = 10;
            progressCheck = (int) (fileSize / progressStatus);

            // prepare the statement
            prepareStatement(dto.getTableName(), columns);

            if (parsingDates && dateFormat == null) {

                // check for a date data type
                for (int j = 0; j < columnCount; j++) {

                    if (dateFormat == null && !hasDate) {

                        ColumnData cd = columns.get(j);

                        if (fileImportedColumns.containsKey(cd)) {

                            if (cd.isDateDataType()) {

                                hasDate = true;
                                break;
                            }

                        }

                    }
                }

                if (hasDate && dateFormat == null) {

                    String pattern = verifyDate();

                    if (StringUtils.isNotBlank(pattern)) {

                        fileReader.close();
                        setProgressStatus(100);
                        throw new InterruptedException();
                    }

                    dateFormat = createDateFormatter(pattern);
                }

            }

            rowLength = 0;

            while ((row = reader.readLine()) != null) {

                insertLine = true;
                lineNumber++;
                tableRowCount++;
                totalRecordCount++;

                if (Thread.interrupted()) {

                    fileReader.close();
                    printTableResult(tableRowCount, tableCommitCount, dto.getTableName());

                    setProgressStatus(100);
                    throw new InterruptedException();
                }

                currentRowLength = row.length();

                if (currentRowLength == 0) {

                    outputBuffer.append("Line ");
                    outputBuffer.append(lineNumber);
                    outputBuffer.append(" contains no delimited values");
                    appendProgressWarningText(outputBuffer);

                    int yesNo = GUIUtilities.displayYesNoDialog("No values provided from line " + lineNumber
                            + " - the row is blank.\n" + "Do you wish to continue?", "Warning");

                    if (yesNo == JOptionPane.YES_OPTION) {
                        continue;
                    } else {
                        throw new InterruptedException();
                    }
                }

                rowLength += currentRowLength;
                if (progressCheck < rowLength) {

                    setProgressStatus(progressStatus);
                    progressStatus += 10;
                    rowLength = 0;
                }

                // reset matcher with current row
                matcher.reset(row);

                int index = 0;
                int lastIndex = -1;
                int loopIgnoredCount = 0;

                //Log.debug(row);

                for (int j = 0; j < filesColumnCount; j++) {

                    if (matcher.find(index)) {

                        String first = matcher.group(2);

                        if (first != null) {

                            value = first;

                        } else {

                            qMatcher.reset(matcher.group(1));
                            value = qMatcher.replaceAll("\"");
                        }

                        index = matcher.end();

                        // check if its an ignored column
                        if (ignoredCount > 0) {

                            if (isIndexIgnored(ignoredIndexes, j)) {

                                loopIgnoredCount++;
                                continue;
                            }

                        }

                    } else {

                        // not enough delims check
                        if (j < (filesColumnCount - 1) && index > (currentRowLength - 1)) {

                            outputBuffer.append("Insufficient number of column ");
                            outputBuffer.append("values provided at line ");
                            outputBuffer.append(lineNumber);
                            appendProgressErrorText(outputBuffer);

                            int yesNo = GUIUtilities
                                    .displayYesNoDialog("Insufficient number of values provided from line "
                                            + lineNumber + ".\n" + "Do you wish to continue?", "Warning");

                            if (yesNo == JOptionPane.YES_OPTION) {

                                insertLine = false;
                                break;

                            } else {

                                throw new InterruptedException();
                            }

                        } else {

                            // check if we're on a delim the matcher didn't pick up

                            int delimLength = delim.length();

                            if (row.substring(index, index + delimLength).equals(delim)) {

                                // increment index
                                index++;
                                // assign as null value
                                value = null;
                            }

                        }

                    }

                    // check if we landed on the same index - likely null value
                    if (index == lastIndex) {
                        index++;
                    }
                    lastIndex = index;

                    if (value != null && value.trim().length() == 0) {
                        value = null;
                    }

                    try {
                        ColumnData cd = columns.get(j - loopIgnoredCount);
                        setValue(value, getIndexOfColumn(columns, cd) + 1, cd.getSQLType(), trimWhitespace,
                                dateFormat);

                        if (hasColumnNames) {
                            boundVariables.put(cd, VARIABLE_BOUND);
                        }

                    } catch (ParseException e) {

                        errorCount++;
                        failed = true;
                        outputBuffer.append("Error parsing date value - ");
                        outputBuffer.append(value);
                        outputBuffer.append(" - on line ");
                        outputBuffer.append(lineNumber);
                        outputBuffer.append(" at position ");
                        outputBuffer.append(j);
                        outputExceptionError(null, e);
                        break;

                    } catch (NumberFormatException e) {

                        errorCount++;
                        failed = true;
                        outputBuffer.append("Error parsing value - ");
                        outputBuffer.append(value);
                        outputBuffer.append(" - on line ");
                        outputBuffer.append(lineNumber);
                        outputBuffer.append(" at position ");
                        outputBuffer.append(j);
                        outputExceptionError(null, e);
                        break;
                    }

                }

                if (!insertLine) {

                    prepStmnt.clearParameters();
                    continue;
                }

                if (failed && haltOnError) {

                    processResult = FAILED;
                    break;
                }

                // execute the statement
                try {

                    // check all variables are bound if we used
                    // the column names from the first row
                    if (hasColumnNames) {

                        index = 0;
                        // check all variables are bound - insert NULL otherwise

                        for (Map.Entry<ColumnData, String> entry : boundVariables.entrySet()) {

                            ColumnData cd = entry.getKey();

                            if (VARIABLE_NOT_BOUND.equals(entry.getValue())) {

                                index = getIndexOfColumn(columns, cd);
                                prepStmnt.setNull(index + 1, cd.getSQLType());
                            }

                        }

                    }

                    if (isBatch) {
                        prepStmnt.addBatch();
                    } else {
                        int result = prepStmnt.executeUpdate();
                        tableInsertCount += result;
                        commitCount += result;
                    }

                    rollbackCount++;
                    // check the rollback segment
                    if (rollbackCount == rollbackSize) {
                        if (isBatch) {
                            int result = getBatchResult(prepStmnt.executeBatch())[0];
                            tableInsertCount += result;
                            commitCount += result;
                            prepStmnt.clearBatch();
                        }
                        conn.commit();
                        totalInsertCount += commitCount;
                        tableCommitCount = tableInsertCount;
                        rollbackCount = 0;
                        commitCount = 0;
                    }

                    // reset bound variables
                    if (hasColumnNames) {
                        for (int k = 0; k < columnCount; k++) {
                            boundVariables.put(columns.get(k), VARIABLE_NOT_BOUND);
                        }
                    }

                } catch (SQLException e) {
                    logException(e);
                    errorCount++;

                    if (!isBatch) {
                        outputBuffer.append("Error inserting data from line ");
                        outputBuffer.append(lineNumber);
                        outputExceptionError(null, e);
                    } else {
                        outputBuffer.append("Error on last batch execution");
                        outputExceptionError(null, e);
                    }

                    if (haltOnError) {
                        processResult = FAILED;
                        conn.rollback();
                        getParent().cancelTransfer();
                        throw new InterruptedException();
                    }

                }

            }

            // ----------------------------
            // file/table has ended here

            if (isBatch) {

                int[] batchResult = null;

                try {
                    batchResult = getBatchResult(prepStmnt.executeBatch());
                    int result = batchResult[0];
                    tableInsertCount += result;
                    commitCount += result;
                    tableCommitCount = tableInsertCount;
                } catch (BatchUpdateException e) {
                    logException(e);
                    int[] updateCounts = e.getUpdateCounts();
                    batchResult = getBatchResult(updateCounts);
                    errorCount += batchResult[1];
                    if (errorCount == 0) {
                        errorCount = 1;
                    }

                    outputBuffer.append("An error occured during the batch process: ");
                    outputBuffer.append(e.getMessage());

                    SQLException _e = e.getNextException();
                    while (_e != null) {
                        outputBuffer.append("\nNext Exception: ");
                        outputBuffer.append(_e.getMessage());
                        _e = _e.getNextException();
                    }

                    outputBuffer.append("\n\nRecords processed to the point ");
                    outputBuffer.append("where this error occurred: ");
                    outputBuffer.append(updateCounts.length);
                    appendProgressErrorText(outputBuffer);
                    processResult = FAILED;
                }

                //  Log.debug("commitCount: " + commitCount +
                //                      " batch: " + batchResult[0]);

                if (tableRowCount != tableInsertCount) {
                    conn.rollback();

                    if (onError == ImportExportProcess.STOP_TRANSFER) {
                        getParent().cancelTransfer();
                        processResult = FAILED;
                        throw new InterruptedException();
                    }

                }

            }

            boolean doCommit = true;
            if (failed && !isBatch && rollbackSize != ImportExportProcess.COMMIT_END_OF_ALL_FILES) {

                int yesNo = GUIUtilities.displayYesNoDialog(
                        "The process completed with errors.\n" + "Do you wish to commit the last block?",
                        "Confirm commit");

                doCommit = (yesNo == JOptionPane.YES_OPTION);
            }

            // do the commit if ok from above
            // and if rollback size selected is end of file
            if (rollbackSize == ImportExportProcess.COMMIT_END_OF_FILE) {
                if (doCommit) {
                    conn.commit();
                    totalInsertCount += commitCount;
                    tableCommitCount = tableInsertCount;
                    commitCount = 0;
                } else {
                    conn.rollback();
                }
            }

            // update the progress display
            printTableResult(tableRowCount, tableInsertCount, dto.getTableName());
            setProgressStatus(100);

            // reset the checks
            hasDate = false;
            failed = false;

        }

        // commit the last remaining block or where
        // set to commit at the end of all files
        if (rollbackSize != ImportExportProcess.COMMIT_END_OF_FILE) {
            setProgressStatus(100);
            boolean doCommit = true;
            if (errorCount > 0 && errorCount != totalRecordCount) {
                int yesNo = GUIUtilities.displayYesNoDialog(
                        "The process completed with errors.\n" + "Do you wish to commit the changes?",
                        "Confirm commit");
                doCommit = (yesNo == JOptionPane.YES_OPTION);
            }

            if (doCommit) {
                conn.commit();
                totalInsertCount += commitCount;
            } else {
                conn.rollback();
            }

        }

        processResult = SUCCESS;
    } catch (InterruptedException e) {

        if (processResult != FAILED) {
            processResult = CANCELLED;
        }

        try {
            if (prepStmnt != null) {
                prepStmnt.cancel();
            }
            if (conn != null) {
                conn.rollback();
            }
        } catch (SQLException e2) {
            outputExceptionError("Error rolling back transaction", e);
        }

    } catch (Exception e) {
        logException(e);
        outputBuffer.append("Error processing data from line ");
        outputBuffer.append(lineNumber);
        outputExceptionError("\nUnrecoverable error importing table data from file", e);

        int yesNo = GUIUtilities.displayYesNoDialog(
                "The process encountered errors.\n" + "Do you wish to commit the last transaction block?",
                "Confirm commit");
        boolean doCommit = (yesNo == JOptionPane.YES_OPTION);

        try {
            if (doCommit) {
                conn.commit();
                totalInsertCount += commitCount;
            } else {
                conn.rollback();
            }
        } catch (SQLException e2) {
            logException(e2);
            outputExceptionError("Error processing last transaction block", e2);
        }
        processResult = FAILED;
    } finally {
        finish();
        releaseResources(getParent().getDatabaseConnection());

        if (totalRecordCount == 0 || errorCount > 0) {
            processResult = FAILED;
        }

        setTableCount(tableCount);
        setRecordCount(totalRecordCount);
        setRecordCountProcessed(totalInsertCount);
        setErrorCount(errorCount);

        setProgressStatus(100);
        GUIUtilities.scheduleGC();

        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e) {
            }
        }
        if (fileReader != null) {
            try {
                fileReader.close();
            } catch (IOException e) {
            }
        }
        if (prepStmnt != null) {
            try {
                prepStmnt.close();
            } catch (SQLException e) {
            }
        }

    }

    return processResult;
}
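The import worker above releases its readers by hand: the finally block calls reader.close() and fileReader.close() and deliberately swallows the IOException either call may throw. On Java 7 and later the same cleanup can be written with try-with-resources, which closes the BufferedReader and then the underlying FileReader automatically, even when the import loop fails. The following is a minimal sketch of that pattern, not code from the worker above; the file name and the per-row handling are placeholders.

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

public class ReaderCloseExample {

    public static void main(String[] args) throws IOException {
        // Resources declared here are closed in reverse order when the block exits,
        // whether readLine() completes normally or throws.
        try (FileReader fileReader = new FileReader("data.csv"); // placeholder file name
                BufferedReader reader = new BufferedReader(fileReader)) {

            String row;
            while ((row = reader.readLine()) != null) {
                System.out.println(row); // placeholder for the per-row import work
            }
        }
    }
}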

From source file:base.BasePlayer.FileRead.java

static void checkMulti(Sample sample) {
    try {
        Sample addSample;
        BufferedReader reader = null;
        GZIPInputStream gzip = null;
        FileReader freader = null;
        String line;
        Boolean somatic = Main.drawCanvas.drawVariables.somatic;
        String[] split;
        if (somatic != null && somatic) {
            asked = true;
        }
        if (sample.getTabixFile().endsWith(".gz")) {
            try {
                gzip = new GZIPInputStream(new FileInputStream(sample.getTabixFile()));
                reader = new BufferedReader(new InputStreamReader(gzip));
            } catch (Exception e) {
                Main.showError("Could not read the file: " + sample.getTabixFile()
                        + "\nCheck that you have permission to read the file or try to bgzip and recreate the index file.",
                        "Error");
                Main.drawCanvas.sampleList.remove(sample);
                Main.varsamples--;
                Main.samples--;

            }
        } else {
            freader = new FileReader(sample.getTabixFile());
            reader = new BufferedReader(freader);
        }

        line = reader.readLine();

        if (!sample.multipart && line != null) {

            while (line != null) {
                try {

                    if (line.startsWith("##INFO")) {
                        if (line.contains("Type=Float") || line.contains("Type=Integer")
                                || line.contains("Number=")) {
                            VariantHandler.addMenuComponents(line);
                        }
                    }
                    if (line.startsWith("##FILTER")) {
                        if (line.contains("ID=") || line.contains("Description=") || line.contains("Number=")) {
                            VariantHandler.addMenuComponents(line);
                        }
                    }
                    if (line.startsWith("##FORMAT")) {
                        if (line.contains("Type=Float") || line.contains("Type=Integer")
                                || line.contains("Number=")) {
                            VariantHandler.addMenuComponents(line);
                        }
                    }
                    if (line.toLowerCase().contains("#chrom")) {
                        headersplit = line.split("\t+");

                        if (headersplit.length > 10) {
                            if (headersplit.length == 11 && !asked) {
                                if (JOptionPane.showConfirmDialog(Main.drawScroll, "Is this a somatic project?",
                                        "Somatic?", JOptionPane.YES_NO_OPTION,
                                        JOptionPane.QUESTION_MESSAGE) == JOptionPane.YES_OPTION) {
                                    somatic = true;
                                    Main.drawCanvas.drawVariables.somatic = true;
                                }
                                asked = true;
                            }
                            if (!somatic) {
                                sample.multiVCF = true;
                                Main.varsamples--;
                                for (int h = 9; h < headersplit.length; h++) {
                                    addSample = new Sample(headersplit[h], (short) (Main.samples), null);
                                    addSample.multipart = true;
                                    Main.drawCanvas.sampleList.add(addSample);
                                    Main.samples++;
                                    Main.varsamples++;
                                    if (sampleString == null) {
                                        sampleString = new StringBuffer("");
                                    }
                                    sampleString.append(addSample.getName() + ";");

                                }
                                VariantHandler.commonSlider.setMaximum(Main.varsamples);
                                VariantHandler.commonSlider.setUpperValue(Main.varsamples);
                                VariantHandler.geneSlider.setMaximum(Main.varsamples);
                                Main.drawCanvas.drawVariables.visiblesamples = (short) (Main.drawCanvas.sampleList
                                        .size());
                                Main.drawCanvas.checkSampleZoom();
                                Main.drawCanvas.resizeCanvas(Main.drawScroll.getViewport().getWidth(),
                                        Main.drawScroll.getViewport().getHeight());
                            }
                        }
                        line = reader.readLine();
                        break;
                    }
                    split = line.split("\t");
                    if (split.length > 2 && split[1].matches("\\d+")) {
                        break;
                    }

                } catch (Exception ex) {
                    ex.printStackTrace();
                }
                line = reader.readLine();
            }
            /*
            VariantHandler.menu.setPreferredSize(new Dimension(300,500));
            VariantHandler.menuPanel.setPreferredSize(new Dimension(300,500));*/
            VariantHandler.menuScroll.setPreferredSize(new Dimension(250, 500));
            VariantHandler.menuScrollIndel.setPreferredSize(new Dimension(250, 500));
            if (line == null) {
                return;
            }

            while (line != null && line.startsWith("#")) {
                line = reader.readLine();
            }
            split = line.split("\t");
            if (line.contains("\"")) {
                sample.oddchar = "\"";
            }
            if (split != null && split.length == 8) {
                sample.annoTrack = true;
            }
            if (line != null) {

                if (line.startsWith("chr")) {
                    sample.vcfchr = "chr";
                }
            }
            if (somatic != null && somatic) {
                line = reader.readLine();
                if (line != null) {

                    headersplit = line.split("\t");

                    if (headersplit.length == 11) {

                        if (headersplit[10].startsWith("0:")
                                || (headersplit[10].charAt(0) == '0' && headersplit[10].charAt(2) == '0')) {
                            sample.somaticColumn = 9;
                        } else {
                            sample.somaticColumn = 10;
                        }
                    }

                }
            }
            checkSamples();
            line = null;
            if (freader != null) {
                freader.close();
            }
            reader.close();
            if (gzip != null) {
                gzip.close();
            }
        } else {
            reader.close();
            if (gzip != null) {
                gzip.close();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
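checkMulti() above opens either a plain FileReader or a GZIPInputStream depending on the file extension, and then closes freader, reader and gzip individually. Closing the outermost BufferedReader alone is sufficient, because BufferedReader.close() closes the Reader it wraps, and InputStreamReader.close() in turn closes the GZIPInputStream and its FileInputStream; the extra close() calls are redundant but harmless. A minimal sketch of the same open/close pattern using try-with-resources follows; the helper name and the sample path are illustrative, not part of BasePlayer.

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.zip.GZIPInputStream;

public class VcfHeaderPeek {

    // Opens the file as gzip or plain text; callers close only the returned reader,
    // and close() propagates to the FileReader or GZIPInputStream underneath.
    static BufferedReader open(String path) throws IOException {
        if (path.endsWith(".gz")) {
            return new BufferedReader(new InputStreamReader(new GZIPInputStream(new FileInputStream(path))));
        }
        return new BufferedReader(new FileReader(path));
    }

    public static void main(String[] args) throws IOException {
        try (BufferedReader reader = open("sample.vcf.gz")) { // illustrative path
            String line;
            while ((line = reader.readLine()) != null && line.startsWith("#")) {
                System.out.println(line); // print header lines only
            }
        }
    }
}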

From source file:org.kuali.kfs.gl.batch.service.impl.ScrubberProcessImpl.java

/**
 * Processes a group of origin entries. The COBOL code was heavily refactored here, so there is no
 * one-to-one correspondence between this method and any single section of the original COBOL.
 *
 * @param reportOnlyMode true if the scrubber is running in report-only mode
 * @param scrubberReport the scrubber report data updated as the group is processed
 */
protected void processGroup(boolean reportOnlyMode, ScrubberReportData scrubberReport) {
    OriginEntryFull lastEntry = null;
    scrubCostShareAmount = KualiDecimal.ZERO;
    unitOfWork = new UnitOfWorkInfo();

    FileReader INPUT_GLE_FILE = null;
    String GLEN_RECORD;
    BufferedReader INPUT_GLE_FILE_br;
    try {
        INPUT_GLE_FILE = new FileReader(inputFile);
    } catch (FileNotFoundException e) {
        throw new RuntimeException(e);
    }
    try {
        OUTPUT_GLE_FILE_ps = new PrintStream(validFile);
        OUTPUT_ERR_FILE_ps = new PrintStream(errorFile);
        OUTPUT_EXP_FILE_ps = new PrintStream(expiredFile);
        LOG.info("Successfully opened " + validFile + ", " + errorFile + ", " + expiredFile + " for writing.");
    } catch (IOException e) {
        throw new RuntimeException(e);
    }

    INPUT_GLE_FILE_br = new BufferedReader(INPUT_GLE_FILE);
    int line = 0;
    LOG.debug("Starting Scrubber Process process group...");
    try {
        while ((GLEN_RECORD = INPUT_GLE_FILE_br.readLine()) != null) {
            if (!org.apache.commons.lang.StringUtils.isEmpty(GLEN_RECORD)
                    && !org.apache.commons.lang.StringUtils.isBlank(GLEN_RECORD.trim())) {
                line++;
                OriginEntryFull unscrubbedEntry = new OriginEntryFull();
                List<Message> tmperrors = unscrubbedEntry.setFromTextFileForBatch(GLEN_RECORD, line);
                scrubberReport.incrementUnscrubbedRecordsRead();
                transactionErrors = new ArrayList<Message>();

                //
                // This is done so if the code modifies this row, then saves it, it will be an insert,
                // and it won't touch the original. The Scrubber never modifies input rows/groups.
                // not relevant for file version

                boolean saveErrorTransaction = false;
                boolean saveValidTransaction = false;
                boolean fatalErrorOccurred = false;

                // Build a scrubbed entry
                OriginEntryFull scrubbedEntry = new OriginEntryFull();
                scrubbedEntry.setDocumentNumber(unscrubbedEntry.getDocumentNumber());
                scrubbedEntry.setOrganizationDocumentNumber(unscrubbedEntry.getOrganizationDocumentNumber());
                scrubbedEntry.setOrganizationReferenceId(unscrubbedEntry.getOrganizationReferenceId());
                scrubbedEntry.setReferenceFinancialDocumentNumber(
                        unscrubbedEntry.getReferenceFinancialDocumentNumber());

                Integer transactionNumber = unscrubbedEntry.getTransactionLedgerEntrySequenceNumber();
                scrubbedEntry.setTransactionLedgerEntrySequenceNumber(
                        null == transactionNumber ? new Integer(0) : transactionNumber);
                scrubbedEntry.setTransactionLedgerEntryDescription(
                        unscrubbedEntry.getTransactionLedgerEntryDescription());
                scrubbedEntry
                        .setTransactionLedgerEntryAmount(unscrubbedEntry.getTransactionLedgerEntryAmount());
                scrubbedEntry.setTransactionDebitCreditCode(unscrubbedEntry.getTransactionDebitCreditCode());

                if (!collectorMode) {
                    ledgerSummaryReport.summarizeEntry(unscrubbedEntry);
                }

                // For Labor Scrubber
                boolean laborIndicator = false;
                tmperrors.addAll(scrubberValidator.validateTransaction(unscrubbedEntry, scrubbedEntry,
                        universityRunDate, laborIndicator, accountingCycleCachingService));
                transactionErrors.addAll(tmperrors);

                Account unscrubbedEntryAccount = accountingCycleCachingService.getAccount(
                        unscrubbedEntry.getChartOfAccountsCode(), unscrubbedEntry.getAccountNumber());
                // KFSMI-173: rows for both expired and closed accounts are written to the expired output
                if ((unscrubbedEntryAccount != null)
                        && (scrubberValidator.isAccountExpired(unscrubbedEntryAccount, universityRunDate)
                                || unscrubbedEntryAccount.isClosed())) {
                    // Make a copy of it so OJB doesn't just update the row in the original
                    // group. It needs to make a new one in the expired group
                    OriginEntryFull expiredEntry = OriginEntryFull.copyFromOriginEntryable(scrubbedEntry);
                    createOutputEntry(expiredEntry, OUTPUT_EXP_FILE_ps);
                    scrubberReport.incrementExpiredAccountFound();
                }

                // the collector scrubber uses this map to apply the same changes made on an origin entry during scrubbing to
                // the collector detail record
                if (collectorMode) {
                    unscrubbedToScrubbedEntries.put(unscrubbedEntry, scrubbedEntry);
                }

                if (!isFatal(transactionErrors)) {
                    saveValidTransaction = true;

                    if (!collectorMode) {

                        // See if unit of work has changed
                        if (!unitOfWork.isSameUnitOfWork(scrubbedEntry)) {
                            // Generate offset for last unit of work
                            // pass the String line for generating error files
                            generateOffset(lastEntry, scrubberReport);

                            unitOfWork = new UnitOfWorkInfo(scrubbedEntry);
                        }

                        KualiDecimal transactionAmount = scrubbedEntry.getTransactionLedgerEntryAmount();

                        ParameterEvaluator offsetFiscalPeriods = /*REFACTORME*/SpringContext
                                .getBean(ParameterEvaluatorService.class)
                                .getParameterEvaluator(ScrubberStep.class,
                                        GeneralLedgerConstants.GlScrubberGroupRules.OFFSET_FISCAL_PERIOD_CODES,
                                        scrubbedEntry.getUniversityFiscalPeriodCode());

                        BalanceType scrubbedEntryBalanceType = accountingCycleCachingService
                                .getBalanceType(scrubbedEntry.getFinancialBalanceTypeCode());
                        if (scrubbedEntryBalanceType.isFinancialOffsetGenerationIndicator()
                                && offsetFiscalPeriods.evaluationSucceeds()) {
                            if (scrubbedEntry.isDebit()) {
                                unitOfWork.offsetAmount = unitOfWork.offsetAmount.add(transactionAmount);
                            } else {
                                unitOfWork.offsetAmount = unitOfWork.offsetAmount.subtract(transactionAmount);
                            }
                        }

                        // The sub account type code will only exist if there is a valid sub account
                        String subAccountTypeCode = GeneralLedgerConstants.getSpaceSubAccountTypeCode();
                        // major assumption: the a21 subaccount is proxied, so we don't want to
                        // query the database if the subaccount number is dashes
                        if (!KFSConstants.getDashSubAccountNumber()
                                .equals(scrubbedEntry.getSubAccountNumber())) {
                            A21SubAccount scrubbedEntryA21SubAccount = accountingCycleCachingService
                                    .getA21SubAccount(scrubbedEntry.getChartOfAccountsCode(),
                                            scrubbedEntry.getAccountNumber(),
                                            scrubbedEntry.getSubAccountNumber());
                            if (ObjectUtils.isNotNull(scrubbedEntryA21SubAccount)) {
                                subAccountTypeCode = scrubbedEntryA21SubAccount.getSubAccountTypeCode();
                            }
                        }

                        ParameterEvaluator costShareObjectTypeCodes = /*REFACTORME*/SpringContext
                                .getBean(ParameterEvaluatorService.class)
                                .getParameterEvaluator(ScrubberStep.class,
                                        GeneralLedgerConstants.GlScrubberGroupRules.COST_SHARE_OBJ_TYPE_CODES,
                                        scrubbedEntry.getFinancialObjectTypeCode());
                        ParameterEvaluator costShareEncBalanceTypeCodes = /*REFACTORME*/SpringContext
                                .getBean(ParameterEvaluatorService.class)
                                .getParameterEvaluator(ScrubberStep.class,
                                        GeneralLedgerConstants.GlScrubberGroupRules.COST_SHARE_ENC_BAL_TYP_CODES,
                                        scrubbedEntry.getFinancialBalanceTypeCode());
                        ParameterEvaluator costShareEncFiscalPeriodCodes = /*REFACTORME*/SpringContext
                                .getBean(ParameterEvaluatorService.class)
                                .getParameterEvaluator(ScrubberStep.class,
                                        GeneralLedgerConstants.GlScrubberGroupRules.COST_SHARE_ENC_FISCAL_PERIOD_CODES,
                                        scrubbedEntry.getUniversityFiscalPeriodCode());
                        ParameterEvaluator costShareEncDocTypeCodes = /*REFACTORME*/SpringContext
                                .getBean(ParameterEvaluatorService.class)
                                .getParameterEvaluator(ScrubberStep.class,
                                        GeneralLedgerConstants.GlScrubberGroupRules.COST_SHARE_ENC_DOC_TYPE_CODES,
                                        scrubbedEntry.getFinancialDocumentTypeCode().trim());
                        ParameterEvaluator costShareFiscalPeriodCodes = /*REFACTORME*/SpringContext
                                .getBean(ParameterEvaluatorService.class)
                                .getParameterEvaluator(ScrubberStep.class,
                                        GeneralLedgerConstants.GlScrubberGroupRules.COST_SHARE_FISCAL_PERIOD_CODES,
                                        scrubbedEntry.getUniversityFiscalPeriodCode());
                        Account scrubbedEntryAccount = accountingCycleCachingService.getAccount(
                                scrubbedEntry.getChartOfAccountsCode(), scrubbedEntry.getAccountNumber());

                        if (costShareObjectTypeCodes.evaluationSucceeds()
                                && costShareEncBalanceTypeCodes.evaluationSucceeds()
                                && scrubbedEntryAccount.isForContractsAndGrants()
                                && KFSConstants.SubAccountType.COST_SHARE.equals(subAccountTypeCode)
                                && costShareEncFiscalPeriodCodes.evaluationSucceeds()
                                && costShareEncDocTypeCodes.evaluationSucceeds()) {
                            TransactionError te1 = generateCostShareEncumbranceEntries(scrubbedEntry,
                                    scrubberReport);
                            if (te1 != null) {
                                List errors = new ArrayList();
                                errors.add(te1.message);
                                handleTransactionErrors(te1.transaction, errors);
                                saveValidTransaction = false;
                                saveErrorTransaction = true;
                            }
                        }

                        SystemOptions scrubbedEntryOption = accountingCycleCachingService
                                .getSystemOptions(scrubbedEntry.getUniversityFiscalYear());
                        if (costShareObjectTypeCodes.evaluationSucceeds()
                                && scrubbedEntryOption.getActualFinancialBalanceTypeCd()
                                        .equals(scrubbedEntry.getFinancialBalanceTypeCode())
                                && scrubbedEntryAccount.isForContractsAndGrants()
                                && KFSConstants.SubAccountType.COST_SHARE.equals(subAccountTypeCode)
                                && costShareFiscalPeriodCodes.evaluationSucceeds()
                                && costShareEncDocTypeCodes.evaluationSucceeds()) {
                            if (scrubbedEntry.isDebit()) {
                                scrubCostShareAmount = scrubCostShareAmount.subtract(transactionAmount);
                            } else {
                                scrubCostShareAmount = scrubCostShareAmount.add(transactionAmount);
                            }
                        }

                        ParameterEvaluator otherDocTypeCodes = /*REFACTORME*/SpringContext
                                .getBean(ParameterEvaluatorService.class)
                                .getParameterEvaluator(ScrubberStep.class,
                                        GeneralLedgerConstants.GlScrubberGroupRules.OFFSET_DOC_TYPE_CODES,
                                        scrubbedEntry.getFinancialDocumentTypeCode());

                        if (otherDocTypeCodes.evaluationSucceeds()) {
                            String m = processCapitalization(scrubbedEntry, scrubberReport);
                            if (m != null) {
                                saveValidTransaction = false;
                                saveErrorTransaction = false;
                                addTransactionError(m, "", Message.TYPE_FATAL);
                            }

                            m = processLiabilities(scrubbedEntry, scrubberReport);
                            if (m != null) {
                                saveValidTransaction = false;
                                saveErrorTransaction = false;
                                addTransactionError(m, "", Message.TYPE_FATAL);
                            }

                            m = processPlantIndebtedness(scrubbedEntry, scrubberReport);
                            if (m != null) {
                                saveValidTransaction = false;
                                saveErrorTransaction = false;
                                addTransactionError(m, "", Message.TYPE_FATAL);
                            }
                        }

                        if (!scrubCostShareAmount.isZero()) {
                            TransactionError te = generateCostShareEntries(scrubbedEntry, scrubberReport);

                            if (te != null) {
                                saveValidTransaction = false;
                                saveErrorTransaction = false;

                                // Make a copy of it so OJB doesn't just update the row in the original
                                // group. It needs to make a new one in the error group
                                OriginEntryFull errorEntry = new OriginEntryFull(te.transaction);
                                errorEntry.setTransactionScrubberOffsetGenerationIndicator(false);
                                createOutputEntry(GLEN_RECORD, OUTPUT_ERR_FILE_ps);
                                scrubberReport.incrementErrorRecordWritten();
                                unitOfWork.errorsFound = true;

                                handleTransactionError(te.transaction, te.message);
                            }
                            scrubCostShareAmount = KualiDecimal.ZERO;
                        }

                        lastEntry = scrubbedEntry;
                    }
                } else {
                    // Error transaction
                    saveErrorTransaction = true;
                    fatalErrorOccurred = true;
                }
                handleTransactionErrors(OriginEntryFull.copyFromOriginEntryable(unscrubbedEntry),
                        transactionErrors);

                if (saveValidTransaction) {
                    scrubbedEntry.setTransactionScrubberOffsetGenerationIndicator(false);
                    createOutputEntry(scrubbedEntry, OUTPUT_GLE_FILE_ps);
                    scrubberReport.incrementScrubbedRecordWritten();
                }

                if (saveErrorTransaction) {
                    // Make a copy of it so OJB doesn't just update the row in the original
                    // group. It needs to make a new one in the error group
                    OriginEntryFull errorEntry = OriginEntryFull.copyFromOriginEntryable(unscrubbedEntry);
                    errorEntry.setTransactionScrubberOffsetGenerationIndicator(false);
                    createOutputEntry(GLEN_RECORD, OUTPUT_ERR_FILE_ps);
                    scrubberReport.incrementErrorRecordWritten();
                    if (!fatalErrorOccurred) {
                        // if a fatal error occurred, the creation of a new unit of work was by-passed;
                        // therefore, it shouldn't ruin the previous unit of work
                        unitOfWork.errorsFound = true;
                    }
                }
            }
        }

        if (!collectorMode) {
            // Generate last offset (if necessary)
            generateOffset(lastEntry, scrubberReport);
        }

        INPUT_GLE_FILE_br.close();
        INPUT_GLE_FILE.close();
        OUTPUT_GLE_FILE_ps.close();
        OUTPUT_ERR_FILE_ps.close();
        OUTPUT_EXP_FILE_ps.close();
        LOG.info("Successfully writen and closed " + validFile + ", " + errorFile + ", " + expiredFile + ".");

        handleEndOfScrubberReport(scrubberReport);

        if (!collectorMode) {
            ledgerSummaryReport.writeReport(this.scrubberLedgerReportWriterService);
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
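processGroup() above closes INPUT_GLE_FILE, its BufferedReader and the three PrintStreams only at the end of the try block, so an IOException thrown while reading is rethrown as a RuntimeException with the streams still open. A small sketch of the same resources released in a finally block follows; the file names and the per-record work are placeholders, and only the readers need the inner try/catch because PrintStream.close() does not throw.

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintStream;

public class ScrubberStreamsSketch {

    public static void main(String[] args) throws IOException {
        FileReader input = null;
        BufferedReader reader = null;
        PrintStream valid = null;
        try {
            input = new FileReader("entries.txt");  // placeholder input file
            reader = new BufferedReader(input);
            valid = new PrintStream("valid.txt");   // placeholder output file

            String record;
            while ((record = reader.readLine()) != null) {
                valid.println(record); // placeholder for the real scrubbing work
            }
        } finally {
            // release everything even if readLine() or the processing throws
            if (reader != null) {
                try { reader.close(); } catch (IOException ignored) { }
            }
            if (input != null) {
                try { input.close(); } catch (IOException ignored) { }
            }
            if (valid != null) {
                valid.close(); // PrintStream swallows I/O errors instead of throwing
            }
        }
    }
}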