Example usage for java.lang.System.gc()

List of usage examples for java.lang.System.gc()

Introduction

On this page you can find usage examples for java.lang.System.gc(), collected from various source files.

Prototype

public static void gc() 

Document

Runs the garbage collector in the Java Virtual Machine.
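
Note that gc() is only a request: the JVM may expend effort reclaiming unused objects in response, or it may do nothing at all. The following minimal, self-contained sketch (not taken from any of the projects listed below) illustrates the hint semantics by comparing used heap before and after the call:

public class GcHintDemo {
    public static void main(String[] args) {
        Runtime rt = Runtime.getRuntime();

        byte[] garbage = new byte[16 * 1024 * 1024]; // allocate something collectable
        garbage = null;                              // drop the only reference to it

        long before = rt.totalMemory() - rt.freeMemory();
        System.gc();                                 // a hint, not a guarantee
        long after = rt.totalMemory() - rt.freeMemory();

        System.out.println("Used heap before the hint: " + before / 1024 + " KB");
        System.out.println("Used heap after the hint:  " + after / 1024 + " KB");
    }
}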

Usage

From source file:MSUmpire.PeptidePeakClusterDetection.PDHandlerBase.java

protected void FindAllMzTracePeakCurves(ScanCollection scanCollection) throws IOException {
    //        final HashSet<String> IncludedHashMap = new HashSet<>();

    //        Logger.getRootLogger().info("Processing all scans to detect possible m/z peak curves....");
    Logger.getRootLogger().info("Processing all scans to detect possible m/z peak curves and");
    Logger.getRootLogger().info("Smoothing detected signals......");
    float preRT = 0f;

    //Loop for each scan in the ScanCollection
    final ArrayList<ForkJoinTask<ArrayList<PeakCurve>>> ftemp = new ArrayList<>();
    final ForkJoinPool fjp = new ForkJoinPool(NoCPUs);
    final int idx_end = scanCollection.GetScanNoArray(MSlevel).size();

    final int[] ia = new int[idx_end + 1];
    ia[0] = 0;
    for (int idx = 0; idx < idx_end; idx++) {
        final int scanNO = scanCollection.GetScanNoArray(MSlevel).get(idx);
        final ScanData sd = scanCollection.GetScan(scanNO);
        ia[idx + 1] = sd.Data.size() + ia[idx];
    }

    final boolean[] included = new boolean[ia[ia.length - 1]];
    if (step == -1)
        step = fjp.getParallelism() * 32;
    long peakCurvesCount = 0;
    for (int idx = 0; idx < idx_end; idx++) {
        int scanNO = scanCollection.GetScanNoArray(MSlevel).get(idx);
        ScanData scanData = scanCollection.GetScan(scanNO);

        //If we are doing targeted peak detection and the RT of current scan is not in the range of targeted list, jump to the next scan 
        if (TargetedOnly && !FoundInInclusionRTList(scanData.RetentionTime)) {
            continue;
        }
        if (idx == 0) {
            preRT = scanData.RetentionTime - 0.01f;
        }
        for (int i = 0; i < scanData.PointCount(); i++) {
            XYData peak = scanData.Data.get(i);
            //If we are doing targeted peak detection and the RT and m/z of current peak is not in the range of targeted list, jump to the next peak 
            if (TargetedOnly && !FoundInInclusionMZList(scanData.RetentionTime, peak.getX())) {
                continue;
            }

            if (peak.getX() < parameter.MinMZ) {
                continue;
            }

            //Check if the current peak has been included in previously developed peak curves
            //                if (!IncludedHashMap.contains(scanNO + "_" + peak.getX())) {//The peak hasn't been included
            final int id_scanNO_peak = int_id(ia, idx, i);
            if (!included[id_scanNO_peak]) {//The peak hasn't been included
                //The current peak will be the starting peak of a new peak curve
                //Add it to the hash table

                //                    IncludedHashMap.add(scanNO + "_" + peak.getX());
                included[id_scanNO_peak] = true;

                float startmz = peak.getX();
                float startint = peak.getY();

                //Find the maximum peak within PPM window as the starting peak
                for (int j = i + 1; j < scanData.PointCount(); j++) {
                    XYData currentpeak = scanData.Data.get(j);
                    final int id_scanNO_currentpeak = int_id(ia, idx, j);
                    if (!included[id_scanNO_currentpeak]) {
                        //                        if (!IncludedHashMap.contains(scanNO + "_" + currentpeak.getX())) {
                        if (InstrumentParameter.CalcPPM(currentpeak.getX(), startmz) <= PPM) {
                            included[id_scanNO_currentpeak] = true;
                            //                                IncludedHashMap.add(scanNO + "_" + currentpeak.getX());

                            if (currentpeak.getY() >= startint) {
                                startmz = currentpeak.getX();
                                startint = currentpeak.getY();
                            }
                        } else {
                            break;
                        }
                    }
                }

                //Initialize a new peak curve
                PeakCurve Peakcurve = new PeakCurve(parameter);
                //Add a background peak
                Peakcurve.AddPeak(preRT, startmz, scanData.background);
                //Add the starting peak
                Peakcurve.AddPeak(scanData.RetentionTime, startmz, startint);
                Peakcurve.StartScan = scanNO;

                int missedScan = 0;
                float endrt = scanData.RetentionTime;
                int endScan = scanData.ScanNum;
                float bk = 0f;

                //Starting from the next scan, find the following peaks given the starting peak
                for (int idx2 = idx + 1; idx2 < scanCollection.GetScanNoArray(MSlevel).size()
                        && (missedScan < parameter.NoMissedScan /*|| (TargetedOnly && Peakcurve.RTWidth()<parameter.MaxCurveRTRange)*/); idx2++) {
                    int scanNO2 = scanCollection.GetScanNoArray(MSlevel).get(idx2);
                    ScanData scanData2 = scanCollection.GetScan(scanNO2);

                    endrt = scanData2.RetentionTime;
                    endScan = scanData2.ScanNum;
                    bk = scanData2.background;
                    float currentmz = 0f;
                    float currentint = 0f;

                    //If the scan is empty
                    if (scanData2.PointCount() == 0) {
                        if (parameter.FillGapByBK) {
                            Peakcurve.AddPeak(scanData2.RetentionTime, Peakcurve.TargetMz,
                                    scanData2.background);
                        }
                        missedScan++;
                        continue;
                    }

                    //Find the m/z index 
                    int mzidx = scanData2.GetLowerIndexOfX(Peakcurve.TargetMz);
                    for (int pkidx = mzidx; pkidx < scanData2.Data.size(); pkidx++) {
                        XYData currentpeak = scanData2.Data.get(pkidx);
                        if (currentpeak.getX() < parameter.MinMZ) {
                            continue;
                        }
                        //Check if the peak has been included or not
                        final int int_id_scanNO2_currentpeak = int_id(ia, idx2, pkidx);
                        //                            if (!included.get(int_id_scanNO2_currentpeak)) {
                        if (!included[int_id_scanNO2_currentpeak]) {
                            if (InstrumentParameter.CalcPPM(currentpeak.getX(), Peakcurve.TargetMz) > PPM) {
                                if (currentpeak.getX() > Peakcurve.TargetMz) {
                                    break;
                                }
                            } else {
                                //////////The peak is in the ppm window, select the highest peak
                                included[int_id_scanNO2_currentpeak] = true;
                                //                                    IncludedHashMap.add(scanNO2 + "_" + currentpeak.getX());
                                if (currentint < currentpeak.getY()) {
                                    currentmz = currentpeak.getX();
                                    currentint = currentpeak.getY();
                                }
                            }
                        }
                    }

                    //No peak in the PPM window has been found
                    if (currentmz == 0f) {
                        if (parameter.FillGapByBK) {
                            Peakcurve.AddPeak(scanData2.RetentionTime, Peakcurve.TargetMz,
                                    scanData2.background);
                        }
                        missedScan++;
                    } else {
                        missedScan = 0;
                        Peakcurve.AddPeak(scanData2.RetentionTime, currentmz, currentint);
                    }
                }
                Peakcurve.AddPeak(endrt, Peakcurve.TargetMz, bk);
                Peakcurve.EndScan = endScan;

                //First check if the peak curve is in targeted list
                if (FoundInInclusionList(Peakcurve.TargetMz, Peakcurve.StartRT(), Peakcurve.EndRT())) {
                    //                        LCMSPeakBase.UnSortedPeakCurves.add(Peakcurve);
                    ++peakCurvesCount;
                    ftemp.add(fjp.submit(new PeakCurveSmoothingUnit(Peakcurve, parameter)));
                    //Then check if the peak curve passes the criteria
                } else if (Peakcurve.GetRawSNR() > LCMSPeakBase.SNR
                        && Peakcurve.GetPeakList().size() >= parameter.MinPeakPerPeakCurve + 2) {
                    //                        LCMSPeakBase.UnSortedPeakCurves.add(Peakcurve);
                    ++peakCurvesCount;
                    ftemp.add(fjp.submit(new PeakCurveSmoothingUnit(Peakcurve, parameter)));
                } else {
                    Peakcurve = null;
                }
            }
        }
        preRT = scanData.RetentionTime;
        if (ReleaseScans) {
            scanData.dispose();
        }
        /** the if statement below does PeakCurveSmoothing() and ClearRawPeaks()
         */
        final boolean last_iter = idx + 1 == idx_end;
        if (ftemp.size() == step || last_iter) {
            final List<ForkJoinTask<ArrayList<PeakCurve>>> ftemp_sublist_view = last_iter ? ftemp
                    : ftemp.subList(0, step / 2);
            for (final Future<ArrayList<PeakCurve>> f : ftemp_sublist_view) {
                try {
                    LCMSPeakBase.UnSortedPeakCurves.addAll(f.get());
                } catch (InterruptedException | ExecutionException e) {
                    throw new RuntimeException(e);
                }
            }
            ftemp_sublist_view.clear();
            if (!last_iter && fjp.getActiveThreadCount() < fjp.getParallelism()) {
                //                    System.out.println("PeakCurveSmoothingUnit: fjp.getActiveThreadCount()\t"+fjp.getActiveThreadCount()+"\t"+step);
                step *= 2;
            }
        }
    }
    assert ftemp.isEmpty();
    //System.out.print("PSM removed (PeakCurve generation):" + PSMRemoved );

    int i = 1;
    //Assign peak curve index
    for (PeakCurve peakCurve : LCMSPeakBase.UnSortedPeakCurves) {
        peakCurve.Index = i++;
    }

    System.gc();
    //        Logger.getRootLogger().info(LCMSPeakBase.UnSortedPeakCurves.size() + " Peak curves found (Memory usage:" + Math.round((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1048576) + "MB)");
    Logger.getRootLogger()
            .info(peakCurvesCount + " Peak curves found (Memory usage:"
                    + Math.round(
                            (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1048576)
                    + "MB)");
}

From source file:com.atlassian.jira.ComponentManager.java

private static void gc() {
    int count = 0;
    Object obj = new Object();
    WeakReference<Object> ref = new java.lang.ref.WeakReference<Object>(obj);

    //noinspection UnusedAssignment
    obj = null;

    // break after 10 attempts
    while (count < 10 && ref.get() != null) {
        count++;
        log.debug("Attempting to do a garbage collection:" + count);
        System.gc();
    }
}
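
The idiom above polls a weakly referenced sentinel object to check whether a collection cycle actually ran, rather than trusting the gc() call itself. A standalone, hedged sketch of the same idea (class and method names here are illustrative, not from the Atlassian source):

import java.lang.ref.WeakReference;

public class BestEffortGc {

    /** Returns true if a collection cycle was observed within maxAttempts hints. */
    public static boolean tryForceGc(int maxAttempts) {
        Object sentinel = new Object();
        WeakReference<Object> ref = new WeakReference<>(sentinel);
        sentinel = null; // the sentinel is now only weakly reachable

        int attempts = 0;
        while (attempts < maxAttempts && ref.get() != null) {
            attempts++;
            System.gc(); // ask for a collection; the JVM may or may not comply
        }
        return ref.get() == null;
    }

    public static void main(String[] args) {
        System.out.println("GC observed: " + tryForceGc(10));
    }
}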

From source file:com.web.server.WarDeployer.java

/**
 * This method is the implementation of the war deployer which frequently scans the deploy
 * directory and if there is a change in war redeploys and configures the map
 */
public void run() {
    File file;
    ConcurrentHashMap filePrevMap = new ConcurrentHashMap();
    ConcurrentHashMap fileCurrMap = new ConcurrentHashMap();

    FileInfo filePrevLastModified;
    FileInfo fileCurrLastModified;
    String filePath;
    FileInfo fileinfoTmp;
    URLClassLoader loader = (URLClassLoader) ClassLoader.getSystemClassLoader();
    URL[] urls = loader.getURLs();
    warsDeployed = new CopyOnWriteArrayList();
    //System.out.println("URLS="+urls[0]);
    WebClassLoader customClassLoader;
    while (true) {
        file = new File(scanDirectory);
        File[] files = file.listFiles();
        for (int i = 0; i < files.length; i++) {
            if (files[i].isDirectory())
                continue;
            //Long lastModified=(Long) fileMap.get(files[i].getName());
            if (files[i].getName().endsWith(".war")) {
                filePath = files[i].getAbsolutePath();
                //logger.info("filePath"+filePath);
                filePath = filePath.substring(0, filePath.toLowerCase().lastIndexOf(".war"));
                File warDirectory = new File(filePath);
                fileinfoTmp = new FileInfo();
                fileinfoTmp.setFile(files[i]);
                fileinfoTmp.setLastModified(files[i].lastModified());
                if (!warDirectory.exists() || fileCurrMap.get(files[i].getName()) == null
                        && filePrevMap.get(files[i].getName()) == null) {
                    if (warDirectory.exists()) {
                        deleteDir(warDirectory);
                    }
                    customClassLoader = new WebClassLoader(urls);
                    synchronized (urlClassLoaderMap) {
                        logger.info("WARDIRECTORY=" + warDirectory.getAbsolutePath());
                        urlClassLoaderMap.put(warDirectory.getAbsolutePath().replace("\\", "/"),
                                customClassLoader);
                    }
                    extractWar(files[i], customClassLoader);
                    //System.out.println("War Deployed Successfully in path: "+filePath);
                    AddUrlToClassLoader(warDirectory, customClassLoader);
                    numberOfWarDeployed++;
                    logger.info(files[i] + " Deployed");
                    warsDeployed.add(files[i].getName());
                    filePrevMap.put(files[i].getName(), fileinfoTmp);
                }
                fileCurrMap.put(files[i].getName(), fileinfoTmp);
            }
            /*if(lastModified==null||lastModified!=files[i].lastModified()){
               fileMap.put(files[i].getName(),files[i].lastModified());
            }*/
        }
        Set keyset = fileCurrMap.keySet();
        Iterator ite = keyset.iterator();
        String fileName;
        while (ite.hasNext()) {
            fileName = (String) ite.next();
            //logger.info("fileName"+fileName);
            filePrevLastModified = (FileInfo) filePrevMap.get(fileName);
            fileCurrLastModified = (FileInfo) fileCurrMap.get(fileName);
            if (filePrevLastModified != null)
                //logger.info("lastmodified="+filePrevLastModified.getLastModified());
                //System.out.println("prevmodified"+fileCurrLastModified.getLastModified()+""+filePrevLastModified.getLastModified());
                if (fileCurrLastModified != null) {
                    //System.out.println("prevmodified"+fileCurrLastModified.getLastModified());
                }
            if (filePrevLastModified == null
                    || filePrevLastModified.getLastModified() != fileCurrLastModified.getLastModified()) {
                filePath = fileCurrLastModified.getFile().getAbsolutePath();
                //logger.info("filePath"+filePath);
                filePath = filePath.substring(0, filePath.toLowerCase().lastIndexOf(".war"));
                File warDirectory = new File(filePath);
                //logger.info("WARDIRECTORY="+warDirectory.getAbsolutePath());
                if (warDirectory.exists()) {
                    WebClassLoader webClassLoader = (WebClassLoader) urlClassLoaderMap
                            .get(warDirectory.getAbsolutePath().replace("\\", "/"));
                    synchronized (executorServiceMap) {
                        try {
                            new ExecutorServicesConstruct().removeExecutorServices(executorServiceMap,
                                    new File(warDirectory.getAbsolutePath().replace("\\", "/") + "/WEB-INF/"
                                            + "executorservices.xml"),
                                    webClassLoader);
                        } catch (Exception e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                        //System.out.println("executorServiceMap"+executorServiceMap);
                    }
                    synchronized (messagingClassMap) {
                        try {
                            new MessagingClassConstruct().removeMessagingClass(messagedigester,
                                    new File(warDirectory.getAbsolutePath().replace("\\", "/") + "/WEB-INF/"
                                            + "messagingclass.xml"),
                                    messagingClassMap);
                        } catch (Exception e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                        System.out.println("executorServiceMap" + executorServiceMap);
                    }
                    ClassLoaderUtil.cleanupJarFileFactory(ClassLoaderUtil.closeClassLoader(webClassLoader));
                    try {
                        webClassLoader.close();
                    } catch (IOException e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                    logger.info("ServletMapping" + servletMapping);
                    logger.info("warDirectory=" + warDirectory.getAbsolutePath().replace("\\", "/"));
                    urlClassLoaderMap.remove(warDirectory.getAbsolutePath().replace("\\", "/"));
                    WebAppConfig webAppConfig = (WebAppConfig) servletMapping
                            .remove(warDirectory.getAbsolutePath().replace("\\", "/"));
                    System.gc();
                    deleteDir(warDirectory);
                    warsDeployed.remove(fileName);
                    removeServletFromSessionObject(webAppConfig,
                            warDirectory.getAbsolutePath().replace("\\", "/"));
                    numberOfWarDeployed--;
                }
                customClassLoader = new WebClassLoader(urls);
                logger.info(customClassLoader);
                urlClassLoaderMap.put(warDirectory.getAbsolutePath().replace("\\", "/"), customClassLoader);
                extractWar(fileCurrLastModified.getFile(), customClassLoader);
                //System.out.println("War Deployed Successfully in path: "+fileCurrLastModified.getFile().getAbsolutePath());
                AddUrlToClassLoader(warDirectory, customClassLoader);
                numberOfWarDeployed++;
                warsDeployed.add(fileName);
                logger.info(filePath + ".war Deployed");
            }
        }
        keyset = filePrevMap.keySet();
        ite = keyset.iterator();
        while (ite.hasNext()) {
            fileName = (String) ite.next();
            filePrevLastModified = (FileInfo) filePrevMap.get(fileName);
            fileCurrLastModified = (FileInfo) fileCurrMap.get(fileName);
            if (fileCurrLastModified == null) {
                filePath = filePrevLastModified.getFile().getAbsolutePath();
                filePath = filePath.substring(0, filePath.toLowerCase().lastIndexOf(".war"));
                logger.info("filePath" + filePath);
                File deleteDirectory = new File(filePath);
                logger.info("Delete Directory" + deleteDirectory.getAbsolutePath().replace("\\", "/"));
                WebClassLoader webClassLoader = (WebClassLoader) urlClassLoaderMap
                        .get(deleteDirectory.getAbsolutePath().replace("\\", "/"));
                synchronized (executorServiceMap) {

                    try {
                        new ExecutorServicesConstruct().removeExecutorServices(executorServiceMap,
                                new File(deleteDirectory.getAbsolutePath().replace("\\", "/") + "/WEB-INF/"
                                        + "executorservices.xml"),
                                webClassLoader);
                    } catch (Exception e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                    //System.out.println("executorServiceMap"+executorServiceMap);
                }
                synchronized (messagingClassMap) {
                    try {
                        new MessagingClassConstruct().removeMessagingClass(messagedigester,
                                new File(deleteDirectory.getAbsolutePath().replace("\\", "/") + "/WEB-INF/"
                                        + "messagingclass.xml"),
                                messagingClassMap);
                    } catch (Exception e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                    //System.out.println("executorServiceMap"+executorServiceMap);
                }
                WebAppConfig webAppConfig = (WebAppConfig) servletMapping
                        .remove(deleteDirectory.getAbsolutePath().replace("\\", "/"));
                ClassLoaderUtil.cleanupJarFileFactory(ClassLoaderUtil.closeClassLoader(webClassLoader));
                urlClassLoaderMap.remove(deleteDirectory.getAbsolutePath().replace("\\", "/"));
                logger.info("ServletMapping" + servletMapping);
                logger.info("warDirectory=" + deleteDirectory.getAbsolutePath().replace("\\", "/"));
                try {
                    logger.info(webClassLoader);
                    logger.info("CLASSLOADER IS CLOSED");
                    webClassLoader.close();
                } catch (Throwable e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
                System.gc();
                deleteDir(deleteDirectory);
                numberOfWarDeployed--;
                warsDeployed.remove(fileName);
                try {
                    removeServletFromSessionObject(webAppConfig,
                            deleteDirectory.getAbsolutePath().replace("\\", "/"));
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
                logger.info(filePath + ".war Undeployed");
            }
        }
        filePrevMap.keySet().removeAll(filePrevMap.keySet());
        filePrevMap.putAll(fileCurrMap);
        fileCurrMap.keySet().removeAll(fileCurrMap.keySet());
        //System.out.println("filePrevMap="+filePrevMap);
        //System.out.println("fileCurrMap="+fileCurrMap);
        try {
            Thread.sleep(3000);
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}

From source file:de.julielab.jtbd.TokenizerApplication.java

/**
 * tokenize documents
 *
 * @param inDir
 *            the directory with the documents to be tokenized
 * @param outDir
 *            the directory where the tokenized documents should be written
 *            to
 * @param modelFile
 *            the model to use for tokenization
 * @throws IOException 
 */
public static void doPrediction(final File inDir, final File outDir, final String modelFilename)
        throws IOException {

    final Tokenizer tokenizer = new Tokenizer();
    try {
        tokenizer.readModel(new File(modelFilename));
    } catch (final Exception e) {
        e.printStackTrace();
    }

    // get list of all files in directory
    final File[] predictOrgFiles = inDir.listFiles();

    // loop over all files
    for (final File predictOrgFile : predictOrgFiles) {
        final long start = System.currentTimeMillis();

        List<String> orgSentences = FileUtils.readLines(predictOrgFile, "utf-8");
        //readFile(predictOrgFile); //TODO ask Erik what he thinks of this
        ArrayList<String> tokSentences = new ArrayList<String>();

        ArrayList<String> predictions = new ArrayList<String>();

        // force empty labels
        for (int j = 0; j < orgSentences.size(); j++)
            tokSentences.add("");

        // make prediction data
        InstanceList predData = tokenizer.makePredictionData(orgSentences, tokSentences);

        // predict
        for (int i = 0; i < predData.size(); i++) {
            final String orgSentence = orgSentences.get(i);
            final char lastChar = orgSentence.charAt(orgSentence.length() - 1);

            final Instance inst = predData.get(i);
            ArrayList<Unit> units = null;
            units = tokenizer.predict(inst);

            // ArrayList<Unit> units = (ArrayList) inst.getName();

            String sentence = "";

            for (int j = 0; j < units.size(); j++) {
                final String sp = (units.get(j).label.equals("P")) ? " " : "";
                sentence += units.get(j).rep + sp;
            }

            if (EOSSymbols.contains(lastChar))
                sentence += " " + lastChar;

            sentence = sentence.replaceAll(" +", " ");

            predictions.add(sentence);

        }

        // write predictions into file
        final String fName = predictOrgFile.toString();
        final String newfName = fName.substring(fName.lastIndexOf("/") + 1, fName.length());
        final File fNew = new File(outDir.toString() + "/" + newfName);
        writeFile(predictions, fNew);
        // System.out.println("\ntokenized sentences written to: " +
        // fNew.toString());

        // set all arraylists to null so that GC can get them
        orgSentences = null;
        tokSentences = null;
        predictions = null;
        predData = null;
        System.gc();

        final long stop = System.currentTimeMillis();
        System.out.println("took: " + (stop - start));
    } // outer loop over files

    System.out.println("Tokenized texts written to: " + outDir.toString());

}

From source file:com.roche.sequencing.bioinformatics.common.utils.FileUtil.java

/**
 * Recursively delete a directory. If the first attempt fails due to an IOException, garbage collect, sleep, and try one more time to get around an issue with NFS volumes.
 *
 * @param directory
 *            directory to delete
 * @throws IOException
 *             in case deletion is unsuccessful
 */
public static void deleteDirectory(File directory) throws IOException {
    try {
        // Attempt to recursively delete directory
        FileUtils.deleteDirectory(directory);
    } catch (IOException e) {
        // Directories mounted on NFS volumes may have lingering .nfsXXXX
        // files
        // if no streams are open, it is likely from stale objects
        int totalAttempts = 5;
        for (int i = 0; i < totalAttempts; i++) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e1) {
            }
            System.gc();
            try {
                FileUtils.deleteDirectory(directory);
            } catch (IOException e1) {
                Logger logger = LoggerFactory.getLogger(FileUtil.class);
                if (i == totalAttempts - 1) {
                    logger.warn("Unable to delete directory[" + directory.getAbsolutePath() + "] on attempt "
                            + (i + 1) + ".  Will attempt deletion on exit.");
                    directory.deleteOnExit();
                } else {
                    logger.warn("Unable to delete directory[" + directory.getAbsolutePath() + "] on attempt "
                            + (i + 1) + ".  Will attempt deletion " + (totalAttempts - i - 1) + " more times.");
                    continue;
                }
            }
            break;
        }
    }
}
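
The same gc-sleep-retry idea can be expressed without commons-io. The sketch below is an illustrative reduction of the pattern using only java.nio; the attempt count and sleep interval are arbitrary choices, not values from the Roche utility:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Comparator;
import java.util.stream.Stream;

public class RetryingDelete {

    /** Recursively deletes dir, hinting at GC and sleeping between attempts. */
    public static void deleteRecursively(Path dir, int attempts) throws IOException {
        if (!Files.exists(dir)) {
            return; // nothing to do
        }
        IOException lastFailure = null;
        for (int i = 0; i < attempts; i++) {
            try {
                try (Stream<Path> walk = Files.walk(dir)) {
                    // delete children before their parent directories
                    walk.sorted(Comparator.reverseOrder()).forEach(p -> p.toFile().delete());
                }
                if (!Files.exists(dir)) {
                    return; // success
                }
            } catch (IOException e) {
                lastFailure = e;
            }
            System.gc(); // may release stale handles (e.g. lingering .nfsXXXX files)
            try {
                Thread.sleep(1000);
            } catch (InterruptedException ie) {
                Thread.currentThread().interrupt();
                break;
            }
        }
        throw lastFailure != null ? lastFailure : new IOException("Could not delete " + dir);
    }
}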

From source file:me.ububble.speakall.fragment.ConversationChatFragment.java

@Override
public void onResume() {
    super.onResume();
    System.gc();
    getActivity().getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
    if (!dontClose) {
        messageSelected = null;
        mediaPlayer = null;
        ((MainActivity) activity).changeBtnColor(R.id.ic_menu_chat);
        setIconBarContent();
        SpeakSocket.conversationChatFragment = this;
        initAdapter();
    }
    dontClose = false;
}

From source file:edu.ucla.cs.scai.canali.core.index.BuildIndex.java

private void loadTriples() throws Exception {
    HashMap<String, Integer> propertyFrequency = new HashMap<>();
    HashSet<String> shortProperties = new HashSet<>();
    if (minPropertyLength > 1) {
        System.out.println(
                "Finding propertys to be ignored because they have lenght less than " + minPropertyLength);
        int i = 0;
        try (BufferedReader in = new BufferedReader(new FileReader(basePathInput + "property_labels"))) {
            String l;
            while ((l = in.readLine()) != null) {
                i++;
                if (l.length() > 0) {
                    try {
                        StringTokenizer st = new StringTokenizer(l, "\t<> ");
                        String uri = st.nextToken().trim();
                        if (uri.startsWith("http")) {
                            String label = st.hasMoreTokens() ? st.nextToken().trim() : "";
                            if (label.length() < minPropertyLength && !shortProperties.contains(uri)) {
                                shortProperties.add(uri);
                                System.out
                                        .println("Property " + uri + " will be ignored, having label " + label);
                                propertyFrequency.put(uri, 0);
                            }
                        }
                    } catch (Exception e) {
                        System.out.println("Error at line " + i + ": " + l);
                        e.printStackTrace();
                    }
                }
            }
        }
        System.out.println(shortProperties.size() + " propertys will be ignored, having lenght less than "
                + minPropertyLength);
    }
    int maxNumberOfProperties = 100000;
    System.out.println("Finding the the " + maxNumberOfProperties
            + " most frequent propertys of the propertys whose label has at least two characters");
    try (BufferedReader in = new BufferedReader(new FileReader(basePathInput + "triples"))) {
        String l = in.readLine();
        int n = 0;
        while (l != null && l.length() > 0) {
            if (l.contains("classDegree")) {
                System.out.print("");
            }
            StringTokenizer st = new StringTokenizer(l, "<> \t");
            String subject = st.nextToken();
            String property = st.nextToken();
            String value = st.nextToken();
            if (subject.startsWith("http") && property.startsWith("http")
                    && !shortProperties.contains(property)) {
                if (value.startsWith("http") || value.startsWith("ftp:")) { //it is an entity
                    Integer c = propertyFrequency.get(property);
                    if (c == null) {
                        propertyFrequency.put(property, 1);
                    } else {
                        propertyFrequency.put(property, 1 + c);
                    }
                } else { //it is a literal
                    if (value.endsWith("^^")) { //it is a basic type
                        String type = StringEscapeUtils.unescapeJava(st.nextToken());
                        String literalType = basicTypesMapping.get(type);
                        if (literalType != null) {
                            Integer c = propertyFrequency.get(property);
                            if (c == null) {
                                propertyFrequency.put(property, 1);
                            } else {
                                propertyFrequency.put(property, 1 + c);
                            }
                        } else {
                            System.out.println("Basic type not recognized in " + l);
                        }
                    } else {
                        if (value.startsWith("\"")) { //it is a String
                            Integer c = propertyFrequency.get(property);
                            if (c == null) {
                                propertyFrequency.put(property, 1);
                            } else {
                                propertyFrequency.put(property, 1 + c);
                            }
                        } else {
                            System.out.println("Basic type not recognized in " + l);
                        }
                    }
                }
                n++;
                if (n % 1000000 == 0) {
                    System.out.println("Scanned " + (n / 1000000) + "M triples");
                }
            } else {
                //System.out.println("Invalid triple: " + l);
            }
            l = in.readLine();
        }
    }
    shortProperties = null;
    System.gc();
    ArrayList<Map.Entry<String, Integer>> f = new ArrayList<>(propertyFrequency.entrySet());
    Collections.sort(f, new Comparator<Map.Entry<String, Integer>>() {
        @Override
        public int compare(Map.Entry<String, Integer> o1, Map.Entry<String, Integer> o2) {
            return Integer.compare(o2.getValue(), o1.getValue());
        }
    });
    int minFreq = 1;
    if (f.size() > maxNumberOfProperties) {
        minFreq = f.get(maxNumberOfProperties - 1).getValue();
        if (f.get(maxNumberOfProperties).equals(f.get(maxNumberOfProperties - 1))) {
            minFreq++;
        }
    }
    for (Map.Entry<String, Integer> e : f) {
        System.out.println(e.getKey() + "\t" + e.getValue());
    }
    System.out.println("Keeping propertys with at least " + minFreq + " occurrences");
    HashSet<String> acceptedProperties = new HashSet<>();
    for (Map.Entry<String, Integer> e : propertyFrequency.entrySet()) {
        if (e.getValue() >= minFreq) {
            acceptedProperties.add(e.getKey());
        }
    }
    System.out.println(acceptedProperties.size() + " propertys kept over " + f.size());
    f = null;
    propertyFrequency = null;
    System.gc();
    System.out.println("Mapping entities and property URIs to ids");
    int nEntityTriples = 0;
    HashMap<String, Integer> nLiteralTriples = new HashMap<>();
    for (String type : literalTypes) {
        nLiteralTriples.put(type, 0);
    }
    HashSet<String> unrecognizedBasicTypes = new HashSet<>();
    //count entity-valued and literal-valued triples
    //and
    //create the association between uris and ids for entities        
    try (BufferedReader in = new BufferedReader(new FileReader(basePathInput + "triples"))) {
        String l = in.readLine();
        int n = 0;
        while (l != null && l.length() > 0) {
            StringTokenizer st = new StringTokenizer(l, "<> \t");
            String subject = st.nextToken();
            String property = st.nextToken();
            if (!acceptedProperties.contains(property)) {
                l = in.readLine();
                continue;
            }
            String value = st.nextToken();
            if (subject.startsWith("http") && property.startsWith("http")) {
                Integer idSbj = getEntityIdFromUri(subject); //entityIdFromUri.get(subject);
                if (idSbj == null) {
                    idSbj = entityIdFromUriWithPrefix.size() + 1;//entityIdFromUri.size() + 1;
                    putEntityIdFromUri(subject, idSbj); //entityIdFromUri.put(subject, idSbj);
                }
                Integer idAttr = propertyIdFromUri.get(property);
                if (idAttr == null) {
                    idAttr = propertyIdFromUri.size() + 1;
                    propertyIdFromUri.put(property, idAttr);
                }
                if (value.startsWith("http") || value.startsWith("ftp:")) { //it is an entity
                    Integer idVal = getEntityIdFromUri(value); //entityIdFromUri.get(value);
                    if (idVal == null) {
                        idVal = entityIdFromUriWithPrefix.size() + 1;//entityIdFromUri.size() + 1;
                        putEntityIdFromUri(value, idVal);//entityIdFromUri.put(value, idVal);
                    }
                    Integer idInvAttr = propertyIdFromUri.get(property + "Inv");
                    if (idInvAttr == null) {
                        idInvAttr = propertyIdFromUri.size() + 1;
                        propertyIdFromUri.put(property + "Inv", idInvAttr);
                    }
                    nEntityTriples += 2;
                } else { //it is a literal
                    if (value.endsWith("^^")) { //it is a basic type
                        String type = StringEscapeUtils.unescapeJava(st.nextToken());
                        String literalType = basicTypesMapping.get(type);
                        if (literalType != null) {
                            nLiteralTriples.put(literalType, nLiteralTriples.get(literalType) + 1);
                        } else {
                            if (!unrecognizedBasicTypes.contains(type)) {
                                System.out.println("Unrecognized type: " + type);
                                System.out.println("in line: " + l);
                                unrecognizedBasicTypes.add(type);
                            }
                        }
                    } else {
                        if (value.startsWith("\"")) { //it is a String
                            nLiteralTriples.put(STRING, nLiteralTriples.get(STRING) + 1);
                        }
                    }
                }
                n++;
                if (n % 1000000 == 0) {
                    System.out.println("Loaded " + (n / 1000000) + "M triples");
                }
            } else {
                System.out.println("Invalid triple: " + l);
            }
            l = in.readLine();
        }
    }
    System.out.println("Number of triples with entity value: " + nEntityTriples);
    for (String type : literalTypes) {
        System.out.println("Number of triples with " + type + " value: " + nLiteralTriples.get(type));
    }
    entityTriplesSubjects = new int[nEntityTriples];
    entityTriplesProperties = new int[nEntityTriples];
    entityTriplesValues = new int[nEntityTriples];
    for (String type : literalTypes) {
        literalTriplesSubjects.put(type, new int[nLiteralTriples.get(type)]);
        literalTriplesProperties.put(type, new int[nLiteralTriples.get(type)]);
    }
    //load the triples into the arrays creaded above
    System.out.println("Loading triples");
    try (BufferedReader in = new BufferedReader(new FileReader(basePathInput + "triples"))) {
        String l = in.readLine();
        int n = 0;
        while (l != null && l.length() > 0) {
            StringTokenizer st = new StringTokenizer(l, "<> \t");
            String sbj = st.nextToken();
            String attr = st.nextToken();
            if (!acceptedProperties.contains(attr)) {
                l = in.readLine();
                continue;
            }
            String val = st.nextToken();
            if (sbj.startsWith("http") && attr.startsWith("http")) {
                if (val.startsWith("http") || val.startsWith("ftp:")) { //it is an entity
                    updateTriples(sbj, attr, val, null);
                } else { //it is a literal
                    if (val.endsWith("^^")) { //it is a basic type
                        String type = StringEscapeUtils.unescapeJava(st.nextToken());
                        String literalType = basicTypesMapping.get(type);
                        if (literalType != null) {
                            updateTriples(sbj, attr, null, literalType);
                        } else {
                            if (!unrecognizedBasicTypes.contains(type)) {
                                System.out.println("Unrecognized type: " + type);
                                System.out.println("in line: " + l);
                                unrecognizedBasicTypes.add(type);
                            }
                        }
                    } else {
                        if (val.startsWith("\"")) { //it is a String
                            updateTriples(sbj, attr, null, STRING);
                        } else {
                            System.out.println("Unexpected line: " + l);
                        }
                    }
                }
                n++;
                if (n % 1000000 == 0) {
                    System.out.println("Loaded " + (n / 1000000) + "M triples");
                }
            } else {
                System.out.println("Invalid triple: " + l);
            }
            l = in.readLine();
        }
    }
    System.out.println("Entity value triples: " + entityTriplesSubjects.length);
    for (String type : literalTriplesSubjects.keySet()) {
        System.out.println(type + " value triples: " + literalTriplesSubjects.get(type).length);
    }
    propertyUri = new String[propertyIdFromUri.size() + 1];
    for (Map.Entry<String, Integer> e : propertyIdFromUri.entrySet()) {
        propertyUri[e.getValue()] = e.getKey();
    }
    entityUriWithPrefix = new String[entityIdFromUriWithPrefix.size() + 1];
    for (Map.Entry<String, Integer> e : entityIdFromUriWithPrefix.entrySet()) {
        entityUriWithPrefix[e.getValue()] = e.getKey();
    }
    //entityUri = new String[entityIdFromUri.size() + 1];
    //for (Map.Entry<String, Integer> e : entityIdFromUri.entrySet()) {
    //    entityUri[e.getValue()] = e.getKey();
    //}
    entityLabels = new HashSet[entityIdFromUriWithPrefix.size() + 1]; //entityLabels = new HashSet[entityIdFromUri.size() + 1];
    entityClasses = new HashSet[entityIdFromUriWithPrefix.size() + 1]; //entityClasses = new HashSet[entityIdFromUri.size() + 1];
    propertyLabels = new HashSet[propertyIdFromUri.size() + 1];
    entityOutProperties = new HashSet[entityIdFromUriWithPrefix.size() + 1]; //entityOutProperties = new HashSet[entityIdFromUri.size() + 1];
    entityInProperties = new HashSet[entityIdFromUriWithPrefix.size() + 1]; //entityInProperties = new HashSet[entityIdFromUri.size() + 1];
    propertyOutProperties = new HashSet[propertyIdFromUri.size() + 1];
    propertyInProperties = new HashSet[propertyIdFromUri.size() + 1];
    propertyHasLiteralRange = new boolean[propertyIdFromUri.size() + 1];
    propertyCount = new int[propertyIdFromUri.size() + 1];
}

From source file:eu.planets_project.pp.plato.action.workflow.DefineSampleRecordsAction.java

/**
 * Uploads a file into a newly created sample record and adds this sample
 * record to the list in the project.
 *
 * @return always returns null
 */
public String upload() {
    if (!sampleRecordToUpload.isDataExistent()) {

        log.debug("No file for upload selected.");
        FacesMessages.instance().add(FacesMessage.SEVERITY_ERROR,
                "You have to select a file before starting upload.");

        return null;
    }

    changed = "true";
    SampleObject record = new SampleObject();
    String fullName = new File(sampleRecordToUpload.getFullname()).getName();
    record.setFullname(fullName);
    record.setShortName(record.getFullname().substring(0, Math.min(20, record.getFullname().length())));
    record.setContentType(sampleRecordToUpload.getContentType());
    selectedPlan.getSampleRecordsDefinition().addRecord(record);

    writeTempFile(sampleRecordToUpload, record);

    // identify format of newly uploaded records
    if (shouldCharacterise(record)) {
        identifyFormat(record);
        //              describeInXcdl(record);
        characteriseFits(record);
    }

    // need to initialize jhove tree by upload of a new sample record
    if (record.getJhoveXMLString() == null || "".equals(record.getJhoveXMLString())) {
        record.setJhoveXMLString(jHoveAdaptor.describe(tempDigitalObjects.get(record)));
    }

    log.debug("Content-Type: " + sampleRecordToUpload.getContentType());
    log.debug("Size of Records Array: " + selectedPlan.getSampleRecordsDefinition().getRecords().size());
    log.debug("FileName: " + sampleRecordToUpload.getFullname());
    log.debug("Length of File: " + sampleRecordToUpload.getData().getSize());
    log.debug("added SampleObject: " + record.getFullname());
    log.debug("JHove initialized: " + (record.getJhoveXMLString() != null));

    sampleRecordToUpload.setData(new ByteStream());

    System.gc();

    return null;
}

From source file:ca.uviccscu.lp.server.main.ShutdownListener.java

@Deprecated
public void threadCleanup(File f) {
    while (!deleteFolder(f, false, 0, 0)) {
        l.error("Trying to stop more threads, list:");
        //List remaining threads
        ThreadGroup tg2 = Thread.currentThread().getThreadGroup();
        while (tg2.getParent() != null) {
            tg2 = tg2.getParent();
        }
        //Object o = new Object();
        //o.notifyAll();
        Thread[] threads = new Thread[tg2.activeCount() + 1024];
        tg2.enumerate(threads, true);
        //VERY BAD WAY TO STOP THREAD BUT NO CHOICE - need to release the file locks
        for (int i = 0; i < threads.length; i++) {
            Thread th = threads[i];
            if (th != null) {
                l.trace("Have thread: " + i + " : " + th.getName());
                if (th != null && th != Thread.currentThread()
                        && (AEThread2.isOurThread(th) || isAzThread(th))) {
                    l.trace("Suspending " + th.getName());
                    try {
                        th.suspend();
                        l.trace("ok");
                    } catch (SecurityException e) {
                        l.trace("Stop vetoed by SM", e);
                    }

                }
            }
        }
        for (int i = 0; i < threads.length; i++) {
            Thread th = threads[i];
            if (th != null) {
                l.trace("Have thread: " + i + " : " + th.getName());
                if (th != null && th != Thread.currentThread()
                        && (AEThread2.isOurThread(th) || isAzThread(th))) {
                    l.trace("Stopping " + th.getName());
                    try {
                        th.stop();
                        l.trace("ok");
                    } catch (SecurityException e) {
                        l.trace("Stop vetoed by SM", e);
                    }

                }
            }
        }
    }
    System.gc();
}

From source file:com.life.wuhan.util.ImageDownloader.java

public synchronized Bitmap getBitmapFromCacheFile(String url) {
    String hashedUrl = getMd5(url);
    FileInputStream fis = null;

    try {
        fis = mContext.openFileInput(hashedUrl);
        return BitmapFactory.decodeStream(fis);
    } catch (Exception e) {
        // Not there.
        return null;
    } catch (OutOfMemoryError e) {
        // Decoding failed under memory pressure; hint at GC and drop the cache.
        System.gc();
        clearCache();
        return null;
    } finally {
        if (fis != null) {
            try {
                fis.close();
            } catch (IOException e) {
                // Ignore.
            }
        }
    }
}
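
The cache reader above treats OutOfMemoryError as a recoverable condition: it hints at a collection, clears its cache, and returns null so the caller can degrade gracefully. A generic, hedged sketch of that catch-then-gc shape outside Android (all names here are illustrative); catching OutOfMemoryError is a last resort and only reasonable when the failed allocation itself accounts for most of the pressure:

import java.util.concurrent.Callable;

public class OomRetry {

    /** Runs a memory-hungry task, hinting at GC and retrying once on OutOfMemoryError. */
    public static <T> T callWithOneRetry(Callable<T> task) throws Exception {
        try {
            return task.call();
        } catch (OutOfMemoryError first) {
            System.gc(); // best effort; a second failure propagates to the caller
            return task.call();
        }
    }

    public static void main(String[] args) throws Exception {
        byte[] block = callWithOneRetry(() -> new byte[8 * 1024 * 1024]);
        System.out.println("Allocated " + block.length + " bytes");
    }
}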