List of usage examples for java.util.concurrent ExecutorService awaitTermination
boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException;
From source file:ca.zadrox.dota2esportticker.service.UpdateTeamsService.java
private void updateTopTeams() { LOGD(TAG, "starting update"); // actually, first, check for connectivity: if (!checkForConnectivity()) { LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent(STATUS_NO_CONNECTIVITY)); LOGD(TAG, "returning due to no connectivity"); return;/*from w ww.j a va 2 s . c o m*/ } // first, check last update time long lastUpdate = PrefUtils.lastTeamsUpdate(this); long currentTime = TimeUtils.getUTCTime(); // if last update is less than 1 hour old, boot user to cursorloader op. if (currentTime - lastUpdate < 60000 * 60) { LOGD(TAG, "returnning due to too soon"); LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent(STATUS_COMPLETED)); return; } // else // use local broadcast manager to show loading indicator LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent(STATUS_UPDATING)); final String BASE_URL = "http://www.gosugamers.net/dota2/rankings"; final String TEAM_LINK_BASE_URL = "http://www.gosugamers.net/dota2/teams/"; // we see what teams are in top 50. 
(httpreq -> gosugamers) try { String rawHtml = new OkHttpClient().newCall(new Request.Builder().url(BASE_URL).build()).execute() .body().string(); String processedHtml = rawHtml.substring(rawHtml.indexOf("<div id=\"col1\" class=\"rows\">"), rawHtml.indexOf("<div id=\"col2\" class=\"rows\">")); Elements teamRows = Jsoup.parse(processedHtml).getElementsByClass("ranking-link"); ExecutorService executorService = Executors.newFixedThreadPool(10); ContentValues[] teamRanks = new ContentValues[50]; HashMap<ContentValues, Future<String>> newTeamInfo = new HashMap<ContentValues, Future<String>>(); HashMap<ContentValues, Future<String>> updateTeamInfo = new HashMap<ContentValues, Future<String>>(); int i = 0; for (Element teamRow : teamRows) { ContentValues contentValues = new ContentValues(); String teamId = teamRow.attr("data-id"); contentValues.put(MatchContract.TeamEntry._ID, teamId); String untrimmedTeamName = teamRow.getElementsByTag("h4").first().text(); String teamUrl = TEAM_LINK_BASE_URL + teamId + "-" + untrimmedTeamName.replaceAll("[\\W]?[\\W][\\W]*", "-").toLowerCase(); contentValues.put(MatchContract.TeamEntry.COLUMN_TEAM_URL, teamUrl); String teamName = untrimmedTeamName.replaceAll(" ?\\.?\\-?-?Dot[aA][\\s]?2", ""); contentValues.put(MatchContract.TeamEntry.COLUMN_TEAM_NAME, teamName); if (teamUrl.charAt(teamUrl.length() - 1) == '-') { teamUrl = teamUrl.substring(0, teamUrl.length() - 2); } // then, we query db for id of the team ( Cursor cursor = getContentResolver().query( MatchContract.TeamEntry.buildTeamUri(Long.parseLong(teamId)), new String[] { MatchContract.TeamEntry.COLUMN_TEAM_NAME, MatchContract.TeamEntry.COLUMN_TEAM_URL }, null, null, null); // -> if present, and data remains unchanged, continue. // -> if present, but data is changed, add to update queue. 
if (cursor.moveToFirst()) { LOGD(TAG, "Have team already?"); if (!cursor.getString(0).contentEquals(teamName) || !cursor.getString(1).contentEquals(teamUrl)) { LOGD(TAG, "Team has updated values."); updateTeamInfo.put(contentValues, executorService.submit(new TeamGetter(teamUrl))); } } // -> if not present, add to update queue. else { LOGD(TAG, "Do team update"); newTeamInfo.put(contentValues, executorService.submit(new TeamGetter(teamUrl))); } // LOGD(TAG, "\n" + // "data-id: " + teamId + "\n" + // "team-name: " + teamName + "\n" + // "team-url: " + teamUrl); teamRanks[i] = new ContentValues(); teamRanks[i].put(MatchContract.TeamRankEntry._ID, i + 1); teamRanks[i].put(MatchContract.TeamRankEntry.COLUMN_TEAM_ID, teamId); cursor.close(); i++; } executorService.shutdown(); executorService.awaitTermination(20, TimeUnit.SECONDS); for (ContentValues contentValues : newTeamInfo.keySet()) { try { String teamLogo = newTeamInfo.get(contentValues).get(); contentValues.put(MatchContract.TeamEntry.COLUMN_TEAM_LOGO_URL, teamLogo); } catch (ExecutionException e) { LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent(STATUS_ERROR)); e.printStackTrace(); } } for (ContentValues contentValues : updateTeamInfo.keySet()) { try { String teamLogo = updateTeamInfo.get(contentValues).get(); contentValues.put(MatchContract.TeamEntry.COLUMN_TEAM_LOGO_URL, teamLogo); String teamId = contentValues.getAsString(MatchContract.TeamEntry._ID); contentValues.remove(MatchContract.TeamEntry._ID); int updatedRows = getContentResolver().update(MatchContract.TeamEntry.CONTENT_URI, contentValues, MatchContract.TeamEntry.TABLE_NAME + "." 
+ MatchContract.TeamEntry._ID + " = ?", new String[] { teamId }); LOGD(TAG, "updatedRows: " + updatedRows); } catch (ExecutionException e) { LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent(STATUS_ERROR)); e.printStackTrace(); } } getContentResolver().bulkInsert(MatchContract.TeamEntry.CONTENT_URI, newTeamInfo.keySet().toArray(new ContentValues[newTeamInfo.size()])); getContentResolver().bulkInsert(MatchContract.TeamRankEntry.CONTENT_URI, teamRanks); } catch (IOException e) { LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent(STATUS_ERROR)); e.printStackTrace(); } catch (InterruptedException e2) { LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent(STATUS_ERROR)); e2.printStackTrace(); } // String[] projection = new String[]{ // MatchContract.TeamEntry.TABLE_NAME + "." + MatchContract.TeamEntry._ID, // MatchContract.TeamEntry.COLUMN_TEAM_NAME, // MatchContract.TeamEntry.COLUMN_TEAM_URL, // MatchContract.TeamEntry.COLUMN_TEAM_LOGO_URL, // MatchContract.TeamEntry.COLUMN_TEAM_STARRED, // MatchContract.TeamRankEntry.TABLE_NAME + "." + MatchContract.TeamRankEntry._ID // }; // // String sortOrder = // MatchContract.TeamRankEntry.TABLE_NAME + "." + // MatchContract.TeamRankEntry._ID + " ASC"; // // Cursor c = getContentResolver().query( // MatchContract.TeamEntry.TOP_50_URI, // projection, // null, // null, // sortOrder // ); // // while (c.moveToNext()) { // String teamPrintOut = // "Rank: " + c.getInt(5) + "\n" + // "teamId: " + c.getInt(0) + " teamName: " + c.getString(1) + "\n" + // "teamUrl: " + c.getString(2) + "\n" + // "teamLogoUrl: " + c.getString(3) + "\n" + // "isFavourited: " + (c.getInt(4) == 0 ? "false" : "true"); // LOGD(TAG + "/UTT", teamPrintOut); // } // // c.close(); // use local broadcast manager to hide loading indicator // and signal that cursorloader for top50 can happen. PrefUtils.setLastTeamUpdate(this, currentTime); LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent(STATUS_COMPLETED)); }
From source file:MSUmpire.DIA.DIAPack.java
public void GenerateRawMGF() throws IOException, Exception { if (RawMGFExist()) { return;// www. j a v a 2s . c o m } Logger.getRootLogger().info("Extracting pseudo MS/MS spectra with raw intensity"); HashMap<Integer, ArrayList<PseudoMSMSProcessing>> ScanList = new HashMap<>(); HashMap<String, PseudoMSMSProcessing> UnfragScanList = new HashMap<>(); parameter.BoostComplementaryIon = false; ExecutorService executorPool = Executors.newFixedThreadPool(NoCPUs); for (LCMSPeakDIAMS2 DIAwindow : DIAWindows) { DIAwindow.ReadPeakCluster(); DIAwindow.ReadPrecursorFragmentClu2Cur(); DIAwindow.BuildFragmentMS1ranking(); DIAwindow.FilterByCriteria(); DIAwindow.BuildFragmentUnfragranking(); DIAwindow.FilterByCriteriaUnfrag(); for (PeakCluster ms1cluster : MS1FeatureMap.PeakClusters) { if (DIAwindow.DIA_MZ_Range.getX() <= ms1cluster.GetMaxMz() && DIAwindow.DIA_MZ_Range.getY() >= ms1cluster.TargetMz() && DIAwindow.FragmentsClu2Cur.containsKey(ms1cluster.Index)) { DIAwindow.ExtractFragmentForPeakCluser(ms1cluster); if (DIAwindow.Last_MZ_Range == null || DIAwindow.Last_MZ_Range.getY() < ms1cluster.TargetMz()) { PseudoMSMSProcessing mSMSProcessing = new PseudoMSMSProcessing(ms1cluster, parameter); executorPool.execute(mSMSProcessing); if (!ScanList.containsKey(ms1cluster.Index)) { ScanList.put(ms1cluster.Index, new ArrayList<PseudoMSMSProcessing>()); } ScanList.get(ms1cluster.Index).add(mSMSProcessing); } } } for (PeakCluster ms2cluster : DIAwindow.PeakClusters) { if (DIAwindow.DIA_MZ_Range.getX() <= ms2cluster.TargetMz() && DIAwindow.DIA_MZ_Range.getY() >= ms2cluster.TargetMz() && DIAwindow.UnFragIonClu2Cur.containsKey(ms2cluster.Index)) { DIAwindow.ExtractFragmentForUnfragPeakCluser(ms2cluster); PseudoMSMSProcessing mSMSProcessing = new PseudoMSMSProcessing(ms2cluster, parameter); executorPool.execute(mSMSProcessing); UnfragScanList.put(DIAwindow.WindowID + ";" + ms2cluster.Index, mSMSProcessing); } } DIAwindow.ClearAllPeaks(); System.gc(); Logger.getRootLogger() .info("(Memory 
usage:" + Math.round( (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1048576) + "MB)"); } executorPool.shutdown(); try { executorPool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS); } catch (InterruptedException e) { Logger.getRootLogger().info("interrupted.."); } ReadScanNoMapping(); String mgffile = GetSkylineFolder() + GetForLibQ1Name() + ".mgf"; FileWriter mgfWriter = new FileWriter(mgffile, false); for (final int ScanNo : new java.util.TreeSet<>(ScanClusterMap_Q1.keySet())) { int ClusterIndex = ScanClusterMap_Q1.get(ScanNo); XYPointCollection Scan = new XYPointCollection(); PseudoMSMSProcessing mSMSProcessing = null; for (PseudoMSMSProcessing MS2Processing : ScanList.get(ClusterIndex)) { mSMSProcessing = MS2Processing; for (PrecursorFragmentPairEdge fragmentClusterUnit : MS2Processing.fragments) { Scan.AddPointKeepMaxIfValueExisted(fragmentClusterUnit.FragmentMz, fragmentClusterUnit.Intensity); } } StringBuilder mgfString = new StringBuilder(); mgfString.append("BEGIN IONS\n"); mgfString.append("PEPMASS=" + mSMSProcessing.Precursorcluster.TargetMz() + "\n"); mgfString.append("CHARGE=" + mSMSProcessing.Precursorcluster.Charge + "+\n"); mgfString.append("RTINSECONDS=" + mSMSProcessing.Precursorcluster.PeakHeightRT[0] * 60f + "\n"); mgfString.append("TITLE=ClusterIndex:" + mSMSProcessing.Precursorcluster.Index + "\n"); for (int i = 0; i < Scan.PointCount(); i++) { mgfString.append(Scan.Data.get(i).getX()).append(" ").append(Scan.Data.get(i).getY()).append("\n"); } mgfString.append("END IONS\n\n"); mgfWriter.write(mgfString.toString()); } mgfWriter.close(); //////////////////////////////////////////////////////////////////////////////// String mgffile2 = GetSkylineFolder() + GetForLibQ2Name() + ".mgf"; FileWriter mgfWriter2 = new FileWriter(mgffile2, false); for (final int ScanNo : new java.util.TreeSet<>(ScanClusterMap_Q2.keySet())) { int ClusterIndex = ScanClusterMap_Q2.get(ScanNo); XYPointCollection Scan = new 
XYPointCollection(); PseudoMSMSProcessing mSMSProcessing = null; for (PseudoMSMSProcessing MS2Processing : ScanList.get(ClusterIndex)) { mSMSProcessing = MS2Processing; for (PrecursorFragmentPairEdge fragmentClusterUnit : MS2Processing.fragments) { Scan.AddPointKeepMaxIfValueExisted(fragmentClusterUnit.FragmentMz, fragmentClusterUnit.Intensity); } } StringBuilder mgfString = new StringBuilder(); mgfString.append("BEGIN IONS\n"); mgfString.append("PEPMASS=" + mSMSProcessing.Precursorcluster.TargetMz() + "\n"); mgfString.append("CHARGE=" + mSMSProcessing.Precursorcluster.Charge + "+\n"); mgfString.append("RTINSECONDS=" + mSMSProcessing.Precursorcluster.PeakHeightRT[0] * 60f + "\n"); mgfString.append("TITLE=ClusterIndex:" + mSMSProcessing.Precursorcluster.Index + "\n"); for (int i = 0; i < Scan.PointCount(); i++) { mgfString.append(Scan.Data.get(i).getX()).append(" ").append(Scan.Data.get(i).getY()).append("\n"); } mgfString.append("END IONS\n\n"); mgfWriter2.write(mgfString.toString()); } mgfWriter2.close(); //////////////////////////////// String mgffile3 = GetSkylineFolder() + GetForLibQ3Name() + ".mgf"; FileWriter mgfWriter3 = new FileWriter(mgffile3, false); mzXMLParser Q3mzxml = new mzXMLParser(FilenameUtils.getFullPath(Filename) + GetQ3Name() + ".mzXML", parameter, SpectralDataType.DataType.DDA, null, NoCPUs); Q3mzxml.GetAllScanCollectionByMSLabel(false, true, false, false); for (final int ScanNo : new java.util.TreeSet<>(ScanClusterMap_Q3.keySet())) { String key = ScanClusterMap_Q3.get(ScanNo); XYPointCollection Scan = new XYPointCollection(); PseudoMSMSProcessing mSMSProcessing = UnfragScanList.get(key); for (PrecursorFragmentPairEdge fragmentClusterUnit : mSMSProcessing.fragments) { Scan.AddPointKeepMaxIfValueExisted(fragmentClusterUnit.FragmentMz, fragmentClusterUnit.Intensity); } StringBuilder mgfString = new StringBuilder(); mgfString.append("BEGIN IONS\n"); mgfString.append("PEPMASS=" + mSMSProcessing.Precursorcluster.TargetMz() + "\n"); 
mgfString.append("CHARGE=" + mSMSProcessing.Precursorcluster.Charge + "+\n"); mgfString.append("RTINSECONDS=" + mSMSProcessing.Precursorcluster.PeakHeightRT[0] * 60f + "\n"); mgfString.append("TITLE=ClusterIndex:" + mSMSProcessing.Precursorcluster.Index + "\n"); for (int i = 0; i < Scan.PointCount(); i++) { mgfString.append(Scan.Data.get(i).getX()).append(" ").append(Scan.Data.get(i).getY()).append("\n"); } mgfString.append("END IONS\n\n"); mgfWriter3.write(mgfString.toString()); } mgfWriter3.close(); }
From source file:org.apache.accumulo.server.gc.SimpleGarbageCollector.java
/** * This method attempts to do its best to remove files from the filesystem that have been confirmed for deletion. */// www. java 2s. c om private void deleteFiles(SortedSet<String> confirmedDeletes) { // create a batchwriter to remove the delete flags for successful // deletes BatchWriter writer = null; if (!offline) { Connector c; try { c = instance.getConnector(SecurityConstants.getSystemCredentials()); writer = c.createBatchWriter(Constants.METADATA_TABLE_NAME, 10000000, 60000l, 3); } catch (Exception e) { log.error("Unable to create writer to remove file from the !METADATA table", e); } } // when deleting a dir and all files in that dir, only need to delete the dir // the dir will sort right before the files... so remove the files in this case // to minimize namenode ops Iterator<String> cdIter = confirmedDeletes.iterator(); String lastDir = null; while (cdIter.hasNext()) { String delete = cdIter.next(); if (isDir(delete)) { lastDir = delete; } else if (lastDir != null) { if (delete.startsWith(lastDir)) { log.debug("Ignoring " + delete + " because " + lastDir + " exist"); Mutation m = new Mutation(new Text(Constants.METADATA_DELETE_FLAG_PREFIX + delete)); m.putDelete(EMPTY_TEXT, EMPTY_TEXT); try { writer.addMutation(m); } catch (MutationsRejectedException e) { throw new RuntimeException(e); } cdIter.remove(); } else { lastDir = null; } } } final BatchWriter finalWriter = writer; ExecutorService deleteThreadPool = Executors.newFixedThreadPool(numDeleteThreads); for (final String delete : confirmedDeletes) { Runnable deleteTask = new Runnable() { @Override public void run() { boolean removeFlag; log.debug("Deleting " + ServerConstants.getTablesDir() + delete); try { Path p = new Path(ServerConstants.getTablesDir() + delete); if (fs.delete(p, true)) { // delete succeeded, still want to delete removeFlag = true; synchronized (SimpleGarbageCollector.this) { ++status.current.deleted; } } else if (fs.exists(p)) { // leave the entry in the METADATA table; we'll try 
again // later removeFlag = false; synchronized (SimpleGarbageCollector.this) { ++status.current.errors; } log.warn("File exists, but was not deleted for an unknown reason: " + p); } else { // this failure, we still want to remove the METADATA table // entry removeFlag = true; synchronized (SimpleGarbageCollector.this) { ++status.current.errors; } String parts[] = delete.split("/"); if (parts.length > 1) { String tableId = parts[1]; TableManager.getInstance().updateTableStateCache(tableId); TableState tableState = TableManager.getInstance().getTableState(tableId); if (tableState != null && tableState != TableState.DELETING) log.warn("File doesn't exist: " + p); } else { log.warn("Very strange path name: " + delete); } } // proceed to clearing out the flags for successful deletes and // non-existent files if (removeFlag && finalWriter != null) { Mutation m = new Mutation(new Text(Constants.METADATA_DELETE_FLAG_PREFIX + delete)); m.putDelete(EMPTY_TEXT, EMPTY_TEXT); finalWriter.addMutation(m); } } catch (Exception e) { log.error(e, e); } } }; deleteThreadPool.execute(deleteTask); } deleteThreadPool.shutdown(); try { while (!deleteThreadPool.awaitTermination(1000, TimeUnit.MILLISECONDS)) { } } catch (InterruptedException e1) { log.error(e1, e1); } if (writer != null) { try { writer.close(); } catch (MutationsRejectedException e) { log.error("Problem removing entries from the metadata table: ", e); } } }
From source file:org.opencb.opencga.storage.hadoop.variant.HadoopVariantStorageEngine.java
@Override public List<StoragePipelineResult> index(List<URI> inputFiles, URI outdirUri, boolean doExtract, boolean doTransform, boolean doLoad) throws StorageEngineException { if (inputFiles.size() == 1 || !doLoad) { return super.index(inputFiles, outdirUri, doExtract, doTransform, doLoad); }//from ww w.ja v a2s . c o m final boolean doArchive; final boolean doMerge; if (!getOptions().containsKey(HADOOP_LOAD_ARCHIVE) && !getOptions().containsKey(HADOOP_LOAD_VARIANT)) { doArchive = true; doMerge = true; } else { doArchive = getOptions().getBoolean(HADOOP_LOAD_ARCHIVE, false); doMerge = getOptions().getBoolean(HADOOP_LOAD_VARIANT, false); } if (!doArchive && !doMerge) { return Collections.emptyList(); } final int nThreadArchive = getOptions().getInt(HADOOP_LOAD_ARCHIVE_BATCH_SIZE, 2); ObjectMap extraOptions = new ObjectMap().append(HADOOP_LOAD_ARCHIVE, true).append(HADOOP_LOAD_VARIANT, false); final List<StoragePipelineResult> concurrResult = new CopyOnWriteArrayList<>(); List<VariantStoragePipeline> etlList = new ArrayList<>(); ExecutorService executorService = Executors.newFixedThreadPool(nThreadArchive, r -> { Thread t = new Thread(r); t.setDaemon(true); return t; }); // Set Daemon for quick shutdown !!! LinkedList<Future<StoragePipelineResult>> futures = new LinkedList<>(); List<Integer> indexedFiles = new CopyOnWriteArrayList<>(); for (URI inputFile : inputFiles) { //Provide a connected storageETL if load is required. VariantStoragePipeline storageETL = newStorageETL(doLoad, new ObjectMap(extraOptions)); futures.add(executorService.submit(() -> { try { Thread.currentThread().setName(Paths.get(inputFile).getFileName().toString()); StoragePipelineResult storagePipelineResult = new StoragePipelineResult(inputFile); URI nextUri = inputFile; boolean error = false; if (doTransform) { try { nextUri = transformFile(storageETL, storagePipelineResult, concurrResult, nextUri, outdirUri); } catch (StoragePipelineException ignore) { //Ignore here. 
Errors are stored in the ETLResult error = true; } } if (doLoad && doArchive && !error) { try { loadFile(storageETL, storagePipelineResult, concurrResult, nextUri, outdirUri); } catch (StoragePipelineException ignore) { //Ignore here. Errors are stored in the ETLResult error = true; } } if (doLoad && !error) { // Read the VariantSource to get the original fileName (it may be different from the // nextUri.getFileName if this is the transformed file) String fileName = storageETL.readVariantSource(nextUri, null).getFileName(); // Get latest study configuration from DB, might have been changed since StudyConfiguration studyConfiguration = storageETL.getStudyConfiguration(); // Get file ID for the provided file name Integer fileId = studyConfiguration.getFileIds().get(fileName); indexedFiles.add(fileId); } return storagePipelineResult; } finally { try { storageETL.close(); } catch (StorageEngineException e) { logger.error("Issue closing DB connection ", e); } } })); } executorService.shutdown(); int errors = 0; try { while (!futures.isEmpty()) { executorService.awaitTermination(1, TimeUnit.MINUTES); // Check values if (futures.peek().isDone() || futures.peek().isCancelled()) { Future<StoragePipelineResult> first = futures.pop(); StoragePipelineResult result = first.get(1, TimeUnit.MINUTES); if (result.getTransformError() != null) { //TODO: Handle errors. Retry? errors++; result.getTransformError().printStackTrace(); } else if (result.getLoadError() != null) { //TODO: Handle errors. Retry? 
errors++; result.getLoadError().printStackTrace(); } concurrResult.add(result); } } if (errors > 0) { throw new StoragePipelineException("Errors found", concurrResult); } if (doLoad && doMerge) { int batchMergeSize = getOptions().getInt(HADOOP_LOAD_VARIANT_BATCH_SIZE, 10); // Overwrite default ID list with user provided IDs List<Integer> pendingFiles = indexedFiles; if (getOptions().containsKey(HADOOP_LOAD_VARIANT_PENDING_FILES)) { List<Integer> idList = getOptions().getAsIntegerList(HADOOP_LOAD_VARIANT_PENDING_FILES); if (!idList.isEmpty()) { // only if the list is not empty pendingFiles = idList; } } List<Integer> filesToMerge = new ArrayList<>(batchMergeSize); int i = 0; for (Iterator<Integer> iterator = pendingFiles.iterator(); iterator.hasNext(); i++) { Integer indexedFile = iterator.next(); filesToMerge.add(indexedFile); if (filesToMerge.size() == batchMergeSize || !iterator.hasNext()) { extraOptions = new ObjectMap().append(HADOOP_LOAD_ARCHIVE, false) .append(HADOOP_LOAD_VARIANT, true) .append(HADOOP_LOAD_VARIANT_PENDING_FILES, filesToMerge); AbstractHadoopVariantStoragePipeline localEtl = newStorageETL(doLoad, extraOptions); int studyId = getOptions().getInt(Options.STUDY_ID.key()); localEtl.preLoad(inputFiles.get(i), outdirUri); localEtl.merge(studyId, filesToMerge); localEtl.postLoad(inputFiles.get(i), outdirUri); filesToMerge.clear(); } } annotateLoadedFiles(outdirUri, inputFiles, concurrResult, getOptions()); calculateStatsForLoadedFiles(outdirUri, inputFiles, concurrResult, getOptions()); } } catch (InterruptedException e) { Thread.interrupted(); throw new StoragePipelineException("Interrupted!", e, concurrResult); } catch (ExecutionException e) { throw new StoragePipelineException("Execution exception!", e, concurrResult); } catch (TimeoutException e) { throw new StoragePipelineException("Timeout Exception", e, concurrResult); } finally { if (!executorService.isShutdown()) { try { executorService.shutdownNow(); } catch (Exception e) { 
logger.error("Problems shutting executer service down", e); } } } return concurrResult; }
From source file:org.codehaus.mojo.nbm.CreateWebstartAppMojo.java
/** * * @throws org.apache.maven.plugin.MojoExecutionException * @throws org.apache.maven.plugin.MojoFailureException *//*from www .ja v a 2 s. c o m*/ @Override public void execute() throws MojoExecutionException, MojoFailureException { if ("none".equalsIgnoreCase(includeLocales)) { includeLocales = ""; } if (signingThreads < 1) { signingThreads = Runtime.getRuntime().availableProcessors(); } if ((signingMaximumThreads > 0) && (signingThreads > signingMaximumThreads)) { signingThreads = signingMaximumThreads; } getLog().info("Using " + signingThreads + " signing threads."); if (!"nbm-application".equals(project.getPackaging())) { throw new MojoExecutionException( "This goal only makes sense on project with nbm-application packaging."); } final Project antProject = antProject(); getLog().warn( "WARNING: Unsigned and self-signed WebStart applications are deprecated from JDK7u21 onwards. To ensure future correct functionality please use trusted certificate."); if (keystore != null && keystorealias != null && keystorepassword != null) { File ks = new File(keystore); if (!ks.exists()) { throw new MojoFailureException("Cannot find keystore file at " + ks.getAbsolutePath()); } else { //proceed.. 
} } else if (keystore != null || keystorepassword != null || keystorealias != null) { throw new MojoFailureException( "If you want to sign the jnlp application, you need to define all three keystore related parameters."); } else { File generatedKeystore = new File(outputDirectory, "generated.keystore"); if (!generatedKeystore.exists()) { getLog().warn("Keystore related parameters not set, generating a default keystore."); GenerateKey genTask = (GenerateKey) antProject.createTask("genkey"); genTask.setAlias("jnlp"); genTask.setStorepass("netbeans"); genTask.setDname("CN=" + System.getProperty("user.name")); genTask.setKeystore(generatedKeystore.getAbsolutePath()); genTask.execute(); } keystore = generatedKeystore.getAbsolutePath(); keystorepassword = "netbeans"; keystorealias = "jnlp"; } Taskdef taskdef = (Taskdef) antProject.createTask("taskdef"); taskdef.setClassname(MakeJnlp2.class.getName()); taskdef.setName("makejnlp"); taskdef.execute(); taskdef = (Taskdef) antProject.createTask("taskdef"); taskdef.setClassname(Jar.class.getName()); taskdef.setName("jar"); taskdef.execute(); taskdef = (Taskdef) antProject.createTask("taskdef"); taskdef.setClassname(VerifyJNLP.class.getName()); taskdef.setName("verifyjnlp"); taskdef.execute(); // +p try { final File webstartBuildDir = new File( outputDirectory + File.separator + "webstart" + File.separator + brandingToken); if (webstartBuildDir.exists()) { FileUtils.deleteDirectory(webstartBuildDir); } webstartBuildDir.mkdirs(); // P: copy webappResources --[ MavenResourcesExecution mavenResourcesExecution = new MavenResourcesExecution(webappResources, webstartBuildDir, project, encoding, Collections.EMPTY_LIST, Collections.EMPTY_LIST, session); mavenResourcesExecution.setEscapeWindowsPaths(true); mavenResourcesFiltering.filterResources(mavenResourcesExecution); // ]-- final String localCodebase = codebase != null ? 
codebase : webstartBuildDir.toURI().toString(); getLog().info("Generating webstartable binaries at " + webstartBuildDir.getAbsolutePath()); final File nbmBuildDirFile = new File(outputDirectory, brandingToken); // +p (needs to be before make jnlp) //TODO is it really netbeans/ if (masterJnlpFileName == null) { masterJnlpFileName = brandingToken; } Properties props = new Properties(); props.setProperty("jnlp.codebase", localCodebase); props.setProperty("app.name", brandingToken); props.setProperty("app.title", project.getName()); if (project.getOrganization() != null) { props.setProperty("app.vendor", project.getOrganization().getName()); } else { props.setProperty("app.vendor", "Nobody"); } String description = project.getDescription() != null ? project.getDescription() : "No Project Description"; props.setProperty("app.description", description); props.setProperty("branding.token", brandingToken); props.setProperty("master.jnlp.file.name", masterJnlpFileName); props.setProperty("netbeans.jnlp.fixPolicy", "false"); StringBuilder stBuilder = new StringBuilder(); if (additionalArguments != null) { StringTokenizer st = new StringTokenizer(additionalArguments); while (st.hasMoreTokens()) { String arg = st.nextToken(); if (arg.startsWith("-J")) { if (stBuilder.length() > 0) { stBuilder.append(' '); } stBuilder.append(arg.substring(2)); } } } props.setProperty("netbeans.run.params", stBuilder.toString()); final File masterJnlp = new File(webstartBuildDir, masterJnlpFileName + ".jnlp"); filterCopy(masterJnlpFile, "master.jnlp", masterJnlp, props); if (generateJnlpTimestamp) // \/\/\/\/ bad bad bad \/\/\/\/ { final File masterJnlpFileTmp = File.createTempFile(masterJnlpFileName + "_", ""); Files.append(JnlpUtils.getCurrentJnlpTimestamp() + "\n", masterJnlpFileTmp, Charset.forName("UTF-8")); ByteSink sink = Files.asByteSink(masterJnlpFileTmp, FileWriteMode.APPEND); sink.write(Files.toByteArray(masterJnlp)); Files.copy(masterJnlpFileTmp, masterJnlp); } File startup = 
copyLauncher(outputDirectory, nbmBuildDirFile); String masterJnlpStr = FileUtils.fileRead(masterJnlp); // P: JNLP-INF/APPLICATION_TEMPLATE.JNLP support --[ // this can be done better and will // ashamed if (generateJnlpApplicationTemplate) { File jnlpInfDir = new File(outputDirectory, "JNLP-INF"); getLog().info("Generate JNLP application template under: " + jnlpInfDir); jnlpInfDir.mkdirs(); File jnlpTemplate = new File(jnlpInfDir, "APPLICATION_TEMPLATE.JNLP"); masterJnlpStr = masterJnlpStr.replaceAll("(<jnlp.*codebase\\ *=\\ *)\"((?!\").)*", "$1\"*") .replaceAll("(<jnlp.*href\\ *=\\ *)\"((?!\").)*", "$1\"*"); FileUtils.fileWrite(jnlpTemplate, masterJnlpStr); File startupMerged = new File(outputDirectory, "startup-jnlpinf.jar"); Jar jar = (Jar) antProject.createTask("jar"); jar.setDestFile(startupMerged); jar.setFilesetmanifest((FilesetManifestConfig) EnumeratedAttribute .getInstance(FilesetManifestConfig.class, "merge")); FileSet jnlpInfDirectoryFileSet = new FileSet(); jnlpInfDirectoryFileSet.setDir(outputDirectory); jnlpInfDirectoryFileSet.setIncludes("JNLP-INF/**"); jar.addFileset(jnlpInfDirectoryFileSet); ZipFileSet startupJar = new ZipFileSet(); startupJar.setSrc(startup); jar.addZipfileset(startupJar); jar.execute(); startup = startupMerged; getLog().info("APPLICATION_TEMPLATE.JNLP generated - startup.jar: " + startup); } final JarsConfig startupConfig = new JarsConfig(); ManifestEntries startupManifestEntries = new ManifestEntries(); startupConfig.setManifestEntries(startupManifestEntries); DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); if (!validateJnlpDtd) { factory.setValidating(false); factory.setNamespaceAware(true); factory.setFeature("http://xml.org/sax/features/namespaces", false); factory.setFeature("http://xml.org/sax/features/validation", false); factory.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", false); factory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", 
false); } DocumentBuilder builder = factory.newDocumentBuilder(); final BufferedReader masterJnlpStrReader = new BufferedReader(new StringReader(masterJnlpStr)); if (generateJnlpTimestamp) { masterJnlpStrReader.readLine(); } Document doc = builder.parse(new InputSource(masterJnlpStrReader)); Element jnlpRoot = doc.getDocumentElement(); jarCodebase = jnlpRoot.getAttribute("codebase"); if (jarCodebase.isEmpty()) { jarCodebase = "*"; } startupManifestEntries.setCodebase(jarCodebase); XPath xpath = XPathFactory.newInstance().newXPath(); Node jnlpSecurityPermission = (Node) xpath.evaluate( "(/jnlp/security/all-permissions | /jnlp/security/j2ee-application-client-permissions)[1]", doc, XPathConstants.NODE); if (jnlpSecurityPermission == null) { jarPermissions = "sandbox"; jnlpSecurity = ""; } else { jarPermissions = "all-permissions"; jnlpSecurity = "<security><" + jnlpSecurityPermission.getNodeName() + "/></security>"; } startupManifestEntries.setPermissions(jarPermissions); if (applicationName == null) { String jnlpApplicationTitle = (String) xpath.evaluate("/jnlp/information/title", doc, XPathConstants.STRING); applicationName = jnlpApplicationTitle == null ? 
brandingToken : jnlpApplicationTitle; } startupManifestEntries.setApplicationName(applicationName); // +p if (autoManifestSecurityEntries) { if (jarsConfigs == null) { jarsConfigs = new ArrayList<JarsConfig>(); } jarsConfigs.add(0, startupConfig); } final List<SignJar.JarsConfig> signJarJarsConfigs = buildSignJarJarsConfigs(jarsConfigs); File jnlpDestination = new File(webstartBuildDir.getAbsolutePath() + File.separator + "startup.jar"); SignJar signTask = (SignJar) antProject.createTask("signjar"); signTask.setKeystore(keystore); signTask.setStorepass(keystorepassword); signTask.setAlias(keystorealias); if (keystoretype != null) { signTask.setStoretype(keystoretype); } signTask.setForce(signingForce); signTask.setTsacert(signingTsaCert); signTask.setTsaurl(signingTsaUrl); signTask.setMaxmemory(signingMaxMemory); signTask.setRetryCount(signingRetryCount); signTask.setUnsignFirst(signingRemoveExistingSignatures); signTask.setJarsConfigs(buildSignJarJarsConfigs(Collections.singletonList(startupConfig))); signTask.setBasedir(nbmBuildDirFile); signTask.setSignedjar(jnlpDestination); signTask.setJar(startup); signTask.setPack200(pack200); signTask.setPack200Effort(pack200Effort); signTask.execute(); // <-- all of this will be refactored soon ]-- // FileUtils.copyDirectoryStructureIfModified( nbmBuildDirFile, webstartBuildDir ); MakeJnlp2 jnlpTask = (MakeJnlp2) antProject.createTask("makejnlp"); jnlpTask.setOptimize(optimize); jnlpTask.setIncludelocales(includeLocales); jnlpTask.setDir(webstartBuildDir); jnlpTask.setCodebase(localCodebase); //TODO, how to figure verify excludes.. 
jnlpTask.setVerify(false); jnlpTask.setPermissions(jnlpSecurity); jnlpTask.setSignJars(true); jnlpTask.setAlias(keystorealias); jnlpTask.setKeystore(keystore); jnlpTask.setStorePass(keystorepassword); if (keystoretype != null) { jnlpTask.setStoreType(keystoretype); } jnlpTask.setSigningForce(signingForce); jnlpTask.setSigningTsaCert(signingTsaCert); jnlpTask.setSigningTsaUrl(signingTsaUrl); jnlpTask.setUnsignFirst(signingRemoveExistingSignatures); jnlpTask.setJarsConfigs(signJarJarsConfigs); jnlpTask.setSigningMaxMemory(signingMaxMemory); jnlpTask.setSigningRetryCount(signingRetryCount); jnlpTask.setBasedir(nbmBuildDirFile); jnlpTask.setNbThreads(signingThreads); jnlpTask.setProcessJarVersions(processJarVersions); jnlpTask.setPack200(pack200); jnlpTask.setPack200Effort(pack200Effort); FileSet fs = jnlpTask.createModules(); fs.setDir(nbmBuildDirFile); OrSelector or = new OrSelector(); AndSelector and = new AndSelector(); FilenameSelector inc = new FilenameSelector(); inc.setName("*/modules/**/*.jar"); or.addFilename(inc); inc = new FilenameSelector(); inc.setName("*/lib/**/*.jar"); or.addFilename(inc); inc = new FilenameSelector(); inc.setName("*/core/**/*.jar"); or.addFilename(inc); ModuleSelector ms = new ModuleSelector(); Parameter included = new Parameter(); included.setName("includeClusters"); included.setValue(""); Parameter excluded = new Parameter(); excluded.setName("excludeClusters"); excluded.setValue(""); Parameter exModules = new Parameter(); exModules.setName("excludeModules"); exModules.setValue(""); ms.setParameters(new Parameter[] { included, excluded, exModules }); and.add(or); and.add(ms); fs.addAnd(and); jnlpTask.execute(); Set<String> locales = jnlpTask.getExecutedLocales(); String extSnippet = generateExtensions(fs, antProject, ""); // "netbeans/" //branding DirectoryScanner ds = new DirectoryScanner(); ds.setBasedir(nbmBuildDirFile); final List<String> localeIncludes = new ArrayList<String>(); final List<String> localeExcludes = new 
ArrayList<String>(); localeIncludes.add("**/locale/*.jar"); if (includeLocales != null) { List<String> excludes = Splitter.on(',').trimResults().omitEmptyStrings() .splitToList(includeLocales); for (String exclude : (Collection<String>) CollectionUtils.subtract(locales, excludes)) { localeExcludes.add("**/locale/*_" + exclude + ".jar"); } } ds.setIncludes(localeIncludes.toArray(new String[localeIncludes.size()])); ds.setExcludes(localeExcludes.toArray(new String[localeExcludes.size()])); ds.scan(); String[] includes = ds.getIncludedFiles(); StringBuilder brandRefs = new StringBuilder( "<property name=\"jnlp.packEnabled\" value=\"" + String.valueOf(pack200) + "\"/>\n"); if (includes != null && includes.length > 0) { final File brandingDir = new File(webstartBuildDir, "branding"); brandingDir.mkdirs(); for (String incBran : includes) { File source = new File(nbmBuildDirFile, incBran); File dest = new File(brandingDir, source.getName()); brandRefs.append(" <jar href=\'branding/").append(dest.getName()).append("\'/>\n"); } final ExecutorService executorService = Executors.newFixedThreadPool(signingThreads); final List<Exception> threadException = new ArrayList<Exception>(); for (final String toSign : includes) { executorService.execute(new Runnable() { @Override public void run() { try { File toSignFile = new File(nbmBuildDirFile, toSign); SignJar signTask = (SignJar) antProject.createTask("signjar"); if (keystoretype != null) { signTask.setStoretype(keystoretype); } signTask.setKeystore(keystore); signTask.setStorepass(keystorepassword); signTask.setAlias(keystorealias); signTask.setForce(signingForce); signTask.setTsacert(signingTsaCert); signTask.setTsaurl(signingTsaUrl); signTask.setMaxmemory(signingMaxMemory); signTask.setRetryCount(signingRetryCount); signTask.setUnsignFirst(signingRemoveExistingSignatures); signTask.setJarsConfigs(signJarJarsConfigs); signTask.setJar(toSignFile); signTask.setDestDir(brandingDir); signTask.setBasedir(nbmBuildDirFile); 
signTask.setDestFlatten(true); signTask.setPack200(pack200); signTask.setPack200Effort(pack200Effort); signTask.execute(); } catch (Exception e) { threadException.add(e); } } }); } executorService.shutdown(); executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS); if (!threadException.isEmpty()) { throw threadException.get(0); } } File modulesJnlp = new File(webstartBuildDir.getAbsolutePath() + File.separator + "modules.jnlp"); props.setProperty("jnlp.branding.jars", brandRefs.toString()); props.setProperty("jnlp.resources", extSnippet); filterCopy(null, /* filename is historical */"branding.jnlp", modulesJnlp, props); if (verifyJnlp) { getLog().info("Verifying generated webstartable content."); VerifyJNLP verifyTask = (VerifyJNLP) antProject.createTask("verifyjnlp"); FileSet verify = new FileSet(); verify.setFile(masterJnlp); verifyTask.addConfiguredFileset(verify); verifyTask.execute(); } // create zip archive if (destinationFile.exists()) { destinationFile.delete(); } ZipArchiver archiver = new ZipArchiver(); if (codebase != null) { getLog().warn("Defining <codebase>/${nbm.webstart.codebase} is generally unnecessary"); archiver.addDirectory(webstartBuildDir); } else { archiver.addDirectory(webstartBuildDir, null, new String[] { "**/*.jnlp" }); for (final File jnlp : webstartBuildDir.listFiles()) { if (!jnlp.getName().endsWith(".jnlp")) { continue; } archiver.addResource(new PlexusIoResource() { public @Override InputStream getContents() throws IOException { return new ByteArrayInputStream(FileUtils.fileRead(jnlp, "UTF-8") .replace(localCodebase, "$$codebase").getBytes("UTF-8")); } public @Override long getLastModified() { return jnlp.lastModified(); } public @Override boolean isExisting() { return true; } public @Override long getSize() { return UNKNOWN_RESOURCE_SIZE; } public @Override URL getURL() throws IOException { return null; } public @Override String getName() { return jnlp.getAbsolutePath(); } public @Override boolean isFile() { return 
true; } public @Override boolean isDirectory() { return false; } }, jnlp.getName(), archiver.getDefaultFileMode()); } } File jdkhome = new File(System.getProperty("java.home")); File servlet = new File(jdkhome, "sample/jnlp/servlet/jnlp-servlet.jar"); if (!servlet.exists()) { servlet = new File(jdkhome.getParentFile(), "sample/jnlp/servlet/jnlp-servlet.jar"); if (!servlet.exists()) { servlet = File.createTempFile("nbm_", "jnlp-servlet.jar"); FileUtils.copyURLToFile( Thread.currentThread().getContextClassLoader().getResource("jnlp-servlet.jar"), servlet); } } if (servlet.exists()) { File servletDir = new File(webstartBuildDir, "WEB-INF/lib"); servletDir.mkdirs(); signTask = (SignJar) antProject.createTask("signjar"); signTask.setKeystore(keystore); signTask.setStorepass(keystorepassword); signTask.setAlias(keystorealias); signTask.setForce(signingForce); signTask.setTsacert(signingTsaCert); signTask.setTsaurl(signingTsaUrl); signTask.setMaxmemory(signingMaxMemory); signTask.setRetryCount(signingRetryCount); signTask.setJar(servlet); signTask.setSignedjar(new File(servletDir, "jnlp-servlet.jar")); signTask.execute(); //archiver.addFile( servlet, "WEB-INF/lib/jnlp-servlet.jar" ); archiver.addResource(new PlexusIoResource() { public @Override InputStream getContents() throws IOException { return new ByteArrayInputStream(("" + "<web-app>\n" + " <servlet>\n" + " <servlet-name>JnlpDownloadServlet</servlet-name>\n" + " <servlet-class>jnlp.sample.servlet.JnlpDownloadServlet</servlet-class>\n" + " </servlet>\n" + " <servlet-mapping>\n" + " <servlet-name>JnlpDownloadServlet</servlet-name>\n" + " <url-pattern>*.jnlp</url-pattern>\n" + " </servlet-mapping>\n" + " <servlet-mapping>\n" + " <servlet-name>JnlpDownloadServlet</servlet-name>\n" + " <url-pattern>*.jar</url-pattern>\n" + " </servlet-mapping>\n" + " <mime-mapping>\n" + " <extension>jnlp</extension>\n" + " <mime-type>application/x-java-jnlp-file</mime-type>\n" + " </mime-mapping>\n" + "</web-app>\n").getBytes()); } 
public @Override long getLastModified() { return UNKNOWN_MODIFICATION_DATE; } public @Override boolean isExisting() { return true; } public @Override long getSize() { return UNKNOWN_RESOURCE_SIZE; } public @Override URL getURL() throws IOException { return null; } public @Override String getName() { return "web.xml"; } public @Override boolean isFile() { return true; } public @Override boolean isDirectory() { return false; } }, "WEB-INF/web.xml", archiver.getDefaultFileMode()); } archiver.setDestFile(destinationFile); archiver.createArchive(); if (signWar) { signTask = (SignJar) antProject.createTask("signjar"); signTask.setKeystore(keystore); signTask.setStorepass(keystorepassword); signTask.setAlias(keystorealias); signTask.setForce(signingForce); signTask.setTsacert(signingTsaCert); signTask.setTsaurl(signingTsaUrl); signTask.setMaxmemory(signingMaxMemory); signTask.setRetryCount(signingRetryCount); signTask.setJar(destinationFile); signTask.execute(); } // attach standalone so that it gets installed/deployed projectHelper.attachArtifact(project, "war", webstartClassifier, destinationFile); } catch (Exception ex) { throw new MojoExecutionException("Error creating webstartable binary.", ex); } }
From source file:org.cytoscape.kddn.internal.KddnMethods.java
/**
 * Solves the knowledge-fused differential dependency network (KDDN) problem.
 * <p>
 * For each of the {@code kddn.p} variables, a per-node regression is solved by
 * block coordinate descent ({@link BCD}) using that variable as the response and
 * the remaining p-1 variables (in both conditions) as predictors. The per-node
 * problems are independent and are run in parallel on a fixed thread pool sized
 * to the number of available processors. Afterwards the adjacency matrix is
 * symmetrized (requiring sign consistency between the two directed estimates)
 * and edges of the differential network are collected.
 *
 * @param kddn settings bundle: data matrices ({@code data1}, {@code data2}),
 *             problem size {@code p}, regularization parameters
 *             ({@code lambda1}, {@code lambda2}, {@code theta}), prior-knowledge
 *             weights {@code W}, and variable names {@code varList}
 * @return the fitted {@link KddnResults} including adjacency, beta coefficients
 *         and a placeholder p-value table (column 2 is initialized to -1;
 *         presumably filled in by a later permutation test — not visible here)
 * @throws InterruptedException if waiting for the worker pool is interrupted
 */
public static KddnResults solveDDN(final KddnSettings kddn) throws InterruptedException {
    // loop through all variables to calculate beta
    final int[] idx = new int[kddn.p];
    for (int i = 0; i < kddn.p; i++)
        idx[i] = i;

    // beta results in rows; columns 0..p-1 are condition 1, p..2p-1 condition 2
    final double[][] beta = new double[kddn.p][2 * kddn.p];
    final int[][] adjacentMatrix = new int[kddn.p][2 * kddn.p];

    ExecutorService exec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
    try {
        for (final Integer i : idx) {
            // NOTE(review): the Future returned by submit() is discarded, so any
            // exception thrown inside run() (other than the handled BCD failure)
            // is silently lost — consider collecting and checking the Futures.
            exec.submit(new Runnable() {
                @Override
                public void run() {
                    // Response column i and remaining predictor columns, per condition.
                    double[] y1 = getColumn(kddn.data1, i);
                    double[] y2 = getColumn(kddn.data2, i);
                    double[][] X1 = removeColumn(kddn.data1, i);
                    double[][] X2 = removeColumn(kddn.data2, i);

                    // Per-coefficient l1 penalties, down-weighted by prior knowledge W.
                    double[] l1 = new double[2 * (kddn.p - 1)];
                    double[] l1a = new double[2 * kddn.p];
                    for (int j = 0; j < 2 * kddn.p; j++)
                        l1a[j] = (1 - kddn.theta * kddn.W[i][j]) * kddn.lambda1;

                    // Drop the two entries belonging to variable i itself (one per
                    // condition), compacting l1a (length 2p) into l1 (length 2(p-1)).
                    if (i == 0) {
                        System.arraycopy(l1a, 1, l1, 0, kddn.p - 1);
                        System.arraycopy(l1a, kddn.p + 1, l1, kddn.p - 1, kddn.p - 1);
                    } else if (i == kddn.p - 1) {
                        System.arraycopy(l1a, 0, l1, 0, kddn.p - 1);
                        System.arraycopy(l1a, kddn.p, l1, kddn.p - 1, kddn.p - 1);
                    } else {
                        System.arraycopy(l1a, 0, l1, 0, i);
                        System.arraycopy(l1a, kddn.p, l1, kddn.p - 1, i);
                        System.arraycopy(l1a, i + 1, l1, i, kddn.p - i - 1);
                        System.arraycopy(l1a, kddn.p + i + 1, l1, kddn.p + i - 1, kddn.p - i - 1);
                    }

                    BCD oneNode = new BCD(y1, y2, X1, X2, l1, kddn.lambda2);
                    if (oneNode.solve()) {
                        // Scatter the 2(p-1)-sized solution back into row i of the
                        // 2p-wide beta/adjacency matrices, re-inserting the gap at
                        // position i (and p+i). Each worker writes only row i, so
                        // no two tasks touch the same array cells.
                        if (i > 0 && i < kddn.p - 1) {
                            System.arraycopy(oneNode.getBeta(), 0, beta[i], 0, i);
                            System.arraycopy(oneNode.getBeta(), kddn.p - 1, beta[i], kddn.p, i);
                            System.arraycopy(oneNode.getBeta(), i, beta[i], i + 1, kddn.p - 1 - i);
                            System.arraycopy(oneNode.getBeta(), kddn.p - 1 + i, beta[i], kddn.p + i + 1,
                                    kddn.p - 1 - i);
                            System.arraycopy(oneNode.getAdj(), 0, adjacentMatrix[i], 0, i);
                            System.arraycopy(oneNode.getAdj(), kddn.p - 1, adjacentMatrix[i], kddn.p, i);
                            System.arraycopy(oneNode.getAdj(), i, adjacentMatrix[i], i + 1, kddn.p - 1 - i);
                            System.arraycopy(oneNode.getAdj(), kddn.p - 1 + i, adjacentMatrix[i], kddn.p + i + 1,
                                    kddn.p - 1 - i);
                        } else if (i == 0) {
                            System.arraycopy(oneNode.getBeta(), 0, beta[i], 1, kddn.p - 1);
                            System.arraycopy(oneNode.getBeta(), kddn.p - 1, beta[i], kddn.p + 1, kddn.p - 1);
                            System.arraycopy(oneNode.getAdj(), 0, adjacentMatrix[i], 1, kddn.p - 1);
                            System.arraycopy(oneNode.getAdj(), kddn.p - 1, adjacentMatrix[i], kddn.p + 1,
                                    kddn.p - 1);
                        } else if (i == kddn.p - 1) {
                            System.arraycopy(oneNode.getBeta(), 0, beta[i], 0, kddn.p - 1);
                            System.arraycopy(oneNode.getBeta(), kddn.p - 1, beta[i], kddn.p, kddn.p - 1);
                            System.arraycopy(oneNode.getAdj(), 0, adjacentMatrix[i], 0, kddn.p - 1);
                            System.arraycopy(oneNode.getAdj(), kddn.p - 1, adjacentMatrix[i], kddn.p, kddn.p - 1);
                        }
                    } else
                        // Solver failure is only reported, not propagated; row i
                        // then stays all-zero.
                        System.err.println("BCD error!");
                }
            });
        }
    } finally {
        // Wait for all per-node fits; awaitTermination also establishes the
        // happens-before needed to read beta/adjacentMatrix safely below.
        exec.shutdown();
        exec.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    }

    // symetrify adjacent matrix, requires sign consistency:
    // agreeing signs -> keep edge with that sign; conflicting signs -> drop edge.
    // Done separately for the condition-1 half ([i][j]) and condition-2 half
    // ([i][j+p]).
    for (int i = 0; i < kddn.p - 1; i++)
        for (int j = i + 1; j < kddn.p; j++) {
            if ((adjacentMatrix[i][j] + adjacentMatrix[j][i] > 0)
                    && (adjacentMatrix[i][j] * adjacentMatrix[j][i] >= 0)) {
                adjacentMatrix[i][j] = 1;
                adjacentMatrix[j][i] = 1;
            } else if ((adjacentMatrix[i][j] + adjacentMatrix[j][i] < 0)
                    && (adjacentMatrix[i][j] * adjacentMatrix[j][i] >= 0)) {
                adjacentMatrix[i][j] = -1;
                adjacentMatrix[j][i] = -1;
            } else if (adjacentMatrix[i][j] * adjacentMatrix[j][i] < 0) {
                adjacentMatrix[i][j] = 0;
                adjacentMatrix[j][i] = 0;
            }
            if ((adjacentMatrix[i][j + kddn.p] + adjacentMatrix[j][i + kddn.p] > 0)
                    && (adjacentMatrix[i][j + kddn.p] * adjacentMatrix[j][i + kddn.p] >= 0)) {
                adjacentMatrix[i][j + kddn.p] = 1;
                adjacentMatrix[j][i + kddn.p] = 1;
            } else if ((adjacentMatrix[i][j + kddn.p] + adjacentMatrix[j][i + kddn.p] < 0)
                    && (adjacentMatrix[i][j + kddn.p] * adjacentMatrix[j][i + kddn.p] >= 0)) {
                adjacentMatrix[i][j + kddn.p] = -1;
                adjacentMatrix[j][i + kddn.p] = -1;
            } else if (adjacentMatrix[i][j + kddn.p] * adjacentMatrix[j][i + kddn.p] < 0) {
                adjacentMatrix[i][j + kddn.p] = 0;
                adjacentMatrix[j][i + kddn.p] = 0;
            }
        }

    KddnResults woPvalue = new KddnResults(kddn.varList, beta, adjacentMatrix);

    // Count differential edges (upper triangle only) to size the p-value table.
    int[][] difNet = woPvalue.getDifferentialNetwork();
    int numDif = 0;
    for (int i = 0; i < kddn.p - 1; i++)
        for (int j = i + 1; j < kddn.p; j++) {
            if (difNet[i][j] != 0)
                numDif++;
        }

    // One row per differential edge: [i, j, p-value placeholder (-1), type];
    // type 1 = difNet entry +1, type 2 = otherwise.
    double[][] pValue = new double[numDif][4];
    int rowId = 0;
    for (int i = 0; i < kddn.p - 1; i++)
        for (int j = i + 1; j < kddn.p; j++) {
            if (difNet[i][j] != 0) {
                pValue[rowId][0] = i;
                pValue[rowId][1] = j;
                pValue[rowId][2] = -1;
                if (difNet[i][j] == 1)
                    pValue[rowId][3] = 1;
                else
                    pValue[rowId][3] = 2;
                rowId++;
            }
        }

    return new KddnResults(kddn.varList, adjacentMatrix, beta, pValue);
}
From source file:org.dllearner.algorithms.qtl.experiments.SPARQLLearningProblemsGenerator.java
public void generateBenchmark(int nrOfSPARQLQueries, final int minDepth, final int maxDepth, int minNrOfExamples) { Collection<OWLClass> classes = getClasses(); ArrayList<OWLClass> classesList = new ArrayList<>(classes); Collections.shuffle(classesList, new Random(123)); classes = classesList;// ww w .j a v a 2 s . c om // classes = Sets.newHashSet(new OWLClassImpl(IRI.create("http://semantics.crl.ibm.com/univ-bench-dl.owl#TennisFan"))); // ExecutorService tp = Executors.newFixedThreadPool(threadCount); List<Path> allPaths = new ArrayList<>(); // ThreadPoolExecutor tp = new CustomFutureReturningExecutor( // threadCount, threadCount, // 5000L, TimeUnit.MILLISECONDS, // new ArrayBlockingQueue<Runnable>(classes.size(), true)); ExecutorService tp = Executors.newFixedThreadPool(threadCount); CompletionService<List<Path>> ecs = new ExecutorCompletionService<List<Path>>(tp); JDKRandomGenerator rndGen = new JDKRandomGenerator(); rndGen.setSeed(123); int nrOfQueriesPerDepth = nrOfSPARQLQueries / (maxDepth - minDepth + 1); // for each depth <= maxDepth for (int depth = minDepth; depth <= maxDepth; depth++) { System.out.println("Generating " + nrOfQueriesPerDepth + " queries for depth " + depth); Iterator<OWLClass> iterator = classes.iterator(); // generate paths of depths <= maxDepth List<Path> pathsForDepth = new ArrayList<>(); while (pathsForDepth.size() < nrOfQueriesPerDepth && iterator.hasNext()) { Collection<Future<List<Path>>> futures = new ArrayList<>(); try { int cnt = 0; while (iterator.hasNext() && (pathsForDepth.size() + ++cnt < nrOfQueriesPerDepth)) { // pick next class OWLClass cls = iterator.next(); // int depth = rndGen.nextInt(maxDepth) + 1; Future<List<Path>> future = ecs .submit(new PathDetectionTask(dataDir, ks, schema, cls, depth, minNrOfExamples)); futures.add(future); } int n = futures.size(); try { for (int i = 0; i < n; ++i) { Future<List<Path>> f = ecs.take(); if (!f.isCancelled()) { List<Path> paths = f.get(); if (paths != null) { for (int j = 0; j 
< Math.min(paths.size(), maxPathsPerClassAndDepth); j++) { pathsForDepth.add(paths.get(j)); } } // System.out.println("#Paths: " + paths.size()); // paths.forEach(p -> System.out.println(p)); if (pathsForDepth.size() >= nrOfQueriesPerDepth) { break; } } } } catch (InterruptedException | ExecutionException e) { e.printStackTrace(); } } finally { for (Future<List<Path>> f : futures) { f.cancel(true); } } } allPaths.addAll(pathsForDepth); } // for (Future<Path> future : futures) { // try { // Path path = future.get(); // if(path != null) { // paths.add(path); // } // if(paths.size() == nrOfSPARQLQueries) { // System.err.println("Benchmark generation finished. Stopping all running threads."); // tp.shutdownNow(); // } // } catch (InterruptedException | ExecutionException e) { // e.printStackTrace(); // } // if(paths.size() == nrOfSPARQLQueries) { // System.err.println("Benchmark generation finished. Stopping all running threads."); // tp.shutdownNow(); // } // } tp.shutdownNow(); try { tp.awaitTermination(1, TimeUnit.HOURS); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } // try { // tp.awaitTermination(1, TimeUnit.DAYS); // } catch (InterruptedException e) { // e.printStackTrace(); // } // write queries to disk String queries = ""; for (Path path : allPaths) { System.out.println(path); queries += path.asSPARQLQuery(Var.alloc("s")) + "\n"; } File file = new File(benchmarkDirectory, "queries_" + nrOfSPARQLQueries + "_" + minDepth + "-" + maxDepth + "_" + minNrOfExamples + ".txt"); try { Files.write(queries, file, Charsets.UTF_8); } catch (IOException e) { e.printStackTrace(); } }
From source file:org.apache.usergrid.rest.UniqueCatsIT.java
/**
 * Stress test: many threads concurrently POST entities ("cats") with the same
 * unique name against a running Usergrid cluster, verifying that exactly one
 * create succeeds per name and every other attempt is rejected as a duplicate.
 * <p>
 * NOTE(review): relies on a live server at localhost:8080 and on the
 * class-level counters (successCounter, dupCounter, errorCounter) — hence the
 * {@code @Ignore}; intended for manual runs against a prod-like cluster.
 */
@Test
@Ignore("Intended for use against prod-like cluster")
public void testDuplicatePrevention() throws Exception {

    int numThreads = 20;
    int poolSize = 20;
    int numCats = 100;

    // name -> UUIDs actually created; name -> rejected request payloads.
    // Both are synchronized because worker threads mutate them concurrently.
    Multimap<String, String> catsCreated = Multimaps.synchronizedMultimap(HashMultimap.create());
    Multimap<String, Map<String, Object>> dupsRejected = Multimaps.synchronizedMultimap(HashMultimap.create());

    ExecutorService execService = Executors.newFixedThreadPool(poolSize);

    Client client = ClientBuilder.newClient();

    final MetricRegistry metrics = new MetricRegistry();
    final Timer responses = metrics.timer(name(UniqueCatsIT.class, "responses"));
    long startTime = System.currentTimeMillis();

    final AtomicBoolean failed = new AtomicBoolean(false);

    String[] targetHosts = { "http://localhost:8080" };

    for (int i = 0; i < numCats; i++) {

        if (failed.get()) {
            break;
        }
        // Fresh random suffix per round so each round targets a new unique name.
        String randomizer = RandomStringUtils.randomAlphanumeric(8);

        // multiple threads simultaneously trying to create a cat with the same propertyName
        for (int j = 0; j < numThreads; j++) {

            if (failed.get()) {
                break;
            }

            final String name = "uv_test_cat_" + randomizer;
            // Round-robin over target hosts (only one configured here).
            final String host = targetHosts[j % targetHosts.length];

            execService.submit(() -> {

                // Request payload; double-brace init captures the shared name.
                Map<String, Object> form = new HashMap<String, Object>() {
                    {
                        put("name", name);
                    }
                };

                Timer.Context time = responses.time();
                try {
                    WebTarget target = client.target(host).path(
                            //"/test-organization/test-app/cats" );
                            "/dmjohnson/sandbox/cats");
                    //logger.info("Posting cat {} to host {}", catname, host);

                    Response response = target.request()
                            //.post( Entity.entity( form, MediaType.APPLICATION_FORM_URLENCODED ));
                            .post(Entity.entity(form, MediaType.APPLICATION_JSON));

                    // Error bodies are read as plain text; success bodies as ApiResponse.
                    org.apache.usergrid.rest.test.resource.model.ApiResponse apiResponse = null;
                    String responseAsString = "";
                    if (response.getStatus() >= 400) {
                        responseAsString = response.readEntity(String.class);
                    } else {
                        apiResponse = response
                                .readEntity(org.apache.usergrid.rest.test.resource.model.ApiResponse.class);
                    }

                    if (response.getStatus() == 200 || response.getStatus() == 201) {
                        // Created — record the entity UUID under its unique name.
                        catsCreated.put(name, apiResponse.getEntity().getUuid().toString());
                        successCounter.incrementAndGet();

                    } else if (response.getStatus() == 400
                            && responseAsString.contains("DuplicateUniquePropertyExistsException")) {
                        // Expected rejection: unique-property constraint held.
                        dupsRejected.put(name, form);
                        dupCounter.incrementAndGet();

                    } else {
                        // Anything else counts as a real error.
                        logger.error("Cat creation failed status {} message {}", response.getStatus(),
                                responseAsString);
                        errorCounter.incrementAndGet();
                    }

                } catch (ProcessingException e) {
                    errorCounter.incrementAndGet();
                    if (e.getCause() instanceof ConnectException) {
                        logger.error("Error connecting to " + host);
                    } else {
                        logger.error("Error", e);
                    }
                } catch (Exception e) {
                    errorCounter.incrementAndGet();
                    logger.error("Error", e);
                }
                time.stop();
            });
        }
    }

    execService.shutdown();

    try {
        // Poll until all submitted requests have finished.
        while (!execService.awaitTermination(60, TimeUnit.SECONDS)) {
            System.out.println("Waiting...");
        }
    } catch (InterruptedException e) {
        // NOTE(review): interrupt status is swallowed here; consider
        // Thread.currentThread().interrupt().
        e.printStackTrace();
    }

    long endTime = System.currentTimeMillis();

    logger.info("Total time {}s", (endTime - startTime) / 1000);

    DecimalFormat format = new DecimalFormat("##.###");

    // Timer snapshot values are in nanoseconds; divide to report seconds.
    logger.info(
            "Timed {} requests:\n" + "mean rate {}/s\n" + "min {}s\n" + "max {}s\n" + "mean {}s",
            responses.getCount(), format.format(responses.getMeanRate()),
            format.format((double) responses.getSnapshot().getMin() / 1000000000),
            format.format((double) responses.getSnapshot().getMax() / 1000000000),
            format.format(responses.getSnapshot().getMean() / 1000000000));

    logger.info("Error count {} ratio = {}", errorCounter.get(),
            (float) errorCounter.get() / (float) responses.getCount());
    logger.info("Success count = {}", successCounter.get());
    logger.info("Rejected dup count = {}", dupCounter.get());

    // Verify: no unique name may have produced more than one created entity.
    int catCount = 0;
    int catnamesWithDuplicates = 0;
    for (String name : catsCreated.keySet()) {
        //Collection<Map<String, String>> forms =
        Collection<String> forms = catsCreated.get(name);
        if (forms.size() > 1) {
            catnamesWithDuplicates++;
            logger.info("Duplicate " + name);
        }
        catCount++;
    }

    Assert.assertEquals(0, catnamesWithDuplicates);
    Assert.assertEquals(0, errorCounter.get());
    Assert.assertEquals(numCats, successCounter.get());
    Assert.assertEquals(numCats, catCount);
}
From source file:de.huberlin.cuneiform.compiler.local.LocalDispatcher.java
protected Set<JsonReportEntry> dispatch(Invocation invocation) throws IOException, InterruptedException, NotDerivableException, JSONException { File scriptFile;//from w ww. j a v a 2 s. c om Process process; int exitValue; Set<JsonReportEntry> report; String line; String[] arg; String value; int i; StringBuffer buf; File location; File reportFile; StreamConsumer stdoutConsumer, errConsumer; ExecutorService executor; String signature; Path srcPath, destPath; File successMarker; if (invocation == null) throw new NullPointerException("Invocation must not be null."); if (!invocation.isReady()) throw new RuntimeException("Cannot dispatch invocation that is not ready."); location = new File(buildDir.getAbsolutePath() + "/" + invocation.getSignature()); successMarker = new File(location.getAbsolutePath() + "/" + SUCCESS_FILENAME); reportFile = new File(location.getAbsolutePath() + "/" + Invocation.REPORT_FILENAME); if (!successMarker.exists()) { if (location.exists()) FileUtils.deleteDirectory(location); if (!location.mkdirs()) throw new IOException("Could not create invocation location."); scriptFile = new File(location.getAbsolutePath() + "/" + SCRIPT_FILENAME); try (BufferedWriter writer = new BufferedWriter(new FileWriter(scriptFile, false))) { // write away script writer.write(invocation.toScript()); } scriptFile.setExecutable(true); for (String filename : invocation.getStageInList()) { if (filename.charAt(0) != '/' && filename.indexOf('_') >= 0) { signature = filename.substring(0, filename.indexOf('_')); srcPath = FileSystems.getDefault() .getPath(buildDir.getAbsolutePath() + "/" + signature + "/" + filename); destPath = FileSystems.getDefault() .getPath(buildDir.getAbsolutePath() + "/" + invocation.getSignature() + "/" + filename); Files.createSymbolicLink(destPath, srcPath); } } arg = new String[] { "/usr/bin/time", "-a", "-o", location.getAbsolutePath() + "/" + Invocation.REPORT_FILENAME, "-f", "{" + JsonReportEntry.ATT_TIMESTAMP + ":" + 
System.currentTimeMillis() + "," + JsonReportEntry.ATT_RUNID + ":\"" + invocation.getDagId() + "\"," + JsonReportEntry.ATT_TASKID + ":" + invocation.getTaskNodeId() + "," + JsonReportEntry.ATT_TASKNAME + ":\"" + invocation.getTaskName() + "\"," + JsonReportEntry.ATT_LANG + ":\"" + invocation.getLangLabel() + "\"," + JsonReportEntry.ATT_INVOCID + ":" + invocation.getSignature() + "," + JsonReportEntry.ATT_KEY + ":\"" + JsonReportEntry.KEY_INVOC_TIME + "\"," + JsonReportEntry.ATT_VALUE + ":" + "{\"realTime\":%e,\"userTime\":%U,\"sysTime\":%S," + "\"maxResidentSetSize\":%M,\"avgResidentSetSize\":%t," + "\"avgDataSize\":%D,\"avgStackSize\":%p,\"avgTextSize\":%X," + "\"nMajPageFault\":%F,\"nMinPageFault\":%R," + "\"nSwapOutMainMem\":%W,\"nForcedContextSwitch\":%c," + "\"nWaitContextSwitch\":%w,\"nIoRead\":%I,\"nIoWrite\":%O," + "\"nSocketRead\":%r,\"nSocketWrite\":%s,\"nSignal\":%k}}", scriptFile.getAbsolutePath() }; // run script process = Runtime.getRuntime().exec(arg, null, location); executor = Executors.newCachedThreadPool(); stdoutConsumer = new StreamConsumer(process.getInputStream()); executor.execute(stdoutConsumer); errConsumer = new StreamConsumer(process.getErrorStream()); executor.execute(errConsumer); executor.shutdown(); exitValue = process.waitFor(); if (!executor.awaitTermination(4, TimeUnit.SECONDS)) throw new RuntimeException("Consumer threads did not finish orderly."); try (BufferedWriter reportWriter = new BufferedWriter(new FileWriter(reportFile, true))) { if (exitValue != 0) { System.err.println("[script]"); try (BufferedReader reader = new BufferedReader(new StringReader(invocation.toScript()))) { i = 0; while ((line = reader.readLine()) != null) System.err.println(String.format("%02d %s", ++i, line)); } System.err.println("[out]"); try (BufferedReader reader = new BufferedReader( new StringReader(stdoutConsumer.getContent()))) { while ((line = reader.readLine()) != null) System.err.println(line); } System.err.println("[err]"); try 
(BufferedReader reader = new BufferedReader(new StringReader(errConsumer.getContent()))) { while ((line = reader.readLine()) != null) System.err.println(line); } System.err.println("[end]"); throw new RuntimeException("Invocation of task '" + invocation.getTaskName() + "' with signature " + invocation.getSignature() + " terminated with non-zero exit value. Exit value was " + exitValue + "."); } try (BufferedReader reader = new BufferedReader(new StringReader(stdoutConsumer.getContent()))) { buf = new StringBuffer(); while ((line = reader.readLine()) != null) buf.append(line.replaceAll("\\\\", "\\\\\\\\").replaceAll("\"", "\\\"")).append('\n'); value = buf.toString(); if (!value.isEmpty()) reportWriter.write(new JsonReportEntry(invocation, JsonReportEntry.KEY_INVOC_STDOUT, value) .toString()); } try (BufferedReader reader = new BufferedReader(new StringReader(errConsumer.getContent()))) { buf = new StringBuffer(); while ((line = reader.readLine()) != null) buf.append(line.replaceAll("\\\\", "\\\\\\\\").replaceAll("\"", "\\\"")).append('\n'); value = buf.toString(); if (!value.isEmpty()) reportWriter.write(new JsonReportEntry(invocation, JsonReportEntry.KEY_INVOC_STDERR, value) .toString()); } } } // gather report report = new HashSet<>(); try (BufferedReader reader = new BufferedReader(new FileReader(reportFile))) { while ((line = reader.readLine()) != null) { line = line.trim(); if (line.isEmpty()) continue; report.add(new JsonReportEntry(line)); } } invocation.evalReport(report); if (!successMarker.exists()) if (!successMarker.createNewFile()) throw new IOException("Could not create success marker."); return report; }
From source file:com.cloud.storage.resource.VmwareStorageProcessor.java
private void rescanAllHosts(VmwareContext context, List<Pair<ManagedObjectReference, String>> lstHosts) throws Exception { ExecutorService executorService = Executors.newFixedThreadPool(lstHosts.size()); final List<Exception> exceptions = new ArrayList<Exception>(); for (Pair<ManagedObjectReference, String> hostPair : lstHosts) { HostMO host = new HostMO(context, hostPair.first()); HostStorageSystemMO hostStorageSystem = host.getHostStorageSystemMO(); boolean iScsiHbaConfigured = false; for (HostHostBusAdapter hba : hostStorageSystem.getStorageDeviceInfo().getHostBusAdapter()) { if (hba instanceof HostInternetScsiHba) { // just finding an instance of HostInternetScsiHba means that we have found at least one configured iSCSI HBA // at least one iSCSI HBA must be configured before a CloudStack user can use this host for iSCSI storage iScsiHbaConfigured = true; final String iScsiHbaDevice = hba.getDevice(); final HostStorageSystemMO hss = hostStorageSystem; executorService.submit(new Thread() { @Override/*from w ww . j a va 2 s. c o m*/ public void run() { try { hss.rescanHba(iScsiHbaDevice); hss.rescanVmfs(); } catch (Exception ex) { synchronized (exceptions) { exceptions.add(ex); } } } }); } } if (!iScsiHbaConfigured) { throw new Exception("An iSCSI HBA must be configured before a host can use iSCSI storage."); } } executorService.shutdown(); if (!executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.MINUTES)) { throw new Exception("The system timed out before completing the task 'rescanAllHosts'."); } if (exceptions.size() > 0) { throw new Exception(exceptions.get(0).getMessage()); } }