List of usage examples for java.util.concurrent.ForkJoinPool: the no-argument constructor ForkJoinPool()
public ForkJoinPool()
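The no-argument constructor creates a pool whose target parallelism equals the number of available processors, using the default thread factory and no handler for worker threads that die from unrecoverable errors. Before the source-file examples below, here is a minimal, self-contained sketch of the typical construct/submit/shutdown lifecycle (the class name and values are illustrative only, not from any of the sources):

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinTask;

public class ForkJoinPoolDemo {
    public static void main(String[] args) {
        // parallelism defaults to Runtime.getRuntime().availableProcessors()
        ForkJoinPool pool = new ForkJoinPool();
        try {
            // submit() returns a ForkJoinTask that can be joined later;
            // invoke() would submit and wait in a single call.
            ForkJoinTask<Integer> task = pool.submit(() -> 6 * 7);
            System.out.println(task.join()); // prints 42
        } finally {
            pool.shutdown(); // worker threads are daemons, but explicit shutdown is good practice
        }
    }
}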
From source file:org.aroyomc.nexus.strikes.utils.SqlManager.java
/**
 * @param driver   Location of the class that provides the SQL driver
 * @param url      URL of the SQL server
 * @param username SQL server username
 * @param password SQL server password
 * @throws SQLException When an issue occurs while connecting
 */
public SqlManager(String driver, String url, String username, String password) throws SQLException {
    source = new BasicDataSource();
    source.setDriverClassName(driver);
    source.setUrl(url);
    source.setUsername(username);
    source.setPassword(password);
    query = new AsyncQueryRunner(new ForkJoinPool());
}
From source file:org.rhwlab.BHCnotused.ThreadedAlgorithm.java
@Override
public void run() {
    while (clusters.size() > 1) {
        System.out.printf("\n%d Clusters\n", clusters.size());
        Cluster T = maximumRCluster();
        if (T == null) {
            return;
        }
        T.printCluster(System.out);
        // System.out.printf("size=%d,dpm=%e,like=%e,pi=%f,pi*like=%e,r=%.20f,d=%s,Gam=%s,f=%s,d2=%s\n",
        //     T.data.getN(),T.dpm,T.data.likelihood(),T.pi,T.pi*T.data.likelihood(),T.r,T.d,T.gammaN,T.f,T.d2);

        // remove the children of the max r pair
        pairs.remove(T.left);
        pairs.remove(T.right);
        for (HashMap<Cluster, Cluster> map : pairs.values()) {
            map.remove(T.left);
            map.remove(T.right);
        }
        clusters.remove(T.left);
        clusters.remove(T.right);

        // make new pairs with all the clusters
        HashMap<Cluster, Cluster> map = new HashMap<>();
        MergeAction merge = new MergeAction(clusters, T, 0, clusters.size() - 1, map);
        ForkJoinPool pool = new ForkJoinPool();
        pool.invoke(merge);
        pairs.put(T, map);
        clusters.add(T);
    }
}
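A detail worth noting: this loop allocates a fresh ForkJoinPool on every iteration and never shuts it down, so each merge pass pays thread-creation cost. Assuming MergeAction does not depend on pool identity, hoisting the pool out of the loop is the usual alternative; a runnable sketch of that reuse pattern (the demo class and trivial action are illustrative stand-ins):

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveAction;

public class ReusedPoolDemo {
    public static void main(String[] args) {
        // One pool for all iterations; worker threads are created once and reused.
        ForkJoinPool pool = new ForkJoinPool();
        for (int i = 0; i < 3; i++) {
            final int iteration = i;
            pool.invoke(new RecursiveAction() { // stand-in for a task like MergeAction
                @Override
                protected void compute() {
                    System.out.println("merge pass " + iteration);
                }
            });
        }
        pool.shutdown();
    }
}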
From source file:uniol.apt.analysis.synthesize.FindWords.java
/**
 * Generate Petri net solvable words with the given characteristics.
 * @param properties The properties that should be considered.
 * @param alphabet The alphabet from which words should be generated.
 * @param quickFail Should quick-fail synthesis be done or should full synthesis be attempted?
 * @param wordCallback Callback that should be called for each word that is found.
 * @param lengthDoneCallback Callback that should be called when all words of a given length were handled.
 * @throws PreconditionFailedException If a combination of properties is specified for which there is no
 *         sensible definition of 'minimally unsolvable word', i.e. plain+k-marking.
 */
static public void generateList(PNProperties properties, SortedSet<Character> alphabet, boolean quickFail,
        WordCallback wordCallback, LengthDoneCallback lengthDoneCallback) throws PreconditionFailedException {
    // Java 8 provides ForkJoinPool.commonPool(). Java 7 does not, so we need to create our own pool.
    ForkJoinPool executor = new ForkJoinPool();
    try {
        generateList(properties, alphabet, quickFail, wordCallback, lengthDoneCallback, executor);
    } finally {
        executor.shutdownNow();
    }
}
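As the comment in the snippet notes, Java 8 added ForkJoinPool.commonPool(), which removes the need to own a pool at all when the defaults are acceptable. A hedged sketch of the two styles side by side (the demo class and trivial print task are made up for illustration):

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveAction;

public class PoolOwnershipDemo {
    // Trivial stand-in task; real work would be a divide-and-conquer RecursiveAction.
    static RecursiveAction print(String msg) {
        return new RecursiveAction() {
            @Override protected void compute() { System.out.println(msg); }
        };
    }

    public static void main(String[] args) {
        // Owning a pool: deterministic lifecycle, the caller is responsible for
        // shutdown (as the finally block in the snippet above does with shutdownNow()).
        ForkJoinPool owned = new ForkJoinPool();
        try {
            owned.invoke(print("ran on a dedicated pool"));
        } finally {
            owned.shutdownNow();
        }
        // Borrowing the common pool (Java 8+): never shut it down; it is shared
        // with parallel streams and CompletableFuture defaults.
        ForkJoinPool.commonPool().invoke(print("ran on the common pool"));
    }
}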
From source file:com.kegare.caveworld.world.WorldProviderCaveworld.java
public static void regenerate(final boolean backup) {
    final File dir = getDimDir();
    final String name = dir.getName().substring(4);
    final MinecraftServer server = FMLCommonHandler.instance().getMinecraftServerInstance();
    Set<EntityPlayerMP> target = Sets.newHashSet();

    for (Object obj : server.getConfigurationManager().playerEntityList.toArray()) {
        if (obj != null && ((EntityPlayerMP) obj).dimension == CaveworldAPI.getDimension()) {
            target.add(CaveUtils.respawnPlayer((EntityPlayerMP) obj, 0));
        }
    }

    boolean result = new ForkJoinPool().invoke(new RecursiveTask<Boolean>() {
        @Override
        protected Boolean compute() {
            IChatComponent component;

            try {
                component = new ChatComponentText(
                        StatCollector.translateToLocalFormatted("caveworld.regenerate.regenerating", name));
                component.getChatStyle().setColor(EnumChatFormatting.GRAY).setItalic(true);
                server.getConfigurationManager().sendChatMsg(component);

                if (server.isSinglePlayer()) {
                    Caveworld.network.sendToAll(new RegenerateMessage(backup));
                }

                Caveworld.network.sendToAll(new RegenerateMessage.ProgressNotify(0));
                CaveBlocks.caveworld_portal.portalDisabled = true;

                int dim = CaveworldAPI.getDimension();
                WorldServer world = DimensionManager.getWorld(dim);

                if (world != null) {
                    world.saveAllChunks(true, null);
                    world.flush();
                    MinecraftForge.EVENT_BUS.post(new WorldEvent.Unload(world));
                    DimensionManager.setWorld(dim, null);
                }

                if (dir != null) {
                    if (backup) {
                        File parent = dir.getParentFile();
                        final Pattern pattern = Pattern.compile("^" + dir.getName() + "_bak-..*\\.zip$");
                        File[] files = parent.listFiles(new FilenameFilter() {
                            @Override
                            public boolean accept(File dir, String name) {
                                return pattern.matcher(name).matches();
                            }
                        });

                        if (files != null && files.length >= 5) {
                            Arrays.sort(files, new Comparator<File>() {
                                @Override
                                public int compare(File o1, File o2) {
                                    int i = CaveUtils.compareWithNull(o1, o2);
                                    if (i == 0 && o1 != null && o2 != null) {
                                        try {
                                            i = Files.getLastModifiedTime(o1.toPath())
                                                    .compareTo(Files.getLastModifiedTime(o2.toPath()));
                                        } catch (IOException e) {
                                        }
                                    }
                                    return i;
                                }
                            });

                            FileUtils.forceDelete(files[0]);
                        }

                        Calendar calendar = Calendar.getInstance();
                        String year = Integer.toString(calendar.get(Calendar.YEAR));
                        String month = String.format("%02d", calendar.get(Calendar.MONTH) + 1);
                        String day = String.format("%02d", calendar.get(Calendar.DATE));
                        String hour = String.format("%02d", calendar.get(Calendar.HOUR_OF_DAY));
                        String minute = String.format("%02d", calendar.get(Calendar.MINUTE));
                        String second = String.format("%02d", calendar.get(Calendar.SECOND));
                        File bak = new File(parent,
                                dir.getName() + "_bak-" + Joiner.on("").join(year, month, day) + "-"
                                        + Joiner.on("").join(hour, minute, second) + ".zip");

                        if (bak.exists()) {
                            FileUtils.deleteQuietly(bak);
                        }

                        component = new ChatComponentText(
                                StatCollector.translateToLocalFormatted("caveworld.regenerate.backingup", name));
                        component.getChatStyle().setColor(EnumChatFormatting.GRAY).setItalic(true);
                        server.getConfigurationManager().sendChatMsg(component);
                        Caveworld.network.sendToAll(new RegenerateMessage.ProgressNotify(1));

                        if (CaveUtils.archiveDirZip(dir, bak)) {
                            ClickEvent click = new ClickEvent(ClickEvent.Action.OPEN_FILE,
                                    FilenameUtils.normalize(bak.getParentFile().getPath()));
                            component = new ChatComponentText(
                                    StatCollector.translateToLocalFormatted("caveworld.regenerate.backedup", name));
                            component.getChatStyle().setColor(EnumChatFormatting.GRAY).setItalic(true)
                                    .setChatClickEvent(click);
                            server.getConfigurationManager().sendChatMsg(component);
                        } else {
                            component = new ChatComponentText(StatCollector
                                    .translateToLocalFormatted("caveworld.regenerate.backup.failed", name));
                            component.getChatStyle().setColor(EnumChatFormatting.RED).setItalic(true);
                            server.getConfigurationManager().sendChatMsg(component);
                        }
                    }

                    FileUtils.deleteDirectory(dir);
                }

                if (DimensionManager.shouldLoadSpawn(dim)) {
                    DimensionManager.initDimension(dim);
                    world = DimensionManager.getWorld(dim);

                    if (world != null) {
                        world.saveAllChunks(true, null);
                        world.flush();
                    }
                }

                CaveBlocks.caveworld_portal.portalDisabled = false;

                component = new ChatComponentText(
                        StatCollector.translateToLocalFormatted("caveworld.regenerate.regenerated", name));
                component.getChatStyle().setColor(EnumChatFormatting.GRAY).setItalic(true);
                server.getConfigurationManager().sendChatMsg(component);
                Caveworld.network.sendToAll(new RegenerateMessage.ProgressNotify(2));
                return true;
            } catch (Exception e) {
                component = new ChatComponentText(
                        StatCollector.translateToLocalFormatted("caveworld.regenerate.failed", name));
                component.getChatStyle().setColor(EnumChatFormatting.RED).setItalic(true);
                server.getConfigurationManager().sendChatMsg(component);
                Caveworld.network.sendToAll(new RegenerateMessage.ProgressNotify(3));
                CaveLog.log(Level.ERROR, e, component.getUnformattedText());
            }

            return false;
        }
    });

    if (result && (Config.hardcore || Config.caveborn)) {
        for (EntityPlayerMP player : target) {
            if (!CaveworldAPI.isEntityInCaveworld(player)) {
                CaveUtils.forceTeleport(player, CaveworldAPI.getDimension());
            }
        }
    }
}
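Stripped of the Minecraft specifics, the snippet above follows a common shape: invoke an anonymous RecursiveTask<Boolean> on a throwaway pool and read the result synchronously. A minimal sketch of just that shape (illustrative class, not Caveworld code):

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

public class InvokeAnonymousTaskDemo {
    public static void main(String[] args) {
        // invoke() blocks the caller until compute() returns, so the pool acts as a
        // one-shot worker here; note that, as in the snippet above, the pool itself
        // is never shut down, so its threads linger until garbage collection.
        boolean result = new ForkJoinPool().invoke(new RecursiveTask<Boolean>() {
            @Override
            protected Boolean compute() {
                try {
                    // ... perform the long-running work, reporting progress ...
                    return true;
                } catch (Exception e) {
                    return false; // signal failure to the caller instead of throwing
                }
            }
        });
        System.out.println("succeeded: " + result);
    }
}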
From source file:cn.afterturn.easypoi.excel.imports.ExcelImportService.java
private <T> List<T> importExcel(Collection<T> result, Sheet sheet, Class<?> pojoClass, ImportParams params,
        Map<String, PictureData> pictures) throws Exception {
    List collection = new ArrayList();
    Map<String, ExcelImportEntity> excelParams = new HashMap<String, ExcelImportEntity>();
    List<ExcelCollectionParams> excelCollection = new ArrayList<ExcelCollectionParams>();
    String targetId = null;
    i18nHandler = params.getI18nHandler();
    boolean isMap = Map.class.equals(pojoClass);

    if (!isMap) {
        Field[] fields = PoiPublicUtil.getClassFields(pojoClass);
        ExcelTarget etarget = pojoClass.getAnnotation(ExcelTarget.class);
        if (etarget != null) {
            targetId = etarget.value();
        }
        getAllExcelField(targetId, fields, excelParams, excelCollection, pojoClass, null, null);
    }

    Iterator<Row> rows = sheet.rowIterator();
    for (int j = 0; j < params.getTitleRows(); j++) {
        rows.next();
    }
    Map<Integer, String> titlemap = getTitleMap(rows, params, excelCollection, excelParams);
    checkIsValidTemplate(titlemap, excelParams, params, excelCollection);

    Row row = null;
    Object object = null;
    String picId;
    int readRow = 1;
    // skip the configured number of leading rows before the data starts
    for (int i = 0; i < params.getStartRows(); i++) {
        rows.next();
    }
    // if nested collections are imported but no key column index is set, default to column 0
    if (excelCollection.size() > 0 && params.getKeyIndex() == null) {
        params.setKeyIndex(0);
    }

    if (params.isConcurrentTask()) {
        ForkJoinPool forkJoinPool = new ForkJoinPool();
        int endRow = sheet.getLastRowNum() - params.getLastOfInvalidRow();
        if (params.getReadRows() > 0) {
            endRow = params.getReadRows();
        }
        ExcelImportForkJoinWork task = new ExcelImportForkJoinWork(
                params.getStartRows() + params.getHeadRows() + params.getTitleRows(), endRow, sheet, params,
                pojoClass, this, targetId, titlemap, excelParams);
        ExcelImportResult forkJoinResult = forkJoinPool.invoke(task);
        collection = forkJoinResult.getList();
        failCollection = forkJoinResult.getFailList();
    } else {
        StringBuilder errorMsg;
        while (rows.hasNext()
                && (row == null || sheet.getLastRowNum() - row.getRowNum() > params.getLastOfInvalidRow())) {
            if (params.getReadRows() > 0 && readRow > params.getReadRows()) {
                break;
            }
            row = rows.next();
            // stop once we are inside the trailing invalid rows
            if (sheet.getLastRowNum() - row.getRowNum() < params.getLastOfInvalidRow()) {
                break;
            }
            // skip rows that contain no cells at all
            if (row.getLastCellNum() < 0) {
                continue;
            }
            if (isMap && object != null) {
                ((Map) object).put("excelRowNum", row.getRowNum());
            }
            errorMsg = new StringBuilder();
            // when the key cell is empty, this row carries nested-collection data
            // that belongs to the object created from the previous row
            if (params.getKeyIndex() != null
                    && (row.getCell(params.getKeyIndex()) == null
                            || StringUtils.isEmpty(getKeyValue(row.getCell(params.getKeyIndex()))))
                    && object != null) {
                for (ExcelCollectionParams param : excelCollection) {
                    addListContinue(object, param, row, titlemap, targetId, pictures, params, errorMsg);
                }
            } else {
                object = PoiPublicUtil.createObject(pojoClass, targetId);
                try {
                    Set<Integer> keys = titlemap.keySet();
                    for (Integer cn : keys) {
                        Cell cell = row.getCell(cn);
                        String titleString = (String) titlemap.get(cn);
                        if (excelParams.containsKey(titleString) || isMap) {
                            if (excelParams.get(titleString) != null && excelParams.get(titleString)
                                    .getType() == BaseEntityTypeConstants.IMAGE_TYPE) {
                                picId = row.getRowNum() + "_" + cn;
                                saveImage(object, picId, excelParams, titleString, pictures, params);
                            } else {
                                try {
                                    saveFieldValue(params, object, cell, excelParams, titleString, row);
                                } catch (ExcelImportException e) {
                                    // when verification is enabled, record the value-conversion
                                    // error instead of aborting the whole import
                                    if (params.isNeedVerify()
                                            && ExcelImportEnum.GET_VALUE_ERROR.equals(e.getType())) {
                                        errorMsg.append(" ").append(titleString)
                                                .append(ExcelImportEnum.GET_VALUE_ERROR.getMsg());
                                    }
                                }
                            }
                        }
                    }
                    if (object instanceof IExcelDataModel) {
                        ((IExcelDataModel) object).setRowNum(row.getRowNum());
                    }
                    for (ExcelCollectionParams param : excelCollection) {
                        addListContinue(object, param, row, titlemap, targetId, pictures, params, errorMsg);
                    }
                    if (verifyingDataValidity(object, row, params, isMap, errorMsg)) {
                        collection.add(object);
                    } else {
                        failCollection.add(object);
                    }
                } catch (ExcelImportException e) {
                    LOGGER.error("excel import error , row num:{},obj:{}", readRow,
                            ReflectionToStringBuilder.toString(object));
                    if (!e.getType().equals(ExcelImportEnum.VERIFY_ERROR)) {
                        throw new ExcelImportException(e.getType(), e);
                    }
                } catch (Exception e) {
                    LOGGER.error("excel import error , row num:{},obj:{}", readRow,
                            ReflectionToStringBuilder.toString(object));
                    throw new RuntimeException(e);
                }
            }
            readRow++;
        }
    }
    return collection;
}
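ExcelImportForkJoinWork is easypoi's own RecursiveTask that splits the sheet's row range across the pool. The general divide-and-conquer shape it relies on looks roughly like the following sketch (RowRangeTask, THRESHOLD, and processRow are hypothetical stand-ins, not easypoi API):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

class RowRangeTask extends RecursiveTask<List<String>> {
    private static final int THRESHOLD = 100; // below this, process rows sequentially
    private final int start, end;             // half-open range [start, end)

    RowRangeTask(int start, int end) { this.start = start; this.end = end; }

    @Override
    protected List<String> compute() {
        if (end - start <= THRESHOLD) {
            List<String> out = new ArrayList<>();
            for (int i = start; i < end; i++) {
                out.add(processRow(i));
            }
            return out;
        }
        int mid = (start + end) >>> 1;
        RowRangeTask left = new RowRangeTask(start, mid);
        RowRangeTask right = new RowRangeTask(mid, end);
        left.fork();                                        // schedule left half asynchronously
        List<String> rightResult = right.compute();         // compute right half in this thread
        List<String> merged = new ArrayList<>(left.join()); // wait for the left half
        merged.addAll(rightResult);                         // preserve row order: left then right
        return merged;
    }

    private String processRow(int i) { return "row-" + i; } // stand-in for real row parsing

    public static void main(String[] args) {
        List<String> rows = new ForkJoinPool().invoke(new RowRangeTask(0, 1000));
        System.out.println(rows.size()); // 1000
    }
}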
From source file:org.caleydo.core.util.impute.KNNImpute.java
public static void main(String[] args) throws IOException {
    ImmutableList.Builder<Gene> b = ImmutableList.builder();
    List<String> lines = CharStreams
            .readLines(new InputStreamReader(KNNImpute.class.getResourceAsStream("khan.csv")));
    lines = lines.subList(1, lines.size());

    int j = 0;
    for (String line : lines) {
        String[] l = line.split(";");
        float[] d = new float[l.length];
        int nans = 0;
        for (int i = 0; i < l.length; ++i) {
            if ("NA".equals(l[i])) {
                nans++;
                d[i] = Float.NaN;
            } else {
                d[i] = Float.parseFloat(l[i]);
            }
        }
        b.add(new Gene(j++, nans, d));
    }

    final KNNImputeDescription desc2 = new KNNImputeDescription();
    desc2.setMaxp(100000);
    KNNImpute r = new KNNImpute(desc2, b.build());
    ForkJoinPool p = new ForkJoinPool();
    p.invoke(r);

    try (PrintWriter w = new PrintWriter("khan.imputed.csv")) {
        w.println(StringUtils.repeat("sample", ";", r.samples));
        for (Gene g : r.genes) {
            float[] d = g.data;
            int nan = 0;
            w.print(Float.isNaN(d[0]) ? g.nanReplacements[nan++] : d[0]);
            for (int i = 1; i < d.length; ++i)
                w.append(';').append(String.valueOf(Float.isNaN(d[i]) ? g.nanReplacements[nan++] : d[i]));
            w.println();
        }
    }
}
From source file:org.apache.flink.runtime.webmonitor.WebRuntimeMonitor.java
public WebRuntimeMonitor(Configuration config, LeaderRetrievalService leaderRetrievalService,
        ActorSystem actorSystem) throws IOException, InterruptedException {
    this.leaderRetrievalService = checkNotNull(leaderRetrievalService);
    this.timeout = AkkaUtils.getTimeout(config);
    this.retriever = new JobManagerRetriever(this, actorSystem, AkkaUtils.getTimeout(config), timeout);

    final WebMonitorConfig cfg = new WebMonitorConfig(config);
    final String configuredAddress = cfg.getWebFrontendAddress();
    final int configuredPort = cfg.getWebFrontendPort();
    if (configuredPort < 0) {
        throw new IllegalArgumentException("Web frontend port is invalid: " + configuredPort);
    }

    final WebMonitorUtils.LogFileLocation logFiles = WebMonitorUtils.LogFileLocation.find(config);

    // create an empty directory in temp for the web server
    String rootDirFileName = "flink-web-" + UUID.randomUUID();
    webRootDir = new File(getBaseDir(config), rootDirFileName);
    LOG.info("Using directory {} for the web interface files", webRootDir);

    final boolean webSubmitAllow = cfg.isProgramSubmitEnabled();
    if (webSubmitAllow) {
        // create storage for uploads
        this.uploadDir = getUploadDir(config);
        // the upload directory should either 1. exist and be writable or 2. be creatable and writable
        if (!(uploadDir.exists() && uploadDir.canWrite()) && !(uploadDir.mkdir() && uploadDir.canWrite())) {
            throw new IOException(String.format("Jar upload directory %s cannot be created or is not writable.",
                    uploadDir.getAbsolutePath()));
        }
        LOG.info("Using directory {} for web frontend JAR file uploads", uploadDir);
    } else {
        this.uploadDir = null;
    }

    ExecutionGraphHolder currentGraphs = new ExecutionGraphHolder();

    // - Back pressure stats ----------------------------------------------
    stackTraceSamples = new StackTraceSampleCoordinator(actorSystem.dispatcher(), 60000);

    // Back pressure stats tracker config
    int cleanUpInterval = config.getInteger(ConfigConstants.JOB_MANAGER_WEB_BACK_PRESSURE_CLEAN_UP_INTERVAL,
            ConfigConstants.DEFAULT_JOB_MANAGER_WEB_BACK_PRESSURE_CLEAN_UP_INTERVAL);
    int refreshInterval = config.getInteger(ConfigConstants.JOB_MANAGER_WEB_BACK_PRESSURE_REFRESH_INTERVAL,
            ConfigConstants.DEFAULT_JOB_MANAGER_WEB_BACK_PRESSURE_REFRESH_INTERVAL);
    int numSamples = config.getInteger(ConfigConstants.JOB_MANAGER_WEB_BACK_PRESSURE_NUM_SAMPLES,
            ConfigConstants.DEFAULT_JOB_MANAGER_WEB_BACK_PRESSURE_NUM_SAMPLES);
    int delay = config.getInteger(ConfigConstants.JOB_MANAGER_WEB_BACK_PRESSURE_DELAY,
            ConfigConstants.DEFAULT_JOB_MANAGER_WEB_BACK_PRESSURE_DELAY);
    Time delayBetweenSamples = Time.milliseconds(delay);
    backPressureStatsTracker = new BackPressureStatsTracker(stackTraceSamples, cleanUpInterval, numSamples,
            delayBetweenSamples);
    // --------------------------------------------------------------------

    executorService = new ForkJoinPool();
    ExecutionContextExecutor context = ExecutionContext$.MODULE$.fromExecutor(executorService);

    // Config to enable https access to the web-ui
    boolean enableSSL = config.getBoolean(ConfigConstants.JOB_MANAGER_WEB_SSL_ENABLED,
            ConfigConstants.DEFAULT_JOB_MANAGER_WEB_SSL_ENABLED) && SSLUtils.getSSLEnabled(config);
    if (enableSSL) {
        LOG.info("Enabling ssl for the web frontend");
        try {
            serverSSLContext = SSLUtils.createSSLServerContext(config);
        } catch (Exception e) {
            throw new IOException("Failed to initialize SSLContext for the web frontend", e);
        }
    } else {
        serverSSLContext = null;
    }
    metricFetcher = new MetricFetcher(actorSystem, retriever, context);

    String defaultSavepointDir = config.getString(ConfigConstants.SAVEPOINT_DIRECTORY_KEY, null);

    JobCancellationWithSavepointHandlers cancelWithSavepoint = new JobCancellationWithSavepointHandlers(
            currentGraphs, context, defaultSavepointDir);
    RuntimeMonitorHandler triggerHandler = handler(cancelWithSavepoint.getTriggerHandler());
    RuntimeMonitorHandler inProgressHandler = handler(cancelWithSavepoint.getInProgressHandler());

    router = new Router()
            // config how to interact with this web server
            .GET("/config", handler(new DashboardConfigHandler(cfg.getRefreshInterval())))
            // the overview - how many task managers, slots, free slots, ...
            .GET("/overview", handler(new ClusterOverviewHandler(DEFAULT_REQUEST_TIMEOUT)))
            // job manager configuration
            .GET("/jobmanager/config", handler(new JobManagerConfigHandler(config)))
            // overview over jobs
            .GET("/joboverview", handler(new CurrentJobsOverviewHandler(DEFAULT_REQUEST_TIMEOUT, true, true)))
            .GET("/joboverview/running",
                    handler(new CurrentJobsOverviewHandler(DEFAULT_REQUEST_TIMEOUT, true, false)))
            .GET("/joboverview/completed",
                    handler(new CurrentJobsOverviewHandler(DEFAULT_REQUEST_TIMEOUT, false, true)))
            .GET("/jobs", handler(new CurrentJobIdsHandler(DEFAULT_REQUEST_TIMEOUT)))
            .GET("/jobs/:jobid", handler(new JobDetailsHandler(currentGraphs, metricFetcher)))
            .GET("/jobs/:jobid/vertices", handler(new JobDetailsHandler(currentGraphs, metricFetcher)))
            .GET("/jobs/:jobid/vertices/:vertexid",
                    handler(new JobVertexDetailsHandler(currentGraphs, metricFetcher)))
            .GET("/jobs/:jobid/vertices/:vertexid/subtasktimes",
                    handler(new SubtasksTimesHandler(currentGraphs)))
            .GET("/jobs/:jobid/vertices/:vertexid/taskmanagers",
                    handler(new JobVertexTaskManagersHandler(currentGraphs, metricFetcher)))
            .GET("/jobs/:jobid/vertices/:vertexid/accumulators",
                    handler(new JobVertexAccumulatorsHandler(currentGraphs)))
            .GET("/jobs/:jobid/vertices/:vertexid/checkpoints",
                    handler(new JobVertexCheckpointsHandler(currentGraphs)))
            .GET("/jobs/:jobid/vertices/:vertexid/backpressure",
                    handler(new JobVertexBackPressureHandler(currentGraphs, backPressureStatsTracker,
                            refreshInterval)))
            .GET("/jobs/:jobid/vertices/:vertexid/metrics",
                    handler(new JobVertexMetricsHandler(metricFetcher)))
            .GET("/jobs/:jobid/vertices/:vertexid/subtasks/accumulators",
                    handler(new SubtasksAllAccumulatorsHandler(currentGraphs)))
            .GET("/jobs/:jobid/vertices/:vertexid/subtasks/:subtasknum",
                    handler(new SubtaskCurrentAttemptDetailsHandler(currentGraphs, metricFetcher)))
            .GET("/jobs/:jobid/vertices/:vertexid/subtasks/:subtasknum/attempts/:attempt",
                    handler(new SubtaskExecutionAttemptDetailsHandler(currentGraphs, metricFetcher)))
            .GET("/jobs/:jobid/vertices/:vertexid/subtasks/:subtasknum/attempts/:attempt/accumulators",
                    handler(new SubtaskExecutionAttemptAccumulatorsHandler(currentGraphs)))
            .GET("/jobs/:jobid/plan", handler(new JobPlanHandler(currentGraphs)))
            .GET("/jobs/:jobid/config", handler(new JobConfigHandler(currentGraphs)))
            .GET("/jobs/:jobid/exceptions", handler(new JobExceptionsHandler(currentGraphs)))
            .GET("/jobs/:jobid/accumulators", handler(new JobAccumulatorsHandler(currentGraphs)))
            .GET("/jobs/:jobid/checkpoints", handler(new JobCheckpointsHandler(currentGraphs)))
            .GET("/jobs/:jobid/metrics", handler(new JobMetricsHandler(metricFetcher)))
            .GET("/taskmanagers", handler(new TaskManagersHandler(DEFAULT_REQUEST_TIMEOUT, metricFetcher)))
            .GET("/taskmanagers/:" + TaskManagersHandler.TASK_MANAGER_ID_KEY + "/metrics",
                    handler(new TaskManagersHandler(DEFAULT_REQUEST_TIMEOUT, metricFetcher)))
            .GET("/taskmanagers/:" + TaskManagersHandler.TASK_MANAGER_ID_KEY + "/log",
                    new TaskManagerLogHandler(retriever, context, jobManagerAddressPromise.future(), timeout,
                            TaskManagerLogHandler.FileMode.LOG, config, enableSSL))
            .GET("/taskmanagers/:" + TaskManagersHandler.TASK_MANAGER_ID_KEY + "/stdout",
                    new TaskManagerLogHandler(retriever, context, jobManagerAddressPromise.future(), timeout,
                            TaskManagerLogHandler.FileMode.STDOUT, config, enableSSL))
            .GET("/taskmanagers/:" + TaskManagersHandler.TASK_MANAGER_ID_KEY + "/metrics",
                    handler(new TaskManagerMetricsHandler(metricFetcher)))
            // log and stdout
            .GET("/jobmanager/log",
                    logFiles.logFile == null ? new ConstantTextHandler("(log file unavailable)")
                            : new StaticFileServerHandler(retriever, jobManagerAddressPromise.future(), timeout,
                                    logFiles.logFile, enableSSL))
            .GET("/jobmanager/stdout",
                    logFiles.stdOutFile == null ? new ConstantTextHandler("(stdout file unavailable)")
                            : new StaticFileServerHandler(retriever, jobManagerAddressPromise.future(), timeout,
                                    logFiles.stdOutFile, enableSSL))
            .GET("/jobmanager/metrics", handler(new JobManagerMetricsHandler(metricFetcher)))
            // Cancel a job via GET (for proper integration with YARN this has to be performed via GET)
            .GET("/jobs/:jobid/yarn-cancel", handler(new JobCancellationHandler()))
            // DELETE is the preferred way of canceling a job (Rest-conform)
            .DELETE("/jobs/:jobid/cancel", handler(new JobCancellationHandler()))
            .GET("/jobs/:jobid/cancel-with-savepoint", triggerHandler)
            .GET("/jobs/:jobid/cancel-with-savepoint/target-directory/:targetDirectory", triggerHandler)
            .GET(JobCancellationWithSavepointHandlers.IN_PROGRESS_URL, inProgressHandler)
            // stop a job via GET (for proper integration with YARN this has to be performed via GET)
            .GET("/jobs/:jobid/yarn-stop", handler(new JobStoppingHandler()))
            // DELETE is the preferred way of stopping a job (Rest-conform)
            .DELETE("/jobs/:jobid/stop", handler(new JobStoppingHandler()));

    if (webSubmitAllow) {
        router
                // fetch the list of uploaded jars
                .GET("/jars", handler(new JarListHandler(uploadDir)))
                // get plan for an uploaded jar
                .GET("/jars/:jarid/plan", handler(new JarPlanHandler(uploadDir)))
                // run a jar
                .POST("/jars/:jarid/run", handler(new JarRunHandler(uploadDir, timeout, config)))
                // upload a jar
                .POST("/jars/upload", handler(new JarUploadHandler(uploadDir)))
                // delete an uploaded jar from submission interface
                .DELETE("/jars/:jarid", handler(new JarDeleteHandler(uploadDir)));
    } else {
        router
                // send an Access Denied message (sort of)
                // Every other GET request will go to the File Server, which will not provide
                // access to the jar directory anyway, because it doesn't exist in webRootDir.
                .GET("/jars", handler(new JarAccessDeniedHandler()));
    }

    // this handler serves all the static contents
    router.GET("/:*", new StaticFileServerHandler(retriever, jobManagerAddressPromise.future(), timeout,
            webRootDir, enableSSL));

    // add shutdown hook for deleting the directories and remaining temp files on shutdown
    try {
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                cleanup();
            }
        });
    } catch (IllegalStateException e) {
        // race, JVM is in shutdown already, we can safely ignore this
        LOG.debug("Unable to add shutdown hook, shutdown already in progress", e);
    } catch (Throwable t) {
        // these errors usually happen when the shutdown is already in progress
        LOG.warn("Error while adding shutdown hook", t);
    }

    ChannelInitializer<SocketChannel> initializer = new ChannelInitializer<SocketChannel>() {
        @Override
        protected void initChannel(SocketChannel ch) {
            Handler handler = new Handler(router);

            // SSL should be the first handler in the pipeline
            if (serverSSLContext != null) {
                SSLEngine sslEngine = serverSSLContext.createSSLEngine();
                sslEngine.setUseClientMode(false);
                ch.pipeline().addLast("ssl", new SslHandler(sslEngine));
            }

            ch.pipeline().addLast(new HttpServerCodec()).addLast(new ChunkedWriteHandler())
                    .addLast(new HttpRequestHandler(uploadDir)).addLast(handler.name(), handler)
                    .addLast(new PipelineErrorHandler(LOG));
        }
    };

    NioEventLoopGroup bossGroup = new NioEventLoopGroup(1);
    NioEventLoopGroup workerGroup = new NioEventLoopGroup();

    this.bootstrap = new ServerBootstrap();
    this.bootstrap.group(bossGroup, workerGroup).channel(NioServerSocketChannel.class)
            .childHandler(initializer);

    ChannelFuture ch;
    if (configuredAddress == null) {
        ch = this.bootstrap.bind(configuredPort);
    } else {
        ch = this.bootstrap.bind(configuredAddress, configuredPort);
    }
    this.serverChannel = ch.sync().channel();

    InetSocketAddress bindAddress = (InetSocketAddress) serverChannel.localAddress();
    String address = bindAddress.getAddress().getHostAddress();
    int port = bindAddress.getPort();

    LOG.info("Web frontend listening at " + address + ':' + port);
}
From source file:com.kegare.caveworld.client.config.GuiBiomesEntry.java
@Override
public void setResult(final List<Integer> result) {
    new ForkJoinPool().execute(new RecursiveAction() {
        @Override
        protected void compute() {
            BiomeGenBase biome;
            ICaveBiome entry;

            for (Integer id : result) {
                biome = BiomeGenBase.getBiome(id);

                if (biome != null) {
                    for (ICaveBiome cave : biomeList.biomes) {
                        if (cave.getBiome() == biome) {
                            biome = null;
                            biomeList.selected = cave;
                            break;
                        }
                    }

                    if (biome == null) {
                        continue;
                    }

                    if (CaveBiomeManager.defaultMapping.containsKey(biome)) {
                        entry = CaveBiomeManager.defaultMapping.get(biome);
                    } else {
                        entry = new CaveBiome(biome, 10);
                    }

                    if (biomeList.biomes.addIfAbsent(entry)) {
                        biomeList.contents.addIfAbsent(entry);
                        biomeList.selected = entry;
                    }
                }
            }

            Comparator<ICaveBiome> comparator = new CaveBiomeComparator();
            Collections.sort(biomeList.contents, comparator);
            Collections.sort(biomeList.biomes, comparator);

            biomeList.scrollToSelected();
        }
    });
}
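Unlike the invoke() calls in the earlier snippets, execute() here is fire-and-forget: the GUI thread returns immediately while the RecursiveAction rebuilds and sorts the biome list in the background. A minimal sketch of that asynchronous pattern (the demo class is illustrative; the awaitTermination call exists only to keep the demo deterministic):

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveAction;
import java.util.concurrent.TimeUnit;

public class ExecuteAsyncDemo {
    public static void main(String[] args) throws InterruptedException {
        ForkJoinPool pool = new ForkJoinPool();
        // execute() schedules the task and returns immediately; there is no task
        // handle to join, so completion must be observed through side effects.
        pool.execute(new RecursiveAction() {
            @Override
            protected void compute() {
                System.out.println("background work done");
            }
        });
        System.out.println("caller continues immediately");
        pool.shutdown();
        pool.awaitTermination(5, TimeUnit.SECONDS); // demo-only: wait before the JVM exits
    }
}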