List of usage examples for org.apache.commons.lang3.tuple.ImmutablePair.of
public static <L, R> ImmutablePair<L, R> of(final L left, final R right)
Obtains an immutable pair of two objects, inferring the generic types.
This factory allows the pair to be created using type inference to obtain the generic types.
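A minimal sketch of calling the factory (the variable names and values here are illustrative, not taken from any of the source files below). The left and right types are inferred from the arguments, so no explicit type arguments are needed:

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

// Inferred as ImmutablePair<String, Integer> from the arguments.
Pair<String, Integer> pair = ImmutablePair.of("answer", 42);
String left = pair.getLeft();    // "answer"
Integer right = pair.getRight(); // 42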
From source file:org.apache.spark.network.ChunkFetchRequestHandlerSuite.java
@Test
public void handleChunkFetchRequest() throws Exception {
    RpcHandler rpcHandler = new NoOpRpcHandler();
    OneForOneStreamManager streamManager = (OneForOneStreamManager) (rpcHandler.getStreamManager());
    Channel channel = mock(Channel.class);
    ChannelHandlerContext context = mock(ChannelHandlerContext.class);
    when(context.channel()).thenAnswer(invocationOnMock0 -> channel);
    List<Pair<Object, ExtendedChannelPromise>> responseAndPromisePairs = new ArrayList<>();
    when(channel.writeAndFlush(any())).thenAnswer(invocationOnMock0 -> {
        Object response = invocationOnMock0.getArguments()[0];
        ExtendedChannelPromise channelFuture = new ExtendedChannelPromise(channel);
        responseAndPromisePairs.add(ImmutablePair.of(response, channelFuture));
        return channelFuture;
    });

    // Prepare the stream.
    List<ManagedBuffer> managedBuffers = new ArrayList<>();
    managedBuffers.add(new TestManagedBuffer(10));
    managedBuffers.add(new TestManagedBuffer(20));
    managedBuffers.add(null);
    managedBuffers.add(new TestManagedBuffer(30));
    managedBuffers.add(new TestManagedBuffer(40));
    long streamId = streamManager.registerStream("test-app", managedBuffers.iterator(), channel);

    TransportClient reverseClient = mock(TransportClient.class);
    ChunkFetchRequestHandler requestHandler = new ChunkFetchRequestHandler(reverseClient,
        rpcHandler.getStreamManager(), 2L);

    RequestMessage request0 = new ChunkFetchRequest(new StreamChunkId(streamId, 0));
    requestHandler.channelRead(context, request0);
    Assert.assertEquals(1, responseAndPromisePairs.size());
    Assert.assertTrue(responseAndPromisePairs.get(0).getLeft() instanceof ChunkFetchSuccess);
    Assert.assertEquals(managedBuffers.get(0),
        ((ChunkFetchSuccess) (responseAndPromisePairs.get(0).getLeft())).body());

    RequestMessage request1 = new ChunkFetchRequest(new StreamChunkId(streamId, 1));
    requestHandler.channelRead(context, request1);
    Assert.assertEquals(2, responseAndPromisePairs.size());
    Assert.assertTrue(responseAndPromisePairs.get(1).getLeft() instanceof ChunkFetchSuccess);
    Assert.assertEquals(managedBuffers.get(1),
        ((ChunkFetchSuccess) (responseAndPromisePairs.get(1).getLeft())).body());

    // Finish flushing the response for request0.
    responseAndPromisePairs.get(0).getRight().finish(true);

    RequestMessage request2 = new ChunkFetchRequest(new StreamChunkId(streamId, 2));
    requestHandler.channelRead(context, request2);
    Assert.assertEquals(3, responseAndPromisePairs.size());
    Assert.assertTrue(responseAndPromisePairs.get(2).getLeft() instanceof ChunkFetchFailure);
    ChunkFetchFailure chunkFetchFailure = ((ChunkFetchFailure) (responseAndPromisePairs.get(2).getLeft()));
    Assert.assertEquals("java.lang.IllegalStateException: Chunk was not found",
        chunkFetchFailure.errorString.split("\\r?\\n")[0]);

    RequestMessage request3 = new ChunkFetchRequest(new StreamChunkId(streamId, 3));
    requestHandler.channelRead(context, request3);
    Assert.assertEquals(4, responseAndPromisePairs.size());
    Assert.assertTrue(responseAndPromisePairs.get(3).getLeft() instanceof ChunkFetchSuccess);
    Assert.assertEquals(managedBuffers.get(3),
        ((ChunkFetchSuccess) (responseAndPromisePairs.get(3).getLeft())).body());

    RequestMessage request4 = new ChunkFetchRequest(new StreamChunkId(streamId, 4));
    requestHandler.channelRead(context, request4);
    verify(channel, times(1)).close();
    Assert.assertEquals(4, responseAndPromisePairs.size());
}
From source file:org.apache.spark.network.client.TransportResponseHandler.java
public void addStreamCallback(String streamId, StreamCallback callback) {
    timeOfLastRequestNs.set(System.nanoTime());
    streamCallbacks.offer(ImmutablePair.of(streamId, callback));
}
From source file:org.apache.spark.network.server.OneForOneStreamManager.java
public static Pair<Long, Integer> parseStreamChunkId(String streamChunkId) {
    String[] array = streamChunkId.split("_");
    assert array.length == 2 : "Stream id and chunk index should be specified.";
    long streamId = Long.valueOf(array[0]);
    int chunkIndex = Integer.valueOf(array[1]);
    return ImmutablePair.of(streamId, chunkIndex);
}
From source file:org.apache.spark.network.TransportRequestHandlerSuite.java
@Test
public void handleFetchRequestAndStreamRequest() throws Exception {
    RpcHandler rpcHandler = new NoOpRpcHandler();
    OneForOneStreamManager streamManager = (OneForOneStreamManager) (rpcHandler.getStreamManager());
    Channel channel = mock(Channel.class);
    List<Pair<Object, ExtendedChannelPromise>> responseAndPromisePairs = new ArrayList<>();
    when(channel.writeAndFlush(any())).thenAnswer(invocationOnMock0 -> {
        Object response = invocationOnMock0.getArguments()[0];
        ExtendedChannelPromise channelFuture = new ExtendedChannelPromise(channel);
        responseAndPromisePairs.add(ImmutablePair.of(response, channelFuture));
        return channelFuture;
    });

    // Prepare the stream.
    List<ManagedBuffer> managedBuffers = new ArrayList<>();
    managedBuffers.add(new TestManagedBuffer(10));
    managedBuffers.add(new TestManagedBuffer(20));
    managedBuffers.add(new TestManagedBuffer(30));
    managedBuffers.add(new TestManagedBuffer(40));
    long streamId = streamManager.registerStream("test-app", managedBuffers.iterator());
    streamManager.registerChannel(channel, streamId);

    TransportClient reverseClient = mock(TransportClient.class);
    TransportRequestHandler requestHandler = new TransportRequestHandler(channel, reverseClient,
        rpcHandler, 2L);

    RequestMessage request0 = new ChunkFetchRequest(new StreamChunkId(streamId, 0));
    requestHandler.handle(request0);
    assert responseAndPromisePairs.size() == 1;
    assert responseAndPromisePairs.get(0).getLeft() instanceof ChunkFetchSuccess;
    assert ((ChunkFetchSuccess) (responseAndPromisePairs.get(0).getLeft())).body() == managedBuffers.get(0);

    RequestMessage request1 = new ChunkFetchRequest(new StreamChunkId(streamId, 1));
    requestHandler.handle(request1);
    assert responseAndPromisePairs.size() == 2;
    assert responseAndPromisePairs.get(1).getLeft() instanceof ChunkFetchSuccess;
    assert ((ChunkFetchSuccess) (responseAndPromisePairs.get(1).getLeft())).body() == managedBuffers.get(1);

    // Finish flushing the response for request0.
    responseAndPromisePairs.get(0).getRight().finish(true);

    RequestMessage request2 = new StreamRequest(String.format("%d_%d", streamId, 2));
    requestHandler.handle(request2);
    assert responseAndPromisePairs.size() == 3;
    assert responseAndPromisePairs.get(2).getLeft() instanceof StreamResponse;
    assert ((StreamResponse) (responseAndPromisePairs.get(2).getLeft())).body() == managedBuffers.get(2);

    // Request3 will trigger the close of channel, because the number of max chunks being
    // transferred is 2;
    RequestMessage request3 = new StreamRequest(String.format("%d_%d", streamId, 3));
    requestHandler.handle(request3);
    verify(channel, times(1)).close();
    assert responseAndPromisePairs.size() == 3;
}
From source file:org.apache.syncope.core.logic.AbstractAnyLogic.java
protected Pair<TO, List<LogicActions>> beforeCreate(final TO input) {
    Realm realm = realmDAO.findByFullPath(input.getRealm());
    if (realm == null) {
        SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.InvalidRealm);
        sce.getElements().add(input.getRealm());
        throw sce;
    }

    AnyType anyType = input instanceof UserTO
        ? anyTypeDAO.findUser()
        : input instanceof GroupTO
            ? anyTypeDAO.findGroup()
            : anyTypeDAO.find(input.getType());
    if (anyType == null) {
        SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.InvalidAnyType);
        sce.getElements().add(input.getType());
        throw sce;
    }

    TO any = input;

    templateUtils.apply(any, realm.getTemplate(anyType));

    List<LogicActions> actions = getActions(realm);
    for (LogicActions action : actions) {
        any = action.beforeCreate(any);
    }

    LOG.debug("Input: {}\nOutput: {}\n", input, any);

    return ImmutablePair.of(any, actions);
}
From source file:org.apache.syncope.core.logic.AbstractAnyLogic.java
protected Pair<P, List<LogicActions>> beforeUpdate(final P input, final String realmPath) {
    Realm realm = realmDAO.findByFullPath(realmPath);
    if (realm == null) {
        SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.InvalidRealm);
        sce.getElements().add(realmPath);
        throw sce;
    }

    P mod = input;

    List<LogicActions> actions = getActions(realm);
    for (LogicActions action : actions) {
        mod = action.beforeUpdate(mod);
    }

    LOG.debug("Input: {}\nOutput: {}\n", input, mod);

    return ImmutablePair.of(mod, actions);
}
From source file:org.apache.syncope.core.logic.AbstractAnyLogic.java
protected Pair<TO, List<LogicActions>> beforeDelete(final TO input) {
    Realm realm = realmDAO.findByFullPath(input.getRealm());
    if (realm == null) {
        SyncopeClientException sce = SyncopeClientException.build(ClientExceptionType.InvalidRealm);
        sce.getElements().add(input.getRealm());
        throw sce;
    }

    TO any = input;

    List<LogicActions> actions = getActions(realm);
    for (LogicActions action : actions) {
        any = action.beforeDelete(any);
    }

    LOG.debug("Input: {}\nOutput: {}\n", input, any);

    return ImmutablePair.of(any, actions);
}
From source file:org.apache.syncope.core.logic.init.JobManagerImpl.java
@Transactional
@Override
public void load() {
    final Pair<String, Long> conf = AuthContextUtils.execWithAuthContext(SyncopeConstants.MASTER_DOMAIN,
            new AuthContextUtils.Executable<Pair<String, Long>>() {

        @Override
        public Pair<String, Long> exec() {
            String notificationJobCronExpression = StringUtils.EMPTY;
            CPlainAttr notificationJobCronExp =
                    confDAO.find("notificationjob.cronExpression", NotificationJob.DEFAULT_CRON_EXP);
            if (!notificationJobCronExp.getValuesAsStrings().isEmpty()) {
                notificationJobCronExpression = notificationJobCronExp.getValuesAsStrings().get(0);
            }

            long interruptMaxRetries =
                    confDAO.find("tasks.interruptMaxRetries", "1").getValues().get(0).getLongValue();

            return ImmutablePair.of(notificationJobCronExpression, interruptMaxRetries);
        }
    });

    for (String domain : domainsHolder.getDomains().keySet()) {
        AuthContextUtils.execWithAuthContext(domain, new AuthContextUtils.Executable<Void>() {

            @Override
            public Void exec() {
                // 1. jobs for SchedTasks
                Set<SchedTask> tasks = new HashSet<>(taskDAO.<SchedTask>findAll(TaskType.SCHEDULED));
                tasks.addAll(taskDAO.<PullTask>findAll(TaskType.PULL));
                tasks.addAll(taskDAO.<PushTask>findAll(TaskType.PUSH));
                for (SchedTask task : tasks) {
                    try {
                        register(task, task.getStartAt(), conf.getRight());
                    } catch (Exception e) {
                        LOG.error("While loading job instance for task " + task.getKey(), e);
                    }
                }

                // 2. jobs for Reports
                for (Report report : reportDAO.findAll()) {
                    try {
                        register(report, null, conf.getRight());
                    } catch (Exception e) {
                        LOG.error("While loading job instance for report " + report.getName(), e);
                    }
                }

                return null;
            }
        });
    }

    Map<String, Object> jobMap = new HashMap<>();
    jobMap.put(JobManager.DOMAIN_KEY, AuthContextUtils.getDomain());
    jobMap.put(INTERRUPT_MAX_RETRIES_KEY, conf.getRight());

    // 3. NotificationJob
    if (StringUtils.isBlank(conf.getLeft())) {
        LOG.debug("Empty value provided for {}'s cron, not registering anything on Quartz",
                NotificationJob.class.getSimpleName());
    } else {
        LOG.debug("{}'s cron expression: {} - registering Quartz job and trigger",
                NotificationJob.class.getSimpleName(), conf.getLeft());

        try {
            NotificationJob job = createSpringBean(NotificationJob.class);
            registerJob(NOTIFICATION_JOB.getName(), job, conf.getLeft(), null, jobMap);
        } catch (Exception e) {
            LOG.error("While loading {} instance", NotificationJob.class.getSimpleName(), e);
        }
    }

    // 4. SystemLoadReporterJob (fixed schedule, every minute)
    LOG.debug("Registering {}", SystemLoadReporterJob.class);
    try {
        SystemLoadReporterJob job = createSpringBean(SystemLoadReporterJob.class);
        registerJob("systemLoadReporterJob", job, "0 * * * * ?", null, jobMap);
    } catch (Exception e) {
        LOG.error("While loading {} instance", SystemLoadReporterJob.class.getSimpleName(), e);
    }
}
From source file:org.apache.syncope.core.logic.ResourceLogic.java
@PreAuthorize("hasRole('" + StandardEntitlement.RESOURCE_LIST_CONNOBJECT + "')") @Transactional(readOnly = true)//w ww . j ava2 s .co m public Pair<SearchResult, List<ConnObjectTO>> listConnObjects(final String key, final String anyTypeKey, final int size, final String pagedResultsCookie, final List<OrderByClause> orderBy) { ExternalResource resource; ObjectClass objectClass; OperationOptions options; if (SyncopeConstants.REALM_ANYTYPE.equals(anyTypeKey)) { resource = resourceDAO.authFind(key); if (resource == null) { throw new NotFoundException("Resource '" + key + "'"); } if (resource.getOrgUnit() == null) { throw new NotFoundException("Realm provisioning for resource '" + key + "'"); } objectClass = resource.getOrgUnit().getObjectClass(); options = MappingUtils.buildOperationOptions( MappingUtils.getPropagationItems(resource.getOrgUnit().getItems()).iterator()); } else { Triple<ExternalResource, AnyType, Provision> init = connObjectInit(key, anyTypeKey); resource = init.getLeft(); objectClass = init.getRight().getObjectClass(); init.getRight().getMapping().getItems(); Set<MappingItem> linkinMappingItems = virSchemaDAO.findByProvision(init.getRight()).stream() .map(virSchema -> virSchema.asLinkingMappingItem()).collect(Collectors.toSet()); Iterator<MappingItem> mapItems = new IteratorChain<>(init.getRight().getMapping().getItems().iterator(), linkinMappingItems.iterator()); options = MappingUtils.buildOperationOptions(mapItems); } final List<ConnObjectTO> connObjects = new ArrayList<>(); SearchResult searchResult = connFactory.getConnector(resource).search(objectClass, null, new ResultsHandler() { private int count; @Override public boolean handle(final ConnectorObject connectorObject) { connObjects.add(ConnObjectUtils.getConnObjectTO(connectorObject)); // safety protection against uncontrolled result size count++; return count < size; } }, size, pagedResultsCookie, orderBy, options); return ImmutablePair.of(searchResult, connObjects); }
From source file:org.apache.syncope.core.logic.UserLogic.java
@PreAuthorize("isAuthenticated()") @Transactional(readOnly = true)/*w w w .j a v a2 s . c o m*/ public Pair<String, UserTO> selfRead() { return ImmutablePair.of(POJOHelper.serialize(AuthContextUtils.getAuthorizations()), binder.returnUserTO(binder.getAuthenticatedUserTO())); }