Example usage for com.google.common.collect Iterables getLast

List of usage examples for com.google.common.collect Iterables getLast

Introduction

On this page you can find example usages of com.google.common.collect.Iterables.getLast.

Prototype

public static <T> T getLast(Iterable<T> iterable) 

Document

Returns the last element of iterable.
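
The snippet below is a minimal, self-contained sketch (the class name and list contents are illustrative and not taken from the projects listed under Usage). It shows that getLast returns the final element of the iterable and throws NoSuchElementException when the iterable is empty, while the two-argument overload getLast(iterable, defaultValue) returns the supplied default instead.

import com.google.common.collect.Iterables;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class GetLastExample {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta", "gamma");

        // Returns the final element of the iterable ("gamma").
        String last = Iterables.getLast(names);

        // The single-argument form throws NoSuchElementException on an empty iterable;
        // the two-argument overload falls back to the supplied default instead.
        String fallback = Iterables.getLast(Collections.<String>emptyList(), "none");

        System.out.println(last + ", " + fallback); // prints "gamma, none"
    }
}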

Usage

From source file:org.glowroot.ui.TransactionCommonService.java

List<ThroughputAggregate> getThroughputAggregates(String agentRollupId, AggregateQuery query,
        boolean autoRefresh) throws Exception {
    LiveResult<ThroughputAggregate> liveResult;
    long revisedTo;
    if (autoRefresh) {
        liveResult = null;
        revisedTo = query.to();
    } else {
        liveResult = liveAggregateRepository.getThroughputAggregates(agentRollupId, query);
        revisedTo = liveResult == null ? query.to() : liveResult.revisedTo();
    }
    AggregateQuery revisedQuery = ImmutableAggregateQuery.builder().copyFrom(query).to(revisedTo).build();
    List<ThroughputAggregate> aggregates = aggregateRepository.readThroughputAggregates(agentRollupId,
            revisedQuery);
    if (revisedQuery.rollupLevel() == 0) {
        if (liveResult != null) {
            aggregates = Lists.newArrayList(aggregates);
            aggregates.addAll(liveResult.get());
        }
        return aggregates;
    }
    long nonRolledUpFrom = revisedQuery.from();
    if (!aggregates.isEmpty()) {
        nonRolledUpFrom = Iterables.getLast(aggregates).captureTime() + 1;
    }
    List<ThroughputAggregate> orderedNonRolledUpAggregates = Lists.newArrayList();
    if (nonRolledUpFrom <= revisedTo) {
        orderedNonRolledUpAggregates
                .addAll(aggregateRepository.readThroughputAggregates(agentRollupId, ImmutableAggregateQuery
                        .builder().copyFrom(revisedQuery).from(nonRolledUpFrom).rollupLevel(0).build()));
    }
    if (liveResult != null) {
        orderedNonRolledUpAggregates.addAll(liveResult.get());
    }
    aggregates = Lists.newArrayList(aggregates);
    long fixedIntervalMillis = configRepository.getRollupConfigs().get(revisedQuery.rollupLevel())
            .intervalMillis();
    aggregates.addAll(rollUpThroughputAggregates(orderedNonRolledUpAggregates,
            new RollupCaptureTimeFn(fixedIntervalMillis)));
    if (aggregates.size() >= 2) {
        long currentTime = clock.currentTimeMillis();
        ThroughputAggregate nextToLastAggregate = aggregates.get(aggregates.size() - 2);
        if (currentTime - nextToLastAggregate.captureTime() < 60000) {
            aggregates.remove(aggregates.size() - 1);
        }
    }
    return aggregates;
}

From source file:edu.harvard.med.screensaver.io.libraries.rnai.HairpinLibraryContentsParser.java

private void updateGeneList(List<Gene> genes, String[] row, CsvIntegerListColumn entrezGeneIdColumn,
        CsvTextListColumn geneNameColumn, CsvTextListColumn speciesNameColumn,
        CsvTextSubListColumn entrezSymbolsColumn, CsvTextSubListColumn accessionNumbersColumn)
        throws ParseException {
    // Read list of Entrezgene IDs
    // This defines the expected number of genes: other columns ought to describe the same number of genes
    List<Integer> entrezGeneIds = entrezGeneIdColumn.getValue(row);
    if (entrezGeneIds == null) {
        entrezGeneIds = Lists.newArrayList();
    }

    // List of names (zero or one per Entrezgene ID)
    List<String> entrezGeneNames = geneNameColumn.getValue(row);
    if (entrezGeneNames == null) {
        entrezGeneNames = Lists.newArrayList();
    }

    while (entrezGeneNames.size() < entrezGeneIds.size())
        entrezGeneNames.add(entrezGeneNames.size() > 0 ? Iterables.getLast(entrezGeneNames) : null);

    // List of species names (zero or one per Entrezgene ID)
    List<String> speciesNames = speciesNameColumn.getValue(row);
    if (speciesNames == null) {
        speciesNames = Lists.newArrayList();
    }

    while (speciesNames.size() < entrezGeneIds.size())
        speciesNames.add(speciesNames.size() > 0 ? Iterables.getLast(speciesNames) : null);

    // List of gene symbols (zero or more per Entrezgene ID)
    List<List<String>> entrezGeneSymbols = entrezSymbolsColumn.getValue(row);
    if (entrezGeneSymbols == null) {
        entrezGeneSymbols = Lists.newArrayList();
    }

    while (entrezGeneSymbols.size() < entrezGeneIds.size())
        entrezGeneSymbols.add(Lists.<String>newArrayList());

    // List of Genbank accession numbers (zero or more per Entrezgene ID)
    List<List<String>> accessionNumbers = accessionNumbersColumn.getValue(row);
    if (accessionNumbers == null) {
        accessionNumbers = Lists.newArrayList();
    }
    while (accessionNumbers.size() < entrezGeneIds.size())
        accessionNumbers.add(Lists.<String>newArrayList());

    // Check we don't have more descriptions than gene IDs
    if (entrezGeneIds.size() < entrezGeneNames.size()) {
        throw new ParseException(new ParseError("Found more names in " + geneNameColumn.getName()
                + " than ids in " + entrezGeneIdColumn.getName(), geneNameColumn.getLocation(row)));
    }

    if (entrezGeneIds.size() < entrezGeneSymbols.size()) {
        throw new ParseException(new ParseError("Found more symbols in " + entrezSymbolsColumn.getName()
                + " than ids in " + entrezGeneIdColumn.getName(), entrezSymbolsColumn.getLocation(row)));
    }

    if (entrezGeneIds.size() < accessionNumbers.size()) {
        throw new ParseException(new ParseError("Found more accession numbers in "
                + accessionNumbersColumn.getName() + " than ids in " + entrezGeneIdColumn.getName(),
                accessionNumbersColumn.getLocation(row)));
    }

    // Clear the list in case there is an existing entry (e.g. blank entry created by accessing getVendorGene())
    genes.clear();

    // Create the genes and add them to the list
    for (int i = 0; i < entrezGeneIds.size(); ++i) {
        Gene gene = new Gene().withEntrezgeneId(entrezGeneIds.get(i)).withGeneName(entrezGeneNames.get(i))
                .withSpeciesName(speciesNames.get(i));

        if (entrezGeneSymbols.get(i) != null) {
            gene.getEntrezgeneSymbols().addAll(entrezGeneSymbols.get(i));
        }

        if (accessionNumbers.get(i) != null) {
            gene.getGenbankAccessionNumbers().addAll(accessionNumbers.get(i));
        }

        genes.add(gene);
    }
}

From source file:main.Solver.java

public void backtrack(Entry entry) {
    solutionScores = new ArrayList<int[]>();
    while (true) {
        if (entry.value.depth == 0)
            break;
        Tableau tableau = new Tableau();
        tableau.fromToken(entry);
        tableau.undo(entry.value.node);
        solution.add(tableau.notation(entry));

        if (showall)
            logger.log("node=" + entry.value.node + "\r\n" + Iterables.getLast(solution) + "\r\n" + tableau);

        entry = new Entry(tableau);
        entry.value = position.get(entry.key);
        solutionScores.add(entry.value.score);

        if (showall) {
            logger.log("Entry={key='" + entry.key + "', token=" + Arrays.toString(entry.value.token)
                    + ", depth=" + entry.value.depth + ", scores=" + Arrays.toString(entry.value.score) + ",");
        }
    }
}

From source file:com.twitter.aurora.scheduler.http.SchedulerzRole.java

private List<Job> fetchJobsBy(final String role, final Optional<String> environment,
        final Map<IJobKey, Map<?, ?>> cronJobs) {

    final Function<Map.Entry<IJobKey, Collection<IScheduledTask>>, Job> toJob = new Function<Map.Entry<IJobKey, Collection<IScheduledTask>>, Job>() {
        @Override
        public Job apply(Map.Entry<IJobKey, Collection<IScheduledTask>> tasksByJobKey) {
            IJobKey jobKey = tasksByJobKey.getKey();
            Collection<IScheduledTask> tasks = tasksByJobKey.getValue();

            Job job = new Job();
            job.environment = jobKey.getEnvironment();
            job.name = jobKey.getName();

            // Pick the freshest task's config and associate it with the job.
            ITaskConfig freshestConfig = getFreshestTask(tasks).getAssignedTask().getTask();
            job.production = freshestConfig.isProduction();

            // TODO(Suman Karumuri): Add a source/job type to TaskConfig and replace logic below
            if (freshestConfig.isIsService()) {
                job.type = JobType.SERVICE;
            } else if (cronJobs.containsKey(jobKey)) {
                job.type = JobType.CRON;
            } else {
                job.type = JobType.ADHOC;
            }

            for (IScheduledTask task : tasks) {
                switch (task.getStatus()) {
                case INIT:
                case PENDING:
                    job.pendingTaskCount++;
                    break;

                case ASSIGNED:
                case STARTING:
                case RESTARTING:
                case RUNNING:
                case KILLING:
                case PREEMPTING:
                    job.activeTaskCount++;
                    break;

                case KILLED:
                case FINISHED:
                    job.finishedTaskCount++;
                    break;

                case LOST:
                case FAILED:
                case UNKNOWN:
                    job.failedTaskCount++;
                    Date now = new Date();
                    long elapsedMillis = now.getTime() - Iterables.getLast(task.getTaskEvents()).getTimestamp();

                    if (Amount.of(elapsedMillis, Time.MILLISECONDS).as(Time.HOURS) < 6) {
                        job.recentlyFailedTaskCount++;
                    }
                    break;

                default:
                    throw new IllegalArgumentException("Unsupported status: " + task.getStatus());
                }
            }

            return job;
        }
    };

    Query.Builder query = environment.isPresent() ? Query.envScoped(role, environment.get())
            : Query.roleScoped(role);

    Multimap<IJobKey, IScheduledTask> tasks = Tasks
            .byJobKey(Storage.Util.weaklyConsistentFetchTasks(storage, query));

    Iterable<Job> jobs = FluentIterable.from(tasks.asMap().entrySet()).transform(toJob);

    return DisplayUtils.JOB_ORDERING.sortedCopy(jobs);
}

From source file:org.springframework.cloud.deployer.resource.maven.MavenArtifactResolver.java

/**
 * Resolve an artifact and return its location in the local repository. Aether performs the normal
 * Maven resolution process ensuring that the latest update is cached to the local repository.
 * In addition, if the {@link MavenProperties#resolvePom} flag is <code>true</code>,
 * the POM is also resolved and cached.
 * @param resource the {@link MavenResource} representing the artifact
 * @return a {@link FileSystemResource} representing the resolved artifact in the local repository
 * @throws IllegalStateException if the artifact does not exist or the resolution fails
 */
Resource resolve(MavenResource resource) {
    Assert.notNull(resource, "MavenResource must not be null");
    validateCoordinates(resource);
    RepositorySystemSession session = newRepositorySystemSession(this.repositorySystem,
            this.properties.getLocalRepository());
    ArtifactResult resolvedArtifact;
    try {
        List<ArtifactRequest> artifactRequests = new ArrayList<>(2);
        if (properties.isResolvePom()) {
            artifactRequests.add(
                    new ArtifactRequest(toPomArtifact(resource), this.remoteRepositories, JavaScopes.RUNTIME));
        }
        artifactRequests
                .add(new ArtifactRequest(toJarArtifact(resource), this.remoteRepositories, JavaScopes.RUNTIME));

        resolvedArtifact = Iterables.getLast(this.repositorySystem.resolveArtifacts(session, artifactRequests));
    } catch (ArtifactResolutionException e) {
        throw new IllegalStateException(
                String.format("failed to resolve MavenResource: %s", resource.toString()), e);
    }
    return toResource(resolvedArtifact);
}

From source file:org.eclipse.sirius.diagram.ui.edit.internal.part.DiagramContainerEditPartOperation.java

private static void updatePrecedingSiblingCorner(final AbstractDiagramElementContainerEditPart self,
        int... cornerToCorrect) {
    // Update previous siblings: needed for the diagram
    // opening and the region container creation cases in
    // which each child will be the last element once.
    Collection<AbstractDiagramElementContainerEditPart> siblings = Lists.newArrayList(
            Iterables.filter(self.getParent().getChildren(), AbstractDiagramElementContainerEditPart.class));
    siblings.remove(self);
    AbstractDiagramElementContainerEditPart previous = siblings.isEmpty() ? null : Iterables.getLast(siblings);
    if (previous != null && previous.getPrimaryShape() instanceof RegionRoundedGradientRectangle) {
        RegionRoundedGradientRectangle gradientRoundedRectangle = (RegionRoundedGradientRectangle) previous
                .getPrimaryShape();
        if (!gradientRoundedRectangle.getAdditionalCornerDimensions()
                .equals(gradientRoundedRectangle.getCornerDimensions())) {
            for (int i : cornerToCorrect) {
                gradientRoundedRectangle.getAdditionalDimensionCorners().set(i);
            }
        }

        if (gradientRoundedRectangle.getAdditionalDimensionCorners().cardinality() == 4) {
            // we do not need specific corner anymore
            gradientRoundedRectangle.getAdditionalDimensionCorners().clear();
            gradientRoundedRectangle
                    .setCornerDimensions(gradientRoundedRectangle.getAdditionalCornerDimensions());
        }
    }
}

From source file:com.google.api.explorer.client.auth.AuthView.java

/**
 * Add an editor row in the form of a textbox, that will allow an arbitrary scope to be added.
 */
private FocusWidget addFreeFormEditorRow(String name, boolean showRemoveLink) {
    final FlowPanel newRow = new FlowPanel();

    // Create the new editor and do the appropriate bookkeeping.
    final TextBox scopeText = new TextBox();
    scopeText.setValue(name);
    newRow.add(scopeText);
    freeFormEditors.add(scopeText);

    final Label removeLink = new InlineLabel("X");
    removeLink.addStyleName(style.clickable());
    removeLink.addClickHandler(new ClickHandler() {
        @Override
        public void onClick(ClickEvent event) {
            freeFormEditors.remove(scopeText);
            additionalScopePanel.remove(newRow);

            if (freeFormEditors.isEmpty()) {
                addFreeFormEditorRow("", false);
            }
        }
    });
    newRow.add(removeLink);
    removeLink.setVisible(showRemoveLink);

    // Add a handler to add a new editor when there is text in the existing editor.
    scopeText.addKeyDownHandler(new KeyDownHandler() {
        @Override
        public void onKeyDown(KeyDownEvent event) {
            TextBox editor = (TextBox) event.getSource();
            boolean isLastEditor = editor.equals(Iterables.getLast(freeFormEditors));
            if (isLastEditor && !editor.getValue().isEmpty()) {
                presenter.addNewScope();
                removeLink.setVisible(true);
            }
        }
    });

    additionalScopePanel.add(newRow);

    return scopeText;
}

From source file:com.google.security.zynamics.binnavi.Database.PostgreSQL.Savers.PostgreSQLNodeSaver.java

/**
 * Saves the code nodes to the database.
 *
 * @param provider The connection to the database.
 * @param nodes The nodes to save.
 * @param firstNode The database index of the first node.
 * @param codeNodeIndices Index into the nodes list that identifies the code nodes.
 *
 * @throws SQLException Thrown if saving the code node instructions failed.
 */
protected static void saveCodeNodes(final SQLProvider provider, final List<INaviViewNode> nodes,
        final int firstNode, final List<Integer> codeNodeIndices) throws SQLException {

    if (!codeNodeIndices.isEmpty()) {
        final List<Pair<INaviCodeNode, INaviInstruction>> instructionsWithUnsavedLocalComments = PostgreSQLNodeSaver
                .saveCodeNodeInstructions(provider, nodes, firstNode, codeNodeIndices);

        final String query = "INSERT INTO " + CTableNames.CODE_NODES_TABLE
                + "(module_id, node_id, parent_function, comment_id) VALUES (?, ?, ?, ?)";

        final ArrayList<INaviCodeNode> codeNodesWithUnsavedComments = new ArrayList<INaviCodeNode>();

        final PreparedStatement preparedStatement = provider.getConnection().getConnection()
                .prepareStatement(query);

        try {
            for (final int index : codeNodeIndices) {
                final INaviCodeNode codeNode = (INaviCodeNode) nodes.get(index);
                codeNode.setId(firstNode + index);
                INaviFunction function = null;
                try {
                    function = codeNode.getParentFunction();
                } catch (final MaybeNullException e) {
                }
                final int moduleId = Iterables.getLast(codeNode.getInstructions()).getModule()
                        .getConfiguration().getId();
                final List<IComment> comment = codeNode.getComments().getLocalCodeNodeComment();
                final Integer commentId = comment == null ? null
                        : comment.size() == 0 ? null : Iterables.getLast(comment).getId();

                if ((comment != null) && (comment.size() != 0) && (commentId == null)) {
                    codeNodesWithUnsavedComments.add(codeNode);
                }
                preparedStatement.setInt(1, moduleId);
                preparedStatement.setInt(2, firstNode + index);
                if (function == null) {
                    preparedStatement.setNull(3, Types.BIGINT);
                } else {
                    preparedStatement.setObject(3, function.getAddress().toBigInteger(), Types.BIGINT);
                }
                if (commentId == null) {
                    preparedStatement.setNull(4, Types.INTEGER);
                } else {
                    preparedStatement.setInt(4, commentId);
                }
                preparedStatement.addBatch();
            }
            preparedStatement.executeBatch();
        } finally {
            preparedStatement.close();
        }

        // TODO (timkornau): this is not the best solution and is more a test than a full-fledged
        // implementation.
        for (final INaviCodeNode codeNode : codeNodesWithUnsavedComments) {
            final ArrayList<IComment> codeNodecomments = new ArrayList<IComment>();
            for (final IComment comment : codeNode.getComments().getLocalCodeNodeComment()) {
                try {
                    final Integer commentId = PostgreSQLNodeFunctions.appendLocalCodeNodeComment(provider,
                            codeNode, comment.getComment(), comment.getUser().getUserId());
                    final IComment newComment = new CComment(commentId, comment.getUser(), comment.getParent(),
                            comment.getComment());
                    codeNodecomments.add(newComment);
                } catch (final CouldntSaveDataException exception) {
                    CUtilityFunctions.logException(exception);
                }
            }
            codeNode.getComments().initializeLocalCodeNodeComment(codeNodecomments);
        }

        // TODO (timkornau): this is not the best solution and is more a test than a full-fledged
        // implementation.
        for (final Pair<INaviCodeNode, INaviInstruction> pair : instructionsWithUnsavedLocalComments) {
            final ArrayList<IComment> localInstructionComments = new ArrayList<IComment>();
            for (final IComment comment : pair.first().getComments()
                    .getLocalInstructionComment(pair.second())) {
                try {
                    final int commentId = PostgreSQLInstructionFunctions.appendLocalInstructionComment(provider,
                            pair.first(), pair.second(), comment.getComment(), comment.getUser().getUserId());
                    final IComment newComment = new CComment(commentId, comment.getUser(), comment.getParent(),
                            comment.getComment());
                    localInstructionComments.add(newComment);
                } catch (final CouldntSaveDataException exception) {
                    CUtilityFunctions.logException(exception);
                }
            }
            pair.first().getComments().initializeLocalInstructionComment(pair.second(),
                    localInstructionComments);
        }
    }
}

From source file:org.jclouds.gogrid.GoGridLiveTestDisabled.java

/**
 * Tests common load balancer operations. Also verifies IP services and job services.
 */
@Test(enabled = true)
public void testLoadBalancerLifecycle() {
    int lbCountBeforeTest = api.getLoadBalancerServices().getLoadBalancerList().size();

    final String nameOfLoadBalancer = "LoadBalancer" + String.valueOf(new Date().getTime()).substring(6);
    loadBalancersToDeleteAfterTest.add(nameOfLoadBalancer);

    Set<Ip> availableIps = api.getIpServices().getUnassignedPublicIpList();

    if (availableIps.size() < 4)
        throw new SkipException("Not enough available IPs (4 needed) to run the test");
    Iterator<Ip> ipIterator = availableIps.iterator();
    Ip vip = ipIterator.next();
    Ip realIp1 = ipIterator.next();
    Ip realIp2 = ipIterator.next();
    Ip realIp3 = ipIterator.next();

    AddLoadBalancerOptions options = new AddLoadBalancerOptions.Builder()
            .create(LoadBalancerType.LEAST_CONNECTED, LoadBalancerPersistenceType.SOURCE_ADDRESS);
    LoadBalancer createdLoadBalancer = api.getLoadBalancerServices().addLoadBalancer(nameOfLoadBalancer,
            IpPortPair.builder().ip(vip).port(80).build(),
            Arrays.asList(IpPortPair.builder().ip(realIp1).port(80).build(),
                    IpPortPair.builder().ip(realIp2).port(80).build()),
            options);
    assertNotNull(createdLoadBalancer);
    assert loadBalancerLatestJobCompleted.apply(createdLoadBalancer);

    // get load balancer by name
    Set<LoadBalancer> response = api.getLoadBalancerServices().getLoadBalancersByName(nameOfLoadBalancer);
    assert (response.size() == 1);
    createdLoadBalancer = Iterables.getOnlyElement(response);
    assertNotNull(createdLoadBalancer.getRealIpList());
    assertEquals(createdLoadBalancer.getRealIpList().size(), 2);
    assertNotNull(createdLoadBalancer.getVirtualIp());
    assertEquals(createdLoadBalancer.getVirtualIp().getIp().getIp(), vip.getIp());

    LoadBalancer editedLoadBalancer = api.getLoadBalancerServices().editLoadBalancerNamed(nameOfLoadBalancer,
            Arrays.asList(IpPortPair.builder().ip(realIp3).port(8181).build()));
    assert loadBalancerLatestJobCompleted.apply(editedLoadBalancer);
    assertNotNull(editedLoadBalancer.getRealIpList());
    assertEquals(editedLoadBalancer.getRealIpList().size(), 1);
    assertEquals(Iterables.getOnlyElement(editedLoadBalancer.getRealIpList()).getIp().getIp(), realIp3.getIp());

    int lbCountAfterAddingOneServer = api.getLoadBalancerServices().getLoadBalancerList().size();
    assert lbCountAfterAddingOneServer == lbCountBeforeTest
            + 1 : "There should be a +1 increase in the number of load balancers since the test started";

    // delete the load balancer
    api.getLoadBalancerServices().deleteByName(nameOfLoadBalancer);

    Set<Job> jobs = api.getJobServices().getJobsForObjectName(nameOfLoadBalancer);
    assert ("DeleteLoadBalancer".equals(Iterables.getLast(jobs).getCommand().getName()));

    assert loadBalancerLatestJobCompleted.apply(createdLoadBalancer);

    int lbCountAfterDeletingTheServer = api.getLoadBalancerServices().getLoadBalancerList().size();
    assert lbCountAfterDeletingTheServer == lbCountBeforeTest : "There should be the same number of load balancers as when the test started";
}

From source file:hu.bme.mit.massif.simulink.api.util.bus.BusSignalMappingCreator.java

private void updateResolutionMapForFirstSegments(BusSpecification specification,
        Map<String, FragmentResolution> resolutionMap) {
    if (specification instanceof BusCreator) {
        // incoming line names are used
        for (InPort inportOfCreator : specification.getInports()) {
            String collisionFreeLineName = mapper.getCollisionFreeLineName(inportOfCreator);
            OutPort connectedOutPort = mapper.getConnectedOutPort(inportOfCreator);
            setOutPortInResolutionMap(resolutionMap, collisionFreeLineName, connectedOutPort, false);
        }
    } else if (specification instanceof BusSelector) {
        BusSelector busSelector = (BusSelector) specification;
        // use mapping (tricky): find outport, look at name, etc.
        // we know that signals with the same name are not allowed to be selected into a bus
        Set<String> names = Sets.newHashSet();
        for (BusSignalMapping mapping : busSelector.getMappings()) {
            List<String> fragments = splitPathToFragments(mapping.getMappingPath());
            String lastFrag = Iterables.getLast(fragments);
            checkState(!names.contains(lastFrag), "Duplicate signal name %s in bus selector", lastFrag);
            names.add(lastFrag);
            setOutPortInResolutionMap(resolutionMap, lastFrag, mapping.getMappingFrom(),
                    mapping.isIncomplete());
        }
    }
}