Example usage for org.apache.commons.collections.map MultiKeyMap MultiKeyMap

Introduction

This page collects example usages of the no-argument constructor of org.apache.commons.collections.map.MultiKeyMap.

Prototype

public MultiKeyMap() 

Document

Constructs a new MultiKeyMap that decorates a HashedMap.
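
Before the project examples below, here is a minimal, self-contained sketch of this constructor in use. The class name, keys and values in the sketch are made up for illustration; the point is that the no-argument constructor yields an empty map backed by a HashedMap, and that entries are then written and read with the multi-key put and get overloads.

import org.apache.commons.collections.map.MultiKeyMap;

public class MultiKeyMapSketch {
    public static void main(String[] args) {
        // The no-argument constructor decorates a fresh, empty HashedMap.
        MultiKeyMap map = new MultiKeyMap();

        // Store values under composite (key1, key2) keys.
        map.put("en", "greeting", "Hello");
        map.put("fr", "greeting", "Bonjour");

        // Read them back with the matching two-key get.
        System.out.println(map.get("fr", "greeting")); // Bonjour

        // A key combination that was never stored returns null.
        System.out.println(map.get("de", "greeting")); // null
    }
}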

Usage

From source file:org.talend.designer.core.ui.editor.cmd.DeleteNodeContainerCommand.java

@Override
@SuppressWarnings("unchecked")
public void execute() {
    connectionDeletedInfosMap = new MultiKeyMap();
    process.setActivate(false);
    List uniqueNameList = new ArrayList();
    for (INode node : nodeList) {
        if (node.getJobletNode() != null) {
            continue;
        }
        uniqueNameList.add(node.getUniqueName());
        NodeContainer nodeContainer = ((Node) node).getNodeContainer();
        ((Process) process).removeNodeContainer(nodeContainer);
        List<IConnection> inputList = (List<IConnection>) node.getIncomingConnections();
        List<IConnection> outputList = (List<IConnection>) node.getOutgoingConnections();
        boolean builtIn = node.getConnectorFromType(EConnectionType.FLOW_MAIN).isMultiSchema()
                | node.getConnectorFromType(EConnectionType.TABLE).isMultiSchema();

        for (IConnection connection : inputList) {
            // see bug 0002633: "rejects" link disappears at times.
            if (connection != null && connection.getSourceNodeConnector() != null) {
                connection.getSourceNodeConnector()
                        .setCurLinkNbOutput(connection.getSourceNodeConnector().getCurLinkNbOutput() - 1);
            }

            INode prevNode = connection.getSource();
            if ((prevNode instanceof Node) && ((Node) prevNode).getJobletNode() != null) {
                Node jobletnode = (Node) prevNode.getJobletNode();
                ((JobletContainer) jobletnode.getNodeContainer()).getOutputs().remove(connection);
                if (!nodeList.contains(jobletnode)) {
                    boolean builtInJobletNode = jobletnode.getConnectorFromType(EConnectionType.FLOW_MAIN)
                            .isMultiSchema() | node.getConnectorFromType(EConnectionType.TABLE).isMultiSchema();
                    storeMetadata(connection, jobletnode);
                    jobletnode.removeOutput(connection);
                    if (!builtInJobletNode) {
                        process.removeUniqueConnectionName(connection.getUniqueName());
                    }
                }
            }
            if (!nodeList.contains(prevNode)) {
                boolean builtInPrevNode = prevNode.getConnectorFromType(EConnectionType.FLOW_MAIN)
                        .isMultiSchema() | node.getConnectorFromType(EConnectionType.TABLE).isMultiSchema();
                storeMetadata(connection, prevNode);
                prevNode.removeOutput(connection);
                if (!builtInPrevNode) {
                    process.removeUniqueConnectionName(connection.getUniqueName());
                }
            }
        }
        for (IConnection connection : outputList) {
            INode nextNode = connection.getTarget();
            if ((nextNode instanceof Node) && ((Node) nextNode).getJobletNode() != null) {
                Node jobletnode = (Node) nextNode.getJobletNode();
                ((JobletContainer) jobletnode.getNodeContainer()).getInputs().remove(connection);
                if (!nodeList.contains(jobletnode)) {
                    jobletnode.removeInput(connection);
                    boolean builtInJobletNode = jobletnode.getConnectorFromType(EConnectionType.FLOW_MAIN)
                            .isMultiSchema() | node.getConnectorFromType(EConnectionType.TABLE).isMultiSchema();
                    if (!builtInJobletNode) {
                        process.removeUniqueConnectionName(connection.getUniqueName());
                    }
                }
            }
            if (!nodeList.contains(nextNode)) {
                INodeConnector nodeConnector = nextNode.getConnectorFromType(connection.getLineStyle());
                nodeConnector.setCurLinkNbInput(nodeConnector.getCurLinkNbInput() - 1);
                nextNode.removeInput(connection);
                if (nextNode != null) {
                    for (int i = 0; i < nextNode.getIncomingConnections().size(); i++) {
                        Connection nextNodeConnection = (Connection) nextNode.getIncomingConnections().get(i);
                        nextNodeConnection.updateName();
                    }
                }

                if (nextNode.getExternalNode() instanceof AbstractNode) {
                    ((AbstractNode) nextNode.getExternalNode()).removeInput(connection);
                }
            }
            if (!builtIn) {
                process.removeUniqueConnectionName(connection.getUniqueName());
            }
        }
        if (builtIn) {
            for (IMetadataTable meta : node.getMetadataList()) {
                String metaName = meta.getTableName();
                process.removeUniqueConnectionName(metaName);
            }
            // for tmap remove join table names
            final List<String> names = CorePlugin.getDefault().getMapperService()
                    .getJoinTableNames(node.getExternalData());
            if (!names.isEmpty()) {
                joinTableNames.addAll(names);
                for (String name : joinTableNames) {
                    process.removeUniqueConnectionName(name);
                }
            }

        }
    }

    process.setActivate(true);
    process.checkStartNodes();
    process.checkProcess();
}

From source file:org.talend.mdm.commmon.metadata.DefaultValidationHandler.java

private void addErrorMessage(String message, Integer lineNumber, Integer columnNumber, ValidationError error) {
    MultiKeyMap currentErrors = errors.get(error);
    if (currentErrors == null) {
        currentErrors = new MultiKeyMap();
        errors.put(error, currentErrors);
    }
    currentErrors.put(lineNumber, columnNumber,
            message + " (line: " + lineNumber + " / column: " + columnNumber + ")"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
}

From source file:org.wikipedia.nirvana.WikiBooster.java

/**
 * Check if a particular page contains a particular template.
 *
 * @param title the title of the page.
 * @param template the template name with namespace prefix.
 * @return {@code true} if the asked page uses the asked template
 * @see org.wikipedia.Wiki#hasTemplate(String[], String) 
 */
public boolean hasTemplate(String title, String template) throws IOException {
    if (templates == null) {
        throw new RuntimeException("This class is not prepared to be used with templates");
    }
    if (!pagesSet.contains(title)) {
        throw new IllegalStateException("The booster is not prepared for page: " + title);
    }
    if (!templatesSet.contains(template)) {
        throw new IllegalStateException("The booster is not prepared for template: " + template);
    }
    if (hasTemplatesCache == null) {
        log.debug("Request templates usage info for " + pages.size() + " pages and " + templates.size()
                + " templates");
        boolean[][] data = wiki.hasTemplates(pages.toArray(new String[pages.size()]),
                templates.toArray(new String[templates.size()]));
        hasTemplatesCache = new MultiKeyMap();
        for (int i = 0; i < pages.size(); i++) {
            for (int j = 0; j < templates.size(); j++) {
                MultiKey key = new MultiKey(pages.get(i), templates.get(j));
                hasTemplatesCache.put(key, data[i][j]);
            }
        }
    }
    Boolean result = (Boolean) hasTemplatesCache.get(title, template);
    if (result == null) {
        throw new IllegalStateException(
                "Info not found in cash about page: " + title + " and template: " + template);
    }
    return result;
}
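
A detail worth noting in the example above: the cache is filled through the plain Map-style put(Object, Object) with an explicit MultiKey, but read back with the two-argument get(key1, key2). Those two styles are interchangeable in MultiKeyMap, as the following standalone sketch (with made-up page and template names) illustrates.

import org.apache.commons.collections.keyvalue.MultiKey;
import org.apache.commons.collections.map.MultiKeyMap;

public class MultiKeyInteropSketch {
    public static void main(String[] args) {
        MultiKeyMap cache = new MultiKeyMap();

        // Writing through the plain Map API requires the key to be a MultiKey...
        cache.put(new MultiKey("Some page", "Some template"), Boolean.TRUE);

        // ...and the two-argument get(key1, key2) locates the same entry.
        Boolean hit = (Boolean) cache.get("Some page", "Some template");
        System.out.println(hit); // true
    }
}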

From source file:piecework.engine.concrete.ProcessEngineConcreteFacade.java

@Override
public TaskResults findTasks(TaskCriteria... criterias) throws ProcessEngineException {
    String keyword = null;
    TaskResults.Builder builder = null;
    Set<String> allowedProcessDefinitionKeys = new HashSet<String>();
    Set<String> engineProcessInstanceIds = new HashSet<String>();

    if (criterias != null && criterias.length > 0) {
        for (TaskCriteria criteria : criterias) {
            if (StringUtils.isNotEmpty(criteria.getKeyword()))
                keyword = criteria.getKeyword();

            if (criteria.getProcesses() != null && !criteria.getProcesses().isEmpty()) {
                Set<String> engineSet = new HashSet<String>();
                for (Process process : criteria.getProcesses()) {
                    ProcessDeployment deployment = processDeployment(process);
                    allowedProcessDefinitionKeys.add(process.getProcessDefinitionKey());
                    if (deployment.getEngine() == null || engineSet.contains(deployment.getEngine()))
                        continue;
                    engineSet.add(deployment.getEngine());
                    ProcessEngineProxy proxy = registry.retrieve(ProcessEngineProxy.class,
                            deployment.getEngine());

                    TaskResults localResults = proxy.findTasks(criteria);
                    if (localResults == null)
                        continue;

                    engineProcessInstanceIds.addAll(localResults.getEngineProcessInstanceIds());

                    if (builder == null)
                        builder = new TaskResults.Builder(localResults);
                    else {
                        builder.tasks(localResults.getTasks());
                        builder.addToTotal(localResults.getTotal());
                    }
                }
            }
        }
    } else {
        builder = new TaskResults.Builder();
    }

    long time = 0;
    if (LOG.isDebugEnabled())
        time = System.currentTimeMillis();

    TaskResults.Builder resultsBuilder = new TaskResults.Builder();

    List<Task> taskInstances = builder != null ? builder.build().getTasks() : null;
    List<Task> tasks;
    int count = 0;

    if (taskInstances != null && !taskInstances.isEmpty()) {
        tasks = new ArrayList<Task>(taskInstances.size());

        List<ProcessInstance> processInstances;

        if (StringUtils.isNotEmpty(keyword))
            processInstances = processInstanceRepository
                    .findByProcessDefinitionKeyInAndEngineProcessInstanceIdInAndKeyword(
                            allowedProcessDefinitionKeys, engineProcessInstanceIds, keyword);
        else
            processInstances = processInstanceRepository
                    .findByProcessDefinitionKeyInAndEngineProcessInstanceIdIn(allowedProcessDefinitionKeys,
                            engineProcessInstanceIds);

        if (processInstances != null && !processInstances.isEmpty()) {

            MultiKeyMap processInstanceMap = new MultiKeyMap();
            for (ProcessInstance processInstance : processInstances) {
                if (processInstance == null)
                    continue;
                if (org.apache.cxf.common.util.StringUtils.isEmpty(processInstance.getProcessDefinitionKey()))
                    continue;
                if (org.apache.cxf.common.util.StringUtils
                        .isEmpty(processInstance.getEngineProcessInstanceId()))
                    continue;

                processInstanceMap.put(processInstance.getProcessDefinitionKey(),
                        processInstance.getEngineProcessInstanceId(), processInstance);
            }
            for (Task taskInstance : taskInstances) {
                ProcessInstance instance = ProcessInstance.class.cast(processInstanceMap.get(
                        taskInstance.getProcessDefinitionKey(), taskInstance.getEngineProcessInstanceId()));
                if (instance == null)
                    continue;

                tasks.add(new Task.Builder(taskInstance, new PassthroughSanitizer())
                        .processInstanceId(instance.getProcessInstanceId())
                        .processInstanceAlias(instance.getAlias())
                        .processInstanceLabel(instance.getProcessInstanceLabel()).build());
                count++;
            }
        }
    } else {
        tasks = Collections.emptyList();
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("Searching for process instances took " + (System.currentTimeMillis() - time) + " ms");
    }

    resultsBuilder.firstResult(1);
    resultsBuilder.maxResults(count);
    resultsBuilder.total(count);
    resultsBuilder.tasks(tasks);

    return resultsBuilder.build();
}

From source file:ro.fortsoft.wicket.pivot.DefaultPivotModel.java

private MultiKeyMap getData(PivotField dataField) {
    MultiKeyMap data = new MultiKeyMap();
    List<List<Object>> rowKeys = getRowKeys();
    System.out.println("rowKeys.size() = " + rowKeys.size());
    List<List<Object>> columnKeys = getColumnKeys();
    System.out.println("columnKeys.size() = " + columnKeys.size());

    List<PivotField> rowFields = getFields(PivotField.Area.ROW);
    List<PivotField> columnFields = getFields(PivotField.Area.COLUMN);
    for (List<Object> rowKey : rowKeys) {
        for (List<Object> columnKey : columnKeys) {
            Map<Integer, Object> rowFilter = getFilter(rowFields, rowKey);
            Map<Integer, Object> columnFilter = getFilter(columnFields, columnKey);
            final Map<Integer, Object> filter = new HashMap<Integer, Object>(rowFilter);
            filter.putAll(columnFilter);
            List<Object> values = getValues(dataField, filter);
            if (!CollectionUtils.isEmpty(values) || dataField.getFieldCalculation() != null) {
                /*
                System.out.println("filter = " + filter);
                System.out.println("values = " + values);
                System.out.println(values.size());
                */
                Object summary = PivotUtils.getSummary(dataField, values, new FieldValueProvider() {
                    @Override
                    public Object getFieldValue(PivotField field) {
                        List<Object> fieldValues = getValues(field, filter);
                        return field.getAggregator().init().addAll(fieldValues).getResult();
                    }
                });
                //               System.out.println("summary = " + summary);
                data.put(rowKey, columnKey, summary);
            }
        }
    }

    return data;
}

From source file:ro.nextreports.server.pivot.DefaultPivotModel.java

private MultiKeyMap getData(PivotField dataField) {
    MultiKeyMap data = new MultiKeyMap();
    List<List<Object>> rowKeys = getRowKeys();
    System.out.println("rowKeys.size() = " + rowKeys.size());
    List<List<Object>> columnKeys = getColumnKeys();
    System.out.println("columnKeys.size() = " + columnKeys.size());

    List<PivotField> rowFields = getFields(PivotField.Area.ROW);
    List<PivotField> columnFields = getFields(PivotField.Area.COLUMN);
    for (List<Object> rowKey : rowKeys) {
        for (List<Object> columnKey : columnKeys) {
            Map<Integer, Object> rowFilter = getFilter(rowFields, rowKey);
            Map<Integer, Object> columnFilter = getFilter(columnFields, columnKey);
            Map<Integer, Object> filter = new HashMap<Integer, Object>(rowFilter);
            filter.putAll(columnFilter);
            List<Object> values = getValues(dataField, filter);
            if (!CollectionUtils.isEmpty(values)) {
                /*
                System.out.println("filter = " + filter);
                System.out.println("values = " + values);
                System.out.println(values.size());
                */
                Object summary = PivotUtils.getSummary(dataField, values);
                //               System.out.println("summary = " + summary);
                data.put(rowKey, columnKey, summary);
            }
        }
    }

    return data;
}
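
Both pivot implementations above key the data map by the row-key and column-key lists themselves. That works because MultiKeyMap relies only on the key components' equals and hashCode, so any value-equal pair of lists reaches the same cell. A small sketch of that idea, with invented field values:

import java.util.Arrays;
import java.util.List;
import org.apache.commons.collections.map.MultiKeyMap;

public class PivotCellSketch {
    public static void main(String[] args) {
        MultiKeyMap data = new MultiKeyMap();

        // A cell is addressed by its row key and column key.
        List<Object> rowKey = Arrays.asList((Object) "Europe", "2023");
        List<Object> columnKey = Arrays.asList((Object) "Q1");
        data.put(rowKey, columnKey, 42);

        // Value-equal lists find the same cell, because lookup only
        // uses the key components' equals/hashCode.
        Object summary = data.get(Arrays.asList((Object) "Europe", "2023"),
                Arrays.asList((Object) "Q1"));
        System.out.println(summary); // 42
    }
}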