Example usage for org.w3c.dom Element hasAttribute

List of usage examples for org.w3c.dom Element hasAttribute

Introduction

This page collects example usages of org.w3c.dom Element.hasAttribute.

Prototype

public boolean hasAttribute(String name);

Document

Returns true when an attribute with a given name is specified on this element or has a default value, false otherwise.
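
A minimal, self-contained sketch of the method in isolation (not taken from any of the projects below; the document content and attribute names are made up): hasAttribute tells an absent attribute apart from one that is present with an empty value, which getAttribute alone cannot, since getAttribute returns "" in both cases.

import java.io.StringReader;

import javax.xml.parsers.DocumentBuilderFactory;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.InputSource;

public class HasAttributeDemo {
    public static void main(String[] args) throws Exception {
        // Parse a small in-memory document with one empty attribute (illustrative names).
        String xml = "<config block-size=\"\"/>";
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                .parse(new InputSource(new StringReader(xml)));
        Element root = doc.getDocumentElement();

        // Present but empty: hasAttribute is true, getAttribute returns "".
        System.out.println(root.hasAttribute("block-size")); // true
        System.out.println(root.getAttribute("block-size")); // "" (empty value)

        // Missing entirely: hasAttribute is false, getAttribute still returns "".
        System.out.println(root.hasAttribute("read-speed")); // false
        System.out.println(root.getAttribute("read-speed")); // ""
    }
}

Several of the usage examples below rely on exactly this check to apply a default only when a configuration attribute is missing, for example rootEl.hasAttribute("version") ? rootEl.getAttribute("version") : "1.0.0".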

Usage

From source file:autohit.creator.compiler.SimCompiler.java

/**
 *  handle call.
 *  MICROCODE
 * 1- i.scope
 * 2- (SET)*
 * 3- i.eval(name)
 * 4- i.method(method)
 * 5- i.rscope
 * 6- if (result exists) i.store(result)
 */
private void handleMethod(Element en) {

    String name = en.getAttribute(ATTR_NAME);
    String method = en.getAttribute(ATTR_METHOD);

    runtimeDebug("handleMethod.  name=" + name + " method=" + method);

    // 1- i.scope
    this.emitScope();

    // 2- (SET)*
    try {
        // recurse into the children
        processCode(en);

        // 3- i.eval(name)
        this.emitEval(name);

        // 4- i.method(method)
        this.emitMethod(method);

    } catch (Exception e) {
        //   Stop an error unravelling here.  Close the scope and move on
        runtimeError("ERROR handleMethod.  Broken call." + name);
        runtimeError(e.toString());
        this.emitRScope();
        return;
    }

    // 5- i.rscope
    this.emitRScope();

    // 6- if (result exists) i.store(result)
    if (en.hasAttribute(ATTR_RESULT)) {
        this.emitStore(en.getAttribute(ATTR_RESULT));
    }
}

From source file:betullam.xmlmodifier.XMLmodifier.java

private boolean hasDuplicate(Element parentElement, String elementName, String attrName, String attrValue,
        String textContent) {
    boolean hasDuplicate = false;
    NodeList childNodes = parentElement.getChildNodes();
    if (childNodes.getLength() > 0) {
        for (int i = 0; i < childNodes.getLength(); i++) {
            if (childNodes.item(i).getNodeType() == org.w3c.dom.Node.ELEMENT_NODE) {
                Element childElement = (Element) childNodes.item(i);

                // Get name of child element:
                String childElementName = childElement.getTagName();

                // Check if the element with the given element name exists
                if (childElementName.equals(elementName)) {
                    boolean elementExists = true;
                    if (elementExists) { // The given element exists

                        // Check if given text content exists
                        String childElementTextContent = childElement.getTextContent().trim();
                        boolean textContentExists = childElementTextContent.equals(textContent);

                        // If attribute name and value are passed as the string "null", we don't check them (in this case, only the text content is relevant).
                        if (attrName.equals("null") && attrValue.equals("null")) {
                            if (textContentExists) { // Element exists with the given text content. 
                                hasDuplicate = true; // The new element would be a duplicate
                            } else { // Element exists but not with the given text content.
                                hasDuplicate = false; // The new element wouldn't be a duplicate
                            }
                        } else { // If attribute value and name are not null, check if they are the same as the given value.

                            // Check if child element has the given attribute
                            boolean elementHasAttr = childElement.hasAttribute(attrName);

                            if (elementHasAttr) { // The given element has the given attribute
                                // Check if the attribute has the given value
                                String childElementAttrValue = childElement.getAttribute(attrName);
                                if (childElementAttrValue.equals(attrValue)) {
                                    if (textContentExists) { // Element exists with the given text content. 
                                        hasDuplicate = true; // The attribute contains the given attribute value, so the new element would be a duplicate.
                                    } else { // Element exists but not with the given text content.
                                        hasDuplicate = false; // The new element wouldn't be a duplicate
                                    }
                                } else {
                                    hasDuplicate = false; // The attribute does not contain the given attribute value, so the new element would not be a duplicate.
                                }
                            } else {
                                hasDuplicate = false; // The attribute does not exist, so the new element would not be a duplicate.
                            }
                        }
                    }
                }
            }
        }
    }

    return hasDuplicate;
}

From source file:it.iit.genomics.cru.structures.bridges.uniprot.UniprotkbUtils.java

private Collection<MoleculeEntry> getUniprotEntriesXML(String location, boolean waitAndRetryOnFailure)
        throws BridgesRemoteAccessException {

    String url = location + "&format=xml";

    ArrayList<MoleculeEntry> uniprotEntries = new ArrayList<>();
    try {
        HttpClient client = new DefaultHttpClient();
        client.getParams().setParameter(ClientPNames.ALLOW_CIRCULAR_REDIRECTS, Boolean.TRUE);
        HttpGet request = new HttpGet(url);

        // add request header
        request.addHeader("User-Agent", USER_AGENT);

        HttpResponse response = client.execute(request);

        if (response.getEntity().getContentLength() == 0) {
            // No result
            return uniprotEntries;
        }

        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        Document doc = dBuilder.parse(new InputSource(response.getEntity().getContent()));

        // optional, but recommended
        // read this -
        // http://stackoverflow.com/questions/13786607/normalization-in-dom-parsing-with-java-how-does-it-work
        doc.getDocumentElement().normalize();

        // interaction structure
        NodeList entryList = doc.getElementsByTagName("entry");

        for (int i = 0; i < entryList.getLength(); i++) {

            Element entryElement = (Element) entryList.item(i);

            String dataset = entryElement.getAttribute("dataset");

            String ac = entryElement.getElementsByTagName("accession").item(0).getFirstChild().getNodeValue();

            MoleculeEntry uniprotEntry = new MoleculeEntry(ac);

            uniprotEntry.setDataset(dataset);

            // Taxid
            Element organism = (Element) entryElement.getElementsByTagName("organism").item(0);

            String organismCommonName = null;
            String organismScientificName = null;
            String organismOtherName = null;

            NodeList organismNames = organism.getElementsByTagName("name");

            for (int j = 0; j < organismNames.getLength(); j++) {

                Element reference = (Element) organismNames.item(j);
                switch (reference.getAttribute("type")) {
                case "scientific":
                    organismScientificName = reference.getTextContent();
                    break;
                case "common":
                    organismCommonName = reference.getTextContent();
                    break;
                default:
                    organismOtherName = reference.getTextContent();
                    break;
                }
            }

            if (null != organismCommonName) {
                uniprotEntry.setOrganism(organismCommonName);
            } else if (null != organismScientificName) {
                uniprotEntry.setOrganism(organismScientificName);
            } else if (null != organismOtherName) {
                uniprotEntry.setOrganism(organismOtherName);
            }

            NodeList organismReferences = organism.getElementsByTagName("dbReference");

            for (int j = 0; j < organismReferences.getLength(); j++) {
                Element reference = (Element) organismReferences.item(j);
                if (reference.hasAttribute("type") && "NCBI Taxonomy".equals(reference.getAttribute("type"))) {
                    String proteinTaxid = reference.getAttribute("id");
                    uniprotEntry.setTaxid(proteinTaxid);
                }
            }

            // GENE
            NodeList geneNames = entryElement.getElementsByTagName("gene");

            for (int j = 0; j < geneNames.getLength(); j++) {
                Element gene = (Element) geneNames.item(j);

                NodeList nameList = gene.getElementsByTagName("name");

                for (int k = 0; k < nameList.getLength(); k++) {
                    Element name = (Element) nameList.item(k);
                    uniprotEntry.addGeneName(name.getFirstChild().getNodeValue());
                }
            }

            // modified residues
            HashMap<String, ModifiedResidue> modifiedResidues = new HashMap<>();

            NodeList features = entryElement.getElementsByTagName("feature");
            for (int j = 0; j < features.getLength(); j++) {
                Element feature = (Element) features.item(j);

                if (false == entryElement.equals(feature.getParentNode())) {
                    continue;
                }

                // ensembl
                if (feature.hasAttribute("type") && "modified residue".equals(feature.getAttribute("type"))) {

                    String description = feature.getAttribute("description").split(";")[0];

                    if (false == modifiedResidues.containsKey(description)) {
                        modifiedResidues.put(description, new ModifiedResidue(description));
                    }

                    NodeList locations = feature.getElementsByTagName("location");
                    for (int k = 0; k < locations.getLength(); k++) {
                        Element loc = (Element) locations.item(k);
                        NodeList positions = loc.getElementsByTagName("position");
                        for (int l = 0; l < positions.getLength(); l++) {
                            Element position = (Element) positions.item(l);
                            modifiedResidues.get(description).addPosition(
                                    new UniprotPosition(Integer.parseInt(position.getAttribute("position"))));
                        }

                    }
                }
            }

            uniprotEntry.getModifications().addAll(modifiedResidues.values());

            // Xrefs:
            NodeList dbReferences = entryElement.getElementsByTagName("dbReference");
            for (int j = 0; j < dbReferences.getLength(); j++) {
                Element dbReference = (Element) dbReferences.item(j);

                if (false == entryElement.equals(dbReference.getParentNode())) {
                    continue;
                }

                NodeList molecules = dbReference.getElementsByTagName("molecule");

                // ensembl
                if (dbReference.hasAttribute("type") && "Ensembl".equals(dbReference.getAttribute("type"))) {

                    // transcript ID
                    String id = dbReference.getAttribute("id");

                    for (int iMolecule = 0; iMolecule < molecules.getLength(); iMolecule++) {
                        Element molecule = (Element) molecules.item(iMolecule);
                        uniprotEntry.addXrefToVarSplice(id, molecule.getAttribute("id"));
                    }

                    uniprotEntry.addEnsemblGene(id);

                    NodeList properties = dbReference.getElementsByTagName("property");

                    for (int k = 0; k < properties.getLength(); k++) {
                        Element property = (Element) properties.item(k);

                        if (property.hasAttribute("type") && "gene ID".equals(property.getAttribute("type"))) {
                            uniprotEntry.addEnsemblGene(property.getAttribute("value"));
                        }
                    }
                }

                // refseq
                if (dbReference.hasAttribute("type") && "RefSeq".equals(dbReference.getAttribute("type"))) {
                    NodeList properties = dbReference.getElementsByTagName("property");
                    for (int k = 0; k < properties.getLength(); k++) {
                        Element property = (Element) properties.item(k);
                        if (property.hasAttribute("type")
                                && "nucleotide sequence ID".equals(property.getAttribute("type"))) {

                            String id = property.getAttribute("value");
                            if (molecules.getLength() > 0) {
                                for (int iMolecule = 0; iMolecule < molecules.getLength(); iMolecule++) {
                                    Element molecule = (Element) molecules.item(iMolecule);

                                    // If refseq, add also without the version                                       
                                    uniprotEntry.addXrefToVarSplice(id, molecule.getAttribute("id"));
                                    uniprotEntry.addXrefToVarSplice(id.split("\\.")[0],
                                            molecule.getAttribute("id"));

                                }
                            } else {
                                // If refseq, add also without the version                                       
                                uniprotEntry.addXrefToVarSplice(id, ac);
                                uniprotEntry.addXrefToVarSplice(id.split("\\.")[0], ac);
                            }

                            uniprotEntry.addRefseq(id);

                        }
                    }
                }

                /* PDB chains will be imported from the webservice */
                // PDB
                if (dbReference.hasAttribute("type") && "PDB".equals(dbReference.getAttribute("type"))) {
                    NodeList properties = dbReference.getElementsByTagName("property");
                    String method = null;
                    String chains = null;

                    for (int k = 0; k < properties.getLength(); k++) {
                        Element property = (Element) properties.item(k);
                        if (property.hasAttribute("type") && "method".equals(property.getAttribute("type"))) {
                            method = property.getAttribute("value");
                        } else if (property.hasAttribute("type")
                                && "chains".equals(property.getAttribute("type"))) {
                            chains = property.getAttribute("value");
                        }
                    }

                    if (method != null && "Model".equals(method)) {
                        continue;
                    }

                    if (chains == null) {
                        continue;
                    }

                    String pdb = dbReference.getAttribute("id");

                    uniprotEntry.addPDB(pdb, method);

                    for (String chainElement : chains.split(",")) {
                        try {
                            String chainNames = chainElement.split("=")[0];
                            int start = Integer.parseInt(chainElement.split("=")[1].trim().split("-")[0]);
                            int end = Integer
                                    .parseInt(chainElement.split("=")[1].trim().split("-")[1].replace(".", ""));
                            for (String chainName : chainNames.split("/")) {
                                uniprotEntry.addChain(pdb, new ChainMapping(pdb, chainName.trim(), start, end),
                                        method);
                            }
                        } catch (ArrayIndexOutOfBoundsException aiobe) {
                            // IGBLogger.getInstance().warning(
                            // "Cannot parse chain: " + chainElement
                            // + ", skip");
                        }
                    }
                }

            }

            // Sequence
            NodeList sequenceElements = entryElement.getElementsByTagName("sequence");

            for (int j = 0; j < sequenceElements.getLength(); j++) {
                Element sequenceElement = (Element) sequenceElements.item(j);

                if (false == sequenceElement.getParentNode().equals(entryElement)) {
                    continue;
                }
                String sequence = sequenceElement.getFirstChild().getNodeValue().replaceAll("\n", "");
                uniprotEntry.setSequence(sequence);
            }

            // Diseases
            NodeList diseases = entryElement.getElementsByTagName("disease");

            for (int j = 0; j < diseases.getLength(); j++) {
                Element disease = (Element) diseases.item(j);

                NodeList nameList = disease.getElementsByTagName("name");

                for (int k = 0; k < nameList.getLength(); k++) {
                    Element name = (Element) nameList.item(k);
                    uniprotEntry.addDisease(name.getFirstChild().getNodeValue());
                }
            }

            // Get fasta for all varsplice
            String fastaQuery = "http://www.uniprot.org/uniprot/" + uniprotEntry.getUniprotAc()
                    + ".fasta?include=yes";

            try {
                //HttpClient fastaClient = new DefaultHttpClient();

                client.getParams().setParameter(ClientPNames.ALLOW_CIRCULAR_REDIRECTS, Boolean.TRUE);
                HttpGet fastaRequest = new HttpGet(fastaQuery);

                // add request header to the fasta request
                fastaRequest.addHeader("User-Agent", USER_AGENT);

                HttpResponse fastaResponse = client.execute(fastaRequest);

                if (fastaResponse.getEntity().getContentLength() == 0) {
                    continue;
                }

                InputStream is = fastaResponse.getEntity().getContent();

                try {
                    LinkedHashMap<String, ProteinSequence> fasta = FastaReaderHelper
                            .readFastaProteinSequence(is);

                    boolean mainSequence = true;

                    for (ProteinSequence seq : fasta.values()) {
                        //                            logger.info("Add sequence: " + seq.getAccession().getID() + " : " + seq.getSequenceAsString());
                        uniprotEntry.addSequence(seq.getAccession().getID(), seq.getSequenceAsString());
                        if (mainSequence) {
                            uniprotEntry.setMainIsoform(seq.getAccession().getID());
                            mainSequence = false;
                        }
                    }
                } catch (Exception e) {
                    logger.error("Cannot retrieve fasta for : " + uniprotEntry.getUniprotAc());
                }
            } catch (IOException | IllegalStateException ex) {
                logger.error(null, ex);
            }

            uniprotEntries.add(uniprotEntry);

        }

    } catch (SAXParseException se) {
        // Nothing was returned
        // IGBLogger.getInstance()
        // .error("Uniprot returns empty result: " + url);
    } catch (IOException | ParserConfigurationException | IllegalStateException | SAXException | DOMException
            | NumberFormatException e) {
        if (waitAndRetryOnFailure && allowedUniprotFailures > 0) {
            try {
                allowedUniprotFailures--;
                Thread.sleep(5000);
                return getUniprotEntriesXML(location, false);
            } catch (InterruptedException e1) {
                logger.error("Fail to retrieve data from " + location);
                throw new BridgesRemoteAccessException("Fail to retrieve data from Uniprot " + location);
            }
        } else {
            logger.error("Problem with Uniprot: " + url);
            throw new BridgesRemoteAccessException("Fail to retrieve data from Uniprot " + location);
        }
    }

    for (MoleculeEntry entry : uniprotEntries) {
        addToCache(entry);
    }

    return uniprotEntries;
}

From source file:it.imtech.metadata.MetaUtility.java

private void metadata_reader_metadatas(Element iENode, TreeMap<Object, Metadata> metadatas, boolean forceAdd,
        TreeMap forceAddMID, String sLang) {
    try {
        if (iENode.getTagName().equals("metadatas")) {
            NodeList nList = iENode.getChildNodes();

            for (int s = 0; s < nList.getLength(); s++) {

                if (nList.item(s).getNodeType() == Node.ELEMENT_NODE) {
                    Element iInnerNode = (Element) nList.item(s);

                    if (iInnerNode.getTagName().equals("metadata")) {
                        String MID = iInnerNode.getAttribute("ID");

                        // If hidden, remove the element from the forced MIDs
                        String hidden = "0";

                        if (iInnerNode.hasAttribute("hidden")) {
                            hidden = iInnerNode.getAttribute("hidden");
                            forceAddMID.remove(iInnerNode.getAttribute("ID"));
                        }

                        if (forceAddMID.containsKey(MID)) {
                            forceAdd = true;
                        }

                        String MID_parent = iInnerNode.getAttribute("mid_parent");
                        String mandatory = iInnerNode.getAttribute("mandatory");
                        String datatype = iInnerNode.getAttribute("datatype");
                        String editable = iInnerNode.getAttribute("editable");
                        String foxmlname = iInnerNode.getAttribute("forxmlname");
                        String sequence = iInnerNode.getAttribute("sequence");

                        String sequencemulti = "";

                        if (MID_parent.equals("22") || MID_parent.equals("45")) {
                            Node searchparent = iInnerNode;
                            boolean found = false;

                            while (!found) {
                                Element x = (Element) searchparent.getParentNode();
                                if (x.getAttribute("ID").equals("22")) {
                                    sequencemulti = x.getAttribute("sequence");
                                    found = true;
                                } else {
                                    searchparent = searchparent.getParentNode();
                                }
                            }
                        }

                        //Add contributors management
                        if (MID_parent.equals("11") || MID_parent.equals("13")) {
                            Node searchparent = iInnerNode;
                            boolean found = false;

                            while (!found) {
                                Element x = (Element) searchparent.getParentNode();
                                if (x.getAttribute("ID").equals("11")) {
                                    sequencemulti = x.getAttribute("sequence");
                                    found = true;
                                } else {
                                    searchparent = searchparent.getParentNode();
                                }
                            }
                        }

                        if (MID.equals("11")) {
                            Element x = (Element) iInnerNode;
                            sequencemulti = x.getAttribute("sequence");
                        }

                        String foxmlnamespace = iInnerNode.getAttribute("fornamespace");

                        if (!metadata_namespaces.containsValue(foxmlnamespace)) {
                            int count = metadata_namespaces.size();
                            count++;
                            metadata_namespaces.put("ns" + count, foxmlnamespace);
                        }

                        String description = null;
                        String DESCRIPTION_DE = null;

                        TreeMap<Object, Metadata> submetadatas = new TreeMap<Object, Metadata>();

                        NodeList innerList = iInnerNode.getChildNodes();
                        for (int z = 0; z < innerList.getLength(); z++) {
                            if (innerList.item(z).getNodeType() == Node.ELEMENT_NODE) {
                                Element iDescrNode = (Element) innerList.item(z);

                                if (iDescrNode.getAttribute("isocode").equals(sLang)) {
                                    description = iDescrNode.getTextContent();
                                } else if (iDescrNode.getAttribute("isocode").equals("de")) {
                                    DESCRIPTION_DE = iDescrNode.getTextContent();
                                }

                                if (iDescrNode.getTagName().equals("metadatas")) {
                                    if (MID.equals("98")) {
                                        metadata_reader_metadatas(iDescrNode, submetadatas, true, forceAddMID,
                                                sLang);
                                    } else {
                                        metadata_reader_metadatas(iDescrNode, submetadatas, false, forceAddMID,
                                                sLang);
                                    }
                                }
                            }
                        }

                        //Fallback DE
                        if (description == null) {
                            description = DESCRIPTION_DE;
                        }
                        if (description == null && !iInnerNode.getTagName().equals("metadata")) {
                            throw new Exception(
                                    "Can't find description for metadata " + iInnerNode.getTagName());
                        }

                        if ((mandatory.equals("Y") || forceAdd == true)) {
                            int mid_parent = 0;
                            if (!MID_parent.equals("")) {
                                mid_parent = Integer.parseInt(MID_parent);
                            }

                            Metadata t = new Metadata(Integer.parseInt(MID), mid_parent, description, datatype,
                                    editable, foxmlname, null, foxmlnamespace, mandatory, hidden,
                                    sequencemulti);
                            t.submetadatas = submetadatas;

                            String index = sequence;

                            if (index == null || index.equals("")) {
                                index = MID;
                            }
                            int param = Integer.parseInt(index);
                            metadatas.put(param, t);
                        }

                        if (forceAddMID.containsKey(MID.toString())) {
                            forceAdd = false;
                        }
                    }
                }
            }
        }
    } catch (Exception ex) {
        logger.error(ex.getMessage());
    }
}

From source file:de.huberlin.wbi.hiway.am.galaxy.GalaxyApplicationMaster.java

/**
 * A helper function for parsing a Galaxy tool's XML file
 *
 * @param file
 *            the XML file to be parsed
 * @return the Galaxy tools described in the XML file
 */
private GalaxyTool parseToolFile(File file) {
    System.out.println("Parsing Galaxy tool file " + file);
    try {
        DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        String path = file.getCanonicalPath();
        String dir = path.substring(0, path.lastIndexOf("/"));
        Document doc = builder.parse(file);
        Element rootEl = doc.getDocumentElement();
        Transformer transformer = TransformerFactory.newInstance().newTransformer();
        transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
        StreamResult result = new StreamResult(new StringWriter());
        DOMSource source = new DOMSource(rootEl);
        transformer.transform(source, result);
        String toolDescription = result.getWriter().toString();

        // (1) parse macros, if any
        NodeList macrosNds = rootEl.getElementsByTagName("macros");
        Map<String, String> macrosByName = new HashMap<>();
        for (int i = 0; i < macrosNds.getLength(); i++) {
            Node macrosNd = macrosNds.item(i);
            macrosByName.putAll(processMacros(macrosNd, dir));
        }

        // (2) insert macros into the XML and parse the document
        Pattern p = Pattern.compile("<expand macro=\"([^\"]*)\"(>.*?</expand>|/>)", Pattern.DOTALL);
        Matcher m = p.matcher(toolDescription);
        while (m.find()) {
            String name = m.group(1);
            String replace = m.group(0);
            String with = macrosByName.get(name);
            if (m.group(2).startsWith(">")) {
                String yield = m.group(2).substring(1, m.group(2).indexOf("</expand>"));
                with = with.replaceAll("<yield/>", yield.trim());
            }
            if (with != null)
                toolDescription = toolDescription.replace(replace, with);
        }

        doc = builder.parse(new InputSource(new StringReader(toolDescription)));
        rootEl = doc.getDocumentElement();
        String version = rootEl.hasAttribute("version") ? rootEl.getAttribute("version") : "1.0.0";
        String id = rootEl.getAttribute("id");
        GalaxyTool tool = new GalaxyTool(id, version, dir, galaxyPath);

        // (3) determine requirements (libraries and executables) of this tool; requirements have to be parsed such that the environment of the task can be
        // set to include them
        NodeList requirementNds = rootEl.getElementsByTagName("requirement");
        for (int i = 0; i < requirementNds.getLength(); i++) {
            Element requirementEl = (Element) requirementNds.item(i);
            String requirementName = requirementEl.getChildNodes().item(0).getNodeValue().trim();
            String requirementVersion = requirementEl.getAttribute("version");
            tool.addRequirement(requirementName, requirementVersion);
        }

        // (4) determine and set the template for the command of the task; this template will be compiled at runtime by Cheetah
        Element commandEl = (Element) rootEl.getElementsByTagName("command").item(0);
        if (commandEl != null) {
            String command = commandEl.getChildNodes().item(0).getNodeValue().trim();
            String script = command.split(" ")[0];
            String interpreter = commandEl.getAttribute("interpreter");
            if (interpreter.length() > 0) {
                command = command.replace(script, dir + "/" + script);
                command = interpreter + " " + command;
            }
            command = command.replaceAll("\\.value", "");
            command = command.replaceAll("\\.dataset", "");
            tool.setTemplate(command);
        }

        // (5) determine the parameters (atomic, conditional and repeat) of this tool
        Element inputsEl = (Element) rootEl.getElementsByTagName("inputs").item(0);
        if (inputsEl != null)
            tool.setParams(getParams(inputsEl, tool));

        // (6) determine the output files produced by this tool
        Element outputsEl = (Element) rootEl.getElementsByTagName("outputs").item(0);
        if (outputsEl != null) {
            NodeList dataNds = outputsEl.getElementsByTagName("data");
            for (int i = 0; i < dataNds.getLength(); i++) {
                Element dataEl = (Element) dataNds.item(i);
                String name = dataEl.getAttribute("name");
                GalaxyParamValue param = new GalaxyParamValue(name);
                tool.setPath(name);
                tool.addParam(param);

                String format = dataEl.getAttribute("format");
                String metadata_source = dataEl.getAttribute("metadata_source");
                if (format.equals("input") && metadata_source != null && metadata_source.length() > 0) {
                    param.setDataType(metadata_source);
                } else {
                    param.setDataType(format);
                }

                String from_work_dir = dataEl.getAttribute("from_work_dir");
                param.setFrom_work_dir(from_work_dir);
            }
        }

        // (7) register the tool in the Galaxy tool data structure
        if (tool.getTemplate() != null) {
            Map<String, GalaxyTool> toolMap = addAndGetToolMap(id);
            toolMap.put(version, tool);
        }

        return tool;
    } catch (SAXException | IOException | TransformerException | XPathExpressionException
            | ParserConfigurationException e) {
        e.printStackTrace();
        System.exit(-1);
        return null;
    }
}

From source file:marytts.tools.voiceimport.HTKLabeler.java

/**
 *
 * This computes a string of phonetic symbols out of a prompt allophones MaryXML document:
 * - standard phones are taken from "ph" attribute
 * @param tokens
 * @return
 */
private String collectTranscription(NodeList tokens) {

    // TODO: make delims argument
    // StringTokenizer divides transcriptions into syllables
    // syllable delimiters and stress symbols are retained
    String delims = "',-";

    // String storing the original transcription begins with a pause
    String orig = " pau ";

    // get original phone String
    for (int tNr = 0; tNr < tokens.getLength(); tNr++) {

        Element token = (Element) tokens.item(tNr);

        // only look at it if there is a sampa to change
        if (token.hasAttribute("ph")) {

            String sampa = token.getAttribute("ph");

            List<String> sylsAndDelims = new ArrayList<String>();
            StringTokenizer sTok = new StringTokenizer(sampa, delims, true);

            while (sTok.hasMoreElements()) {
                String currTok = sTok.nextToken();

                if (delims.indexOf(currTok) == -1) {
                    // current Token is no delimiter
                    for (Allophone ph : allophoneSet.splitIntoAllophones(currTok)) {
                        // orig += ph.name() + " ";
                        if (ph.name().trim().equals("_"))
                            continue;
                        orig += replaceTrickyPhones(ph.name().trim()) + " ";
                    } // ... for each phone
                } // ... if no delimiter
            } // ... while there are more tokens    
        }

        // TODO: simplify
        if (token.getTagName().equals("t")) {

            // if the following element is not a boundary, insert a non-pause delimiter
            if (tNr == tokens.getLength() - 1
                    || !((Element) tokens.item(tNr + 1)).getTagName().equals("boundary")) {
                orig += "vssil "; // word boundary

            }

        } else if (token.getTagName().equals("boundary")) {

            orig += "ssil "; // phrase boundary

        } else {
            // should be "t" or "boundary" elements
            assert (false);
        }

    } // ... for each t-Element
    orig += "pau";
    return orig;
}

From source file:marytts.tools.voiceimport.HTKLabeler.java

private String collectTranscriptionAndWord(NodeList tokens) {

    // TODO: make delims argument
    // StringTokenizer divides transcriptions into syllables
    // syllable delimiters and stress symbols are retained
    String delims = "',-";

    // String storing the original transcription begins with a pause
    String orig = " pau ";
    String word, HTKWORD;
    boolean first_word_phone = true;
    // get original phone String
    for (int tNr = 0; tNr < tokens.getLength(); tNr++) {

        Element token = (Element) tokens.item(tNr);

        // only look at it if there is a sampa to change
        if (token.hasAttribute("ph")) {
            word = token.getTextContent().trim();
            HTKWORD = word.toUpperCase();
            first_word_phone = true;

            String sampa = token.getAttribute("ph");

            List<String> sylsAndDelims = new ArrayList<String>();
            StringTokenizer sTok = new StringTokenizer(sampa, delims, true);

            while (sTok.hasMoreElements()) {
                String currTok = sTok.nextToken();

                if (delims.indexOf(currTok) == -1) {
                    // current Token is no delimiter
                    for (Allophone ph : allophoneSet.splitIntoAllophones(currTok)) {
                        // orig += ph.name() + " ";
                        if (ph.name().trim().equals("_"))
                            continue;
                        orig += replaceTrickyPhones(ph.name().trim());
                        if (first_word_phone) {
                            orig += "-" + HTKWORD + " ";
                            first_word_phone = false;
                        } else
                            orig += " ";
                    } // ... for each phone
                } // ... if no delimiter
            } // ... while there are more tokens    
        }

        // TODO: simplify
        if (token.getTagName().equals("t")) {

            // if the following element is not a boundary, insert a non-pause delimiter
            if (tNr == tokens.getLength() - 1
                    || !((Element) tokens.item(tNr + 1)).getTagName().equals("boundary")) {
                orig += "vssil "; // word boundary

            }

        } else if (token.getTagName().equals("boundary")) {

            orig += "ssil "; // phrase boundary

        } else {
            // should be "t" or "boundary" elements
            assert (false);
        }

    } // ... for each t-Element
    orig += "pau";
    return orig;
}

From source file:marytts.tools.voiceimport.HTKLabeler.java

/**
 *
 * This computes a string of words out of a prompt allophones MaryXML document:
 * - standard phones are taken from "ph" attribute
 * @param tokens
 * @return
 */
private String collectWordTranscription(NodeList tokens) {

    // TODO: make delims argument
    // StringTokenizer divides transcriptions into syllables
    // syllable delimiters and stress symbols are retained
    String delims = "',-";

    // String storing the original transcription begins with a pause
    String orig = " pau ";
    String HTKWORD_xml_transcription;
    String mary_transcription;
    String HTKWORD, word;

    // get original phone String
    for (int tNr = 0; tNr < tokens.getLength(); tNr++) {

        Element token = (Element) tokens.item(tNr);

        // only look at it if there is a sampa to change
        if (token.hasAttribute("ph")) {
            HTKWORD_xml_transcription = "";
            mary_transcription = "";
            String sampa = token.getAttribute("ph");
            mary_transcription = sampa.trim().replace(" ", "");
            List<String> sylsAndDelims = new ArrayList<String>();
            StringTokenizer sTok = new StringTokenizer(sampa, delims, true);

            while (sTok.hasMoreElements()) {
                String currTok = sTok.nextToken();

                if (delims.indexOf(currTok) == -1) {
                    // current Token is no delimiter
                    for (Allophone ph : allophoneSet.splitIntoAllophones(currTok)) {
                        // orig += ph.name() + " ";
                        if (ph.name().trim().equals("_"))
                            continue;
                        HTKWORD_xml_transcription += replaceTrickyPhones(ph.name().trim()) + " ";
                        //globalwordlexicon += HTKWORD + " " + HTKWORD_xml_transcription;                             
                    } // ... for each phone
                } // ... if no delimiter
            } // ... while there are more tokens

            word = token.getTextContent().trim();
            HTKWORD = word.toUpperCase();

            HTKWORD_xml_transcription = HTKWORD_xml_transcription.trim();

            if ((token.hasAttribute("g2p_method") && token.getAttribute("g2p_method").equals("privatedict"))
                    // this is for rawxml entry with token with ph attribute 
                    || !token.hasAttribute("g2p_method")) {
                HTKWORD = HTKWORD + "_" + HTKWORD_xml_transcription.replaceAll(" ", "");
                //System.out.println("HTKWORD private lexicon or rawxml ph: " + HTKWORD);
            }

            // dictionary
            //System.out.println("HTKWORD: "  + HTKWORD + " HTKWORD_xml_transcription: "  + HTKWORD_xml_transcription);
            HTKdictionary.add(HTKWORD + " " + HTKWORD_xml_transcription);
            Totaldictionary
                    .add(HTKWORD + " " + HTKWORD_xml_transcription.replace(" ", "") + " " + mary_transcription);

            String[] entries;
            entries = lexicon.lookup(word);
            //insert here all the different possible transcriptions                    
            for (int i = 0; i < entries.length; i++) {
                String HTKTranscription = entries[i];
                mary_transcription = HTKTranscription.replace(" ", "");
                HTKTranscription = HTKTranscription.replace("' ", "");
                HTKTranscription = HTKTranscription.replace("- ", "");
                //TODO: replaceTrickyPhones HTKTranscription
                HTKdictionary.add(HTKWORD + " " + HTKTranscription);
                Totaldictionary
                        .add(HTKWORD + " " + HTKTranscription.replace(" ", "") + " " + mary_transcription);
            }

            orig += HTKWORD + " ";

        }

        // TODO: simplify
        if (token.getTagName().equals("t")) {

            // if the following element is not a boundary, insert a non-pause delimiter
            if (tNr == tokens.getLength() - 1
                    || !((Element) tokens.item(tNr + 1)).getTagName().equals("boundary")) {
                orig += "vssil "; // word boundary

            }

        } else if (token.getTagName().equals("boundary")) {

            orig += "ssil "; // phrase boundary

        } else {
            // should be "t" or "boundary" elements
            assert (false);
        }

    } // ... for each t-Element
    orig += "pau";
    return orig;
}

From source file:org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java

@Override
public void init(Element config) throws IOException {
    this.name = Main.cloudBucket.toLowerCase();
    this.staged_sync_location.mkdirs();
    try {
        if (config.hasAttribute("default-bucket-location")) {
            bucketLocation = RegionUtils.getRegion(config.getAttribute("default-bucket-location"));

        }
        if (config.hasAttribute("connection-check-interval")) {
            this.checkInterval = Integer.parseInt(config.getAttribute("connection-check-interval"));
        }
        if (config.hasAttribute("block-size")) {
            int sz = (int) StringUtils.parseSize(config.getAttribute("block-size"));
            HashBlobArchive.MAX_LEN = sz;
        }
        if (config.hasAttribute("allow-sync")) {
            HashBlobArchive.allowSync = Boolean.parseBoolean(config.getAttribute("allow-sync"));
            if (config.hasAttribute("sync-check-schedule")) {
                try {
                    new SyncFSScheduler(config.getAttribute("sync-check-schedule"));
                } catch (Exception e) {
                    SDFSLogger.getLog().error("unable to start sync scheduler", e);
                }
            }

        }
        if (config.hasAttribute("upload-thread-sleep-time")) {
            int tm = Integer.parseInt(config.getAttribute("upload-thread-sleep-time"));
            HashBlobArchive.THREAD_SLEEP_TIME = tm;
        }
        if (config.hasAttribute("cache-writes")) {
            HashBlobArchive.cacheWrites = Boolean.parseBoolean(config.getAttribute("cache-writes"));
        }
        if (config.hasAttribute("cache-reads")) {
            HashBlobArchive.cacheReads = Boolean.parseBoolean(config.getAttribute("cache-reads"));
        }
        if (config.hasAttribute("sync-files")) {
            boolean syncf = Boolean.parseBoolean(config.getAttribute("sync-files"));
            if (syncf) {
                new FileReplicationService(this);
            }
        }
        int rsp = 0;
        int wsp = 0;
        if (config.hasAttribute("read-speed")) {
            rsp = Integer.parseInt(config.getAttribute("read-speed"));
        }
        if (config.hasAttribute("write-speed")) {
            wsp = Integer.parseInt(config.getAttribute("write-speed"));
        }
        if (config.hasAttribute("local-cache-size")) {
            long sz = StringUtils.parseSize(config.getAttribute("local-cache-size"));
            HashBlobArchive.setLocalCacheSize(sz);
        }
        if (config.hasAttribute("metadata-version")) {
            this.mdVersion = Integer.parseInt(config.getAttribute("metadata-version"));
        }
        if (config.hasAttribute("map-cache-size")) {
            int sz = Integer.parseInt(config.getAttribute("map-cache-size"));
            HashBlobArchive.MAP_CACHE_SIZE = sz;
        }
        if (config.hasAttribute("io-threads")) {
            int sz = Integer.parseInt(config.getAttribute("io-threads"));
            Main.dseIOThreads = sz;
        }
        if (config.hasAttribute("clustered")) {
            this.clustered = Boolean.parseBoolean(config.getAttribute("clustered"));
        }
        if (config.hasAttribute("delete-unclaimed")) {
            this.deleteUnclaimed = Boolean.parseBoolean(config.getAttribute("delete-unclaimed"));
        }
        if (config.hasAttribute("glacier-archive-days")) {
            this.glacierDays = Integer.parseInt(config.getAttribute("glacier-archive-days"));
            if (this.glacierDays > 0)
                Main.checkArchiveOnRead = true;
        }
        if (config.hasAttribute("infrequent-access-days")) {
            this.infrequentAccess = Integer.parseInt(config.getAttribute("infrequent-access-days"));
        }
        if (config.hasAttribute("simple-s3")) {
            EncyptUtils.baseEncode = Boolean.parseBoolean(config.getAttribute("simple-s3"));
            this.simpleS3 = true;
        }
        if (config.hasAttribute("md5-sum")) {
            this.md5sum = Boolean.parseBoolean(config.getAttribute("md5-sum"));
            if (!this.md5sum) {
                System.setProperty("com.amazonaws.services.s3.disableGetObjectMD5Validation", "true");
                System.setProperty("com.amazonaws.services.s3.disablePutObjectMD5Validation", "true");
            }

        }
        ClientConfiguration clientConfig = new ClientConfiguration();
        if (config.hasAttribute("use-v4-signer")) {
            boolean v4s = Boolean.parseBoolean(config.getAttribute("use-v4-signer"));

            if (v4s) {
                clientConfig.setSignerOverride("AWSS3V4SignerType");
            }
        }
        if (config.hasAttribute("use-basic-signer")) {
            boolean v4s = Boolean.parseBoolean(config.getAttribute("use-basic-signer"));
            if (v4s) {
                clientConfig.setSignerOverride("S3SignerType");
            }
        }

        clientConfig.setMaxConnections(Main.dseIOThreads * 2);
        clientConfig.setConnectionTimeout(10000);
        clientConfig.setSocketTimeout(10000);

        String s3Target = null;
        if (config.getElementsByTagName("connection-props").getLength() > 0) {
            Element el = (Element) config.getElementsByTagName("connection-props").item(0);
            if (el.hasAttribute("connection-timeout"))
                clientConfig.setConnectionTimeout(Integer.parseInt(el.getAttribute("connection-timeout")));
            if (el.hasAttribute("socket-timeout"))
                clientConfig.setSocketTimeout(Integer.parseInt(el.getAttribute("socket-timeout")));
            if (el.hasAttribute("local-address"))
                clientConfig.setLocalAddress(InetAddress.getByName(el.getAttribute("local-address")));
            if (el.hasAttribute("max-retry"))
                clientConfig.setMaxErrorRetry(Integer.parseInt(el.getAttribute("max-retry")));
            if (el.hasAttribute("protocol")) {
                String pr = el.getAttribute("protocol");
                if (pr.equalsIgnoreCase("http"))
                    clientConfig.setProtocol(Protocol.HTTP);
                else
                    clientConfig.setProtocol(Protocol.HTTPS);

            }
            if (el.hasAttribute("s3-target")) {
                s3Target = el.getAttribute("s3-target");
            }
            if (el.hasAttribute("proxy-host")) {
                clientConfig.setProxyHost(el.getAttribute("proxy-host"));
            }
            if (el.hasAttribute("proxy-domain")) {
                clientConfig.setProxyDomain(el.getAttribute("proxy-domain"));
            }
            if (el.hasAttribute("proxy-password")) {
                clientConfig.setProxyPassword(el.getAttribute("proxy-password"));
            }
            if (el.hasAttribute("proxy-port")) {
                clientConfig.setProxyPort(Integer.parseInt(el.getAttribute("proxy-port")));
            }
            if (el.hasAttribute("proxy-username")) {
                clientConfig.setProxyUsername(el.getAttribute("proxy-username"));
            }
        }

        if (s3Target != null && s3Target.toLowerCase().startsWith("https")) {
            TrustStrategy acceptingTrustStrategy = new TrustStrategy() {
                @Override
                public boolean isTrusted(X509Certificate[] certificate, String authType) {
                    return true;
                }
            };
            SSLSocketFactory sf = new SSLSocketFactory(acceptingTrustStrategy,
                    SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
            clientConfig.getApacheHttpClientConfig().withSslSocketFactory(sf);
        }
        if (awsCredentials != null)
            s3Service = new AmazonS3Client(awsCredentials, clientConfig);
        else
            s3Service = new AmazonS3Client(new InstanceProfileCredentialsProvider(), clientConfig);
        if (bucketLocation != null) {
            s3Service.setRegion(bucketLocation);
            System.out.println("bucketLocation=" + bucketLocation.toString());
        }
        if (s3Target != null) {
            s3Service.setEndpoint(s3Target);
            System.out.println("target=" + s3Target);
        }
        if (config.hasAttribute("disableDNSBucket")) {
            s3Service.setS3ClientOptions(new S3ClientOptions()
                    .withPathStyleAccess(Boolean.parseBoolean(config.getAttribute("disableDNSBucket")))
                    .disableChunkedEncoding());
            System.out.println(
                    "disableDNSBucket=" + Boolean.parseBoolean(config.getAttribute("disableDNSBucket")));
        }
        if (!s3Service.doesBucketExist(this.name)) {
            s3Service.createBucket(this.name);
            SDFSLogger.getLog().info("created new store " + name);
            ObjectMetadata md = new ObjectMetadata();
            md.addUserMetadata("currentsize", "0");
            md.addUserMetadata("currentcompressedsize", "0");
            md.addUserMetadata("clustered", "true");
            md.addUserMetadata("lastupdate", Long.toString(System.currentTimeMillis()));
            md.addUserMetadata("hostname", InetAddress.getLocalHost().getHostName());
            md.addUserMetadata("port", Integer.toString(Main.sdfsCliPort));

            this.clustered = true;
            byte[] sz = Long.toString(System.currentTimeMillis()).getBytes();
            if (md5sum) {
                String mds = BaseEncoding.base64().encode(ServiceUtils.computeMD5Hash(sz));
                md.setContentMD5(mds);
            }
            md.setContentLength(sz.length);
            this.binm = "bucketinfo/"
                    + EncyptUtils.encHashArchiveName(Main.DSEID, Main.chunkStoreEncryptionEnabled);
            s3Service.putObject(this.name, binm, new ByteArrayInputStream(sz), md);
        } else {
            Map<String, String> obj = null;
            ObjectMetadata omd = null;
            try {
                omd = s3Service.getObjectMetadata(this.name, binm);
                obj = omd.getUserMetadata();
                obj.get("currentsize");
            } catch (Exception e) {
                omd = null;
                SDFSLogger.getLog().debug("unable to find bucketinfo object", e);
            }
            if (omd == null) {
                try {
                    this.binm = "bucketinfo/"
                            + EncyptUtils.encHashArchiveName(Main.DSEID, Main.chunkStoreEncryptionEnabled);
                    omd = s3Service.getObjectMetadata(this.name, binm);
                    obj = omd.getUserMetadata();
                    obj.get("currentsize");
                } catch (Exception e) {
                    omd = null;
                    SDFSLogger.getLog().debug("unable to find bucketinfo object", e);
                }
            }
            if (omd == null) {
                ObjectMetadata md = new ObjectMetadata();
                md.addUserMetadata("currentsize", "0");
                md.addUserMetadata("currentcompressedsize", "0");
                md.addUserMetadata("clustered", "true");
                md.addUserMetadata("lastupdate", Long.toString(System.currentTimeMillis()));
                md.addUserMetadata("hostname", InetAddress.getLocalHost().getHostName());
                md.addUserMetadata("port", Integer.toString(Main.sdfsCliPort));

                this.clustered = true;
                this.binm = "bucketinfo/"
                        + EncyptUtils.encHashArchiveName(Main.DSEID, Main.chunkStoreEncryptionEnabled);
                byte[] sz = Long.toString(System.currentTimeMillis()).getBytes();
                if (md5sum) {
                    String mds = BaseEncoding.base64().encode(ServiceUtils.computeMD5Hash(sz));
                    md.setContentMD5(mds);
                }
                md.setContentLength(sz.length);
                s3Service.putObject(this.name, binm, new ByteArrayInputStream(sz), md);
            } else {
                if (obj.containsKey("currentsize")) {
                    long cl = Long.parseLong((String) obj.get("currentsize"));
                    if (cl >= 0) {
                        HashBlobArchive.currentLength.set(cl);

                    } else
                        SDFSLogger.getLog().warn("The S3 objectstore DSE did not close correctly len=" + cl);
                } else {
                    SDFSLogger.getLog().warn(
                            "The S3 objectstore DSE did not close correctly. Metadata tag currentsize was not added");
                }

                if (obj.containsKey("currentcompressedsize")) {
                    long cl = Long.parseLong((String) obj.get("currentcompressedsize"));
                    if (cl >= 0) {
                        HashBlobArchive.compressedLength.set(cl);

                    } else
                        SDFSLogger.getLog().warn("The S3 objectstore DSE did not close correctly clen=" + cl);
                } else {
                    SDFSLogger.getLog().warn(
                            "The S3 objectstore DSE did not close correctly. Metadata tag currentsize was not added");
                }
                if (obj.containsKey("clustered")) {
                    this.clustered = Boolean.parseBoolean(obj.get("clustered"));
                } else
                    this.clustered = false;

                obj.put("clustered", Boolean.toString(this.clustered));
                omd.setUserMetadata(obj);
                try {

                    updateObject(binm, omd);
                } catch (Exception e) {
                    SDFSLogger.getLog().warn("unable to update bucket info in init", e);
                    SDFSLogger.getLog().info("created new store " + name);
                    ObjectMetadata md = new ObjectMetadata();
                    md.addUserMetadata("currentsize", "0");
                    md.addUserMetadata("lastupdate", Long.toString(System.currentTimeMillis()));
                    md.addUserMetadata("currentcompressedsize", "0");
                    md.addUserMetadata("clustered", Boolean.toString(this.clustered));
                    md.addUserMetadata("hostname", InetAddress.getLocalHost().getHostName());
                    md.addUserMetadata("port", Integer.toString(Main.sdfsCliPort));
                    byte[] sz = Long.toString(System.currentTimeMillis()).getBytes();
                    if (md5sum) {
                        String mds = BaseEncoding.base64().encode(ServiceUtils.computeMD5Hash(sz));
                        md.setContentMD5(mds);
                    }
                    md.setContentLength(sz.length);
                    s3Service.putObject(this.name, binm, new ByteArrayInputStream(sz), md);

                }
            }
        }
        ArrayList<Transition> trs = new ArrayList<Transition>();
        if (this.glacierDays > 0 && s3Target == null) {
            Transition transToArchive = new Transition().withDays(this.glacierDays)
                    .withStorageClass(StorageClass.Glacier);
            trs.add(transToArchive);
        }

        if (this.infrequentAccess > 0 && s3Target == null) {
            Transition transToArchive = new Transition().withDays(this.infrequentAccess)
                    .withStorageClass(StorageClass.StandardInfrequentAccess);
            trs.add(transToArchive);

        }
        if (trs.size() > 0) {
            BucketLifecycleConfiguration.Rule ruleArchiveAndExpire = new BucketLifecycleConfiguration.Rule()
                    .withId("SDFS Automated Archive Rule for Block Data").withPrefix("blocks/")
                    .withTransitions(trs).withStatus(BucketLifecycleConfiguration.ENABLED.toString());
            List<BucketLifecycleConfiguration.Rule> rules = new ArrayList<BucketLifecycleConfiguration.Rule>();
            rules.add(ruleArchiveAndExpire);

            BucketLifecycleConfiguration configuration = new BucketLifecycleConfiguration().withRules(rules);

            // Save configuration.
            s3Service.setBucketLifecycleConfiguration(this.name, configuration);
        } else if (s3Target == null) {
            s3Service.deleteBucketLifecycleConfiguration(this.name);
        }
        HashBlobArchive.init(this);
        HashBlobArchive.setReadSpeed(rsp);
        HashBlobArchive.setWriteSpeed(wsp);
        Thread th = new Thread(this);
        th.start();
    } catch (Exception e) {
        SDFSLogger.getLog().error("unable to start service", e);
        throw new IOException(e);
    }

}
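The store-initialization code above only trusts the "currentsize" and "currentcompressedsize" counters when the corresponding user-metadata keys are present, and otherwise logs a warning and leaves the counters alone. The following is a minimal, self-contained sketch of that guarded read; the class and method names are illustrative and do not appear in the SDFS source.

import java.util.Map;

// Illustrative helper: parse a numeric user-metadata entry only if the key exists,
// returning a negative sentinel so the caller can warn and skip the update.
public class MetadataReadSketch {
    static long readCounter(Map<String, String> userMetadata, String key) {
        if (userMetadata.containsKey(key)) {
            try {
                return Long.parseLong(userMetadata.get(key));
            } catch (NumberFormatException e) {
                return -1L; // malformed value, treat like a missing key
            }
        }
        return -1L; // key not present
    }

    public static void main(String[] args) {
        Map<String, String> md = Map.of("currentsize", "42");
        System.out.println(readCounter(md, "currentsize"));           // 42
        System.out.println(readCounter(md, "currentcompressedsize")); // -1
    }
}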

From source file:de.decoit.visa.rdf.RDFManager.java

/**
 * Import the contents of a VSA template into the topology. The devices of
 * the VSA will be grouped into a group with the specified name. Connections
 * between VSA and existing topology are created according to the user's
 * input.
 *
 * @param pTplID ID number of the template
 * @param pGroupName Name of the VSA group
 * @param pConnTargets Information about connections to the existing
 *            topology
 * @param pConnVLANs Information about VLAN assignment of new interfaces
 * @throws IOException if the template RDF/XML file cannot be accessed
 * @throws RDFSourceException if the RDF/XML file contains errors
 */
public void importRDFTemplate(int pTplID, String pGroupName, Map<String, String> pConnTargets,
        Map<String, String> pConnVLANs) throws IOException, RDFSourceException {
    ComponentGroup vsaCG = TEBackend.TOPOLOGY_STORAGE.getComponentGroupByName(pGroupName);

    Document tpl = vsaTemplates.get(pTplID);
    Element vsaElement = (Element) (tpl.getElementsByTagName("VSA").item(0));

    Path vsaRDF = Paths.get("res/vsa", vsaElement.getAttribute("rdf"));

    InputStream is = Files.newInputStream(vsaRDF);
    String modURI = VISA.createModelURI(vsaCG.getIdentifier());

    ds.begin(ReadWrite.WRITE);

    try {
        activeNamedModel = ds.getNamedModel(modURI);

        // If the model contains statements, clear it before importing the
        // new statements
        if (!activeNamedModel.isEmpty()) {
            activeNamedModel.removeAll();
        }

        // Read the RDF file into the model
        activeNamedModel.read(is, null);

        // Remove existing grouping information from the template
        List<RDFNode> cgList = activeNamedModel.listObjectsOfProperty(VISABackup.GROUP).toList();
        for (RDFNode node : cgList) {
            if (node.isResource()) {
                // If the group node is a resource, remove the name literal
                // connected to it
                Resource res = (Resource) node;

                activeNamedModel.removeAll(res, VISA.NAME, null);
            }

            activeNamedModel.removeAll(null, VISABackup.GROUP, node);
        }

        // Remove network information from the model
        List<RDFNode> netList = activeNamedModel.listObjectsOfProperty(VISA.NETWORK).toList();
        for (RDFNode node : netList) {
            if (node.isResource()) {
                Resource res = (Resource) node;

                activeNamedModel.removeAll(res, VISA.INTERNAL_NAME, null);
                activeNamedModel.removeAll(res, VISA.TYPE, null);
                activeNamedModel.removeAll(res, VISA.VALUE, null);
                activeNamedModel.removeAll(res, VISA.NETMASK_LENGTH, null);

                activeNamedModel.removeAll(null, VISA.NETWORK, res);
            }
        }

        // Remove address information from the model
        List<RDFNode> ifList = activeNamedModel.listObjectsOfProperty(VISA.ADDRESS).toList();
        for (RDFNode node : ifList) {
            if (node.isResource()) {
                Resource res = (Resource) node;

                activeNamedModel.removeAll(res, VISA.INTERNAL_NAME, null);
                activeNamedModel.removeAll(res, VISA.TYPE, null);
                activeNamedModel.removeAll(res, VISA.VALUE, null);

                activeNamedModel.removeAll(null, VISA.ADDRESS, res);
            }
        }

        // Add new grouping information to model
        List<RDFNode> devList = activeNamedModel.listObjectsOfProperty(VISA.DEVICE).toList();
        for (RDFNode node : devList) {
            if (node.isResource()) {
                Resource devRes = (Resource) node;

                StringBuilder sbURI = new StringBuilder(VISABackup.getURI());
                sbURI.append(vsaCG.getIdentifier());
                Resource cgRes = activeNamedModel.getResource(sbURI.toString());

                activeNamedModel.add(devRes, VISABackup.GROUP, cgRes);
                activeNamedModel.add(cgRes, VISABackup.NAME, vsaCG.getName());
            }
        }

        // Alter the local names of the nodes
        String lnSuffix = preventLocalNameCollisions(modURI);

        // Alter the root node to fit the root node of the current model
        alterRootNode(modURI, rootNode);

        // Process data stored in the model and create topology objects
        // from it
        HashSet<String> addedLocNames = processModel(modURI);

        // Insert the new model into the existing one
        ds.getDefaultModel().add(activeNamedModel);
        activeNamedModel = null;

        int routerID = 0;
        for (Map.Entry<String, String> connEntry : pConnTargets.entrySet()) {
            Element e = tpl.getElementById(connEntry.getKey());
            StringBuilder sbSrc = new StringBuilder(e.getAttribute("component"));
            sbSrc.append(lnSuffix);

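            // Note: the sbVLAN built in this block is never used here; the "vlan"
            // attribute is read again further below when the router VM is created.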
            if (e.hasAttribute("vlan")) {
                StringBuilder sbVLAN = new StringBuilder(e.getAttribute("vlan"));
                sbVLAN.append(lnSuffix);
            }

            // NetworkComponent inside the VSA
            NetworkComponent ncSrc = TEBackend.TOPOLOGY_STORAGE.getComponent(sbSrc.toString());

            // NetworkComponent in the existing topology
            NetworkComponent ncTarget = TEBackend.TOPOLOGY_STORAGE.getComponent(connEntry.getValue());
            addedLocNames.add(ncTarget.getRDFLocalName());

            // Interface inside the VSA (source)
            Interface ifSrc = ncSrc.getConfig().createInterface(PortOrientation.TOP);

            // Interface in the existing topology (target)
            Interface ifTarget = ncTarget.getConfig().createInterface(PortOrientation.TOP);

            GroupInterface gIf = TEBackend.TOPOLOGY_STORAGE.getComponentGroupByName(pGroupName)
                    .createOuterConnection(ifSrc, ifTarget);
            TEBackend.TOPOLOGY_STORAGE.createCable(ifSrc, ifTarget, gIf);

            // Check if the target component is a switch and the connection is part of a VLAN
            if (ncTarget instanceof NCSwitch && pConnVLANs.containsKey(connEntry.getKey())) {
                HashSet<VLAN> vlan = ifTarget.getAllVLAN();

                // Add the VLAN to the target interface
                vlan.add(TEBackend.TOPOLOGY_STORAGE.getVLAN(pConnVLANs.get(connEntry.getKey())));
                ifTarget.setVLAN(vlan);
            }

            // If a target VLAN inside the VSA is specified, create a router VM to connect the topology with that VLAN
            if (e.hasAttribute("vlan") && e.getAttribute("vlan").length() > 0) {
                StringBuilder sbVLAN = new StringBuilder(e.getAttribute("vlan"));
                sbVLAN.append(lnSuffix);

                ArrayList<String> ifOrientation = new ArrayList<>();
                ifOrientation.add(PortOrientation.TOP.toString());
                ifOrientation.add(PortOrientation.TOP.toString());
                StringBuilder sbRtName = new StringBuilder("VSA Router ");
                sbRtName.append(routerID);

                NCVM router = TEBackend.TOPOLOGY_STORAGE.createVM(ifOrientation, sbRtName.toString(), null,
                        null);
                router.getConfig().setComponentGroup(vsaCG.getName());
                HashMap<String, Interface> ifMap = router.getConfig().getPorts();
                Set<String> ifMapKeySet = ifMap.keySet();
                Iterator<String> it = ifMapKeySet.iterator();

                // Configure the interface connected to the VSA
                Interface ifInt = ncSrc.getConfig().createInterface(PortOrientation.TOP);
                Interface rtIfInt = ifMap.get(it.next());
                TEBackend.TOPOLOGY_STORAGE.createCable(ifInt, rtIfInt, null);

                VLAN intVLAN = TEBackend.TOPOLOGY_STORAGE.getVLAN(sbVLAN.toString());
                HashSet<VLAN> ifIntVLANs = new HashSet<>();
                ifIntVLANs.add(intVLAN);
                ifInt.setVLAN(ifIntVLANs);

                // Configure the interface connected to the topology
                Interface ifExt = ncSrc.getConfig().createInterface(PortOrientation.TOP);
                Interface rtIfExt = ifMap.get(it.next());
                TEBackend.TOPOLOGY_STORAGE.createCable(ifExt, rtIfExt, null);

                if (pConnVLANs.containsKey(connEntry.getKey())) {
                    VLAN extVLAN = TEBackend.TOPOLOGY_STORAGE.getVLAN(pConnVLANs.get(connEntry.getKey()));
                    HashSet<VLAN> ifExtVLANs = new HashSet<>();
                    ifExtVLANs.add(extVLAN);
                    ifExt.setVLAN(ifExtVLANs);
                }
            }
        }

        // Layout the topology
        TEBackend.TOPOLOGY_STORAGE.layoutTopology();

        TEBackend.TOPOLOGY_STORAGE.updateInterfaceOrientations(addedLocNames);

        ds.commit();
    } catch (Throwable ex) {
        ds.abort();

        throw ex;
    } finally {
        activeNamedModel = null;

        ds.end();
        TDB.sync(ds);

        is.close();
    }
}
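
The method above always guards the optional "vlan" attribute with hasAttribute (and a non-empty check) before calling getAttribute. A minimal, self-contained sketch of that guard pattern is shown below; the XML snippet and class name are made up for illustration only.

import java.io.StringReader;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.InputSource;

// Illustrative only: parse a tiny document and branch on an optional attribute.
public class HasAttributeSketch {
    public static void main(String[] args) throws Exception {
        String xml = "<connection component=\"switch01\" vlan=\"vlan10\"/>";
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                .parse(new InputSource(new StringReader(xml)));
        Element e = doc.getDocumentElement();

        // Only read the attribute when it is actually specified and non-empty,
        // mirroring the checks in importRDFTemplate above.
        if (e.hasAttribute("vlan") && e.getAttribute("vlan").length() > 0) {
            System.out.println("connect to VLAN " + e.getAttribute("vlan"));
        } else {
            System.out.println("no VLAN requested");
        }
    }
}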