Example usage for org.apache.commons.digester3 Digester addRule

List of usage examples for org.apache.commons.digester3 Digester addRule

Introduction

On this page you can find example usage for org.apache.commons.digester3 Digester addRule.

Prototype

public void addRule(String pattern, Rule rule) 

Source Link

Document

Register a new Rule matching the specified pattern.

Usage

From source file:org.apache.commons.digester3.examples.api.dbinsert.Main.java

/**
 * Registers the digester rules for parsing a "database" XML document and
 * inserting the rows it describes via the supplied JDBC connection.
 *
 * @param d    the digester instance to configure
 * @param conn the live JDBC connection handed to the row-insertion rule
 */
private static void addRules(Digester d, java.sql.Connection conn) {

    // --------------------------------------------------
    // when we encounter a "table" tag, do the following:

    // Create a new instance of class Table, and push that
    // object onto the digester stack of objects. We only need
    // this so that when a row is inserted, it can find out what
    // the enclosing tablename was.
    //
    // Note that the object is popped off the stack at the end of the
    // "table" tag (normal behaviour for ObjectCreateRule). Because we
    // never added the table object to some parent object, when it is
    // popped off the digester stack it becomes garbage-collected. That
    // is fine in this situation; we've done all the necessary work and
    // don't need the table object any more.
    d.addObjectCreate("database/table", Table.class);

    // Map *any* attributes on the table tag to appropriate
    // setter-methods on the top object on the stack (the Table
    // instance created by the preceding rule). We only expect one
    // attribute, though: a 'name' attribute specifying what table
    // we are inserting rows into.
    d.addSetProperties("database/table");

    // --------------------------------------------------
    // When we encounter a "row" tag, invoke methods on the provided
    // RowInserterRule instance.
    //
    // This rule creates a Row instance and pushes it on the digester
    // object stack, rather like ObjectCreateRule, so that the column
    // tags have somewhere to store their information. And when the
    // </row> end tag is found, the rule will trigger to remove this
    // object from the stack, and also do an actual database insert.
    //
    // Note that the rule instance we are passing to the digester has
    // been initialised with some useful data (the SQL connection).
    //
    // Note also that in this case we are not using the digester's
    // factory methods to create the rule instance; that's just a
    // convenience - and obviously not an option for Rule classes
    // that are not part of the digester core implementation.
    RowInserterRule rowInserterRule = new RowInserterRule(conn);
    d.addRule("database/table/row", rowInserterRule);

    // --------------------------------------------------
    // when we encounter a "column" tag, call addColumn on the top
    // object on the stack, passing two parameters: the "name"
    // attribute, and the text within the tag body.
    d.addCallMethod("database/table/row/column", "addColumn", 2);
    d.addCallParam("database/table/row/column", 0, "name");
    d.addCallParam("database/table/row/column", 1);
}

From source file:org.apache.commons.digester3.examples.plugins.pipeline.CompoundTransform.java

/**
 * Registers the rules for nested transforms: every {@code <subtransform>}
 * element under the given pattern prefix is instantiated through the plugin
 * mechanism as a {@code Transform} and attached to its parent via
 * {@code addTransform}.
 *
 * @param d             the digester being configured
 * @param patternPrefix pattern of the enclosing element
 */
public static void addRules(Digester d, String patternPrefix) {
    // Compute the child pattern once and reuse it for both rules.
    final String subTransformPattern = patternPrefix + "/subtransform";
    d.addRule(subTransformPattern, new PluginCreateRule(Transform.class));
    d.addSetNext(subTransformPattern, "addTransform");
}

From source file:org.apache.commons.digester3.examples.plugins.pipeline.Pipeline.java

/**
 * Entry point: parses the pipeline configuration file named on the command
 * line and then executes the resulting pipeline. Exits with status -1 on
 * usage error, parse failure, or execution failure.
 *
 * @param args expects exactly one argument: the configuration file path
 */
public static void main(String[] args) {
    if (args.length != 1) {
        System.err.println("usage: pipeline config-file");
        System.exit(-1);
    }
    String configPath = args[0];

    // Use plugin-aware rule matching so <transform> can name any Transform impl.
    Digester d = new Digester();
    PluginRules pluginRules = new PluginRules();
    d.setRules(pluginRules);

    // The document root creates the Pipeline object itself.
    d.addObjectCreate("pipeline", Pipeline.class);

    // <source file="..."/> -> setSource(file)
    d.addCallMethod("pipeline/source", "setSource", 1);
    d.addCallParam("pipeline/source", 0, "file");

    // <transform> is plugin-instantiated, then wired in via setTransform.
    PluginCreateRule transformRule = new PluginCreateRule(Transform.class);
    d.addRule("pipeline/transform", transformRule);
    d.addSetNext("pipeline/transform", "setTransform");

    // <destination file="..."/> -> setDest(file)
    d.addCallMethod("pipeline/destination", "setDest", 1);
    d.addCallParam("pipeline/destination", 0, "file");

    Pipeline pipeline = null;
    try {
        pipeline = d.parse(configPath);
    } catch (Exception e) {
        System.err.println("oops exception occurred during parse.");
        e.printStackTrace();
        System.exit(-1);
    }

    try {
        pipeline.execute();
    } catch (Exception e) {
        System.err.println("oops exception occurred during pipeline execution.");
        e.printStackTrace();
        System.exit(-1);
    }
}

From source file:org.gbif.metadata.eml.EmlFactory.java

/**
 * Uses rule-based parsing to read the EML XML and build the EML model.
 *
 * Note the following omissions:
 * - Metadata provider rules are omitted on the assumption that the provider is
 *   the same as the creator.
 * - Contact rules are omitted on the assumption that contacts are covered by
 *   the creator and associated parties.
 * - Publisher rules are omitted on the assumption the publisher is covered by
 *   the creator and associated parties.
 *
 * @param xml To read. Note this will be closed before returning
 *
 * @return The EML populated
 *
 * @throws IOException  If the Stream cannot be read from
 * @throws SAXException If the XML is not well formed
 */
public static Eml build(InputStream xml) throws IOException, SAXException, ParserConfigurationException {
    Digester digester = new Digester();
    digester.setNamespaceAware(true);

    // push the EML object onto the stack; all rules below populate this instance
    Eml eml = new Eml();
    digester.push(eml);

    // add the rules

    // language as xml:lang attribute
    digester.addCallMethod("eml", "setMetadataLanguage", 1);
    digester.addCallParam("eml", 0, "xml:lang");
    // guid as packageId attribute
    digester.addCallMethod("eml", "setPackageId", 1);
    digester.addCallParam("eml", 0, "packageId");

    // alternative ids
    digester.addCallMethod("eml/dataset/alternateIdentifier", "addAlternateIdentifier", 1);
    digester.addCallParam("eml/dataset/alternateIdentifier", 0);

    // title together with language
    digester.addCallMethod("eml/dataset/title", "setTitle", 2);
    digester.addCallParam("eml/dataset/title", 0);
    digester.addCallParam("eml/dataset/title", 1, "xml:lang");

    digester.addBeanPropertySetter("eml/dataset/language", "language");

    // descriptions, broken into multiple paragraphs
    digester.addCallMethod("eml/dataset/abstract/para", "addDescriptionPara", 1);
    digester.addCallParam("eml/dataset/abstract/para", 0);

    digester.addBeanPropertySetter("eml/dataset/additionalInfo/para", "additionalInfo");
    // intellectual rights can contain markup, so capture the whole DOM node
    // and hand it to parseIntellectualRights rather than just the text body
    digester.addRule("eml/dataset/intellectualRights/para", new NodeCreateRule(Node.ELEMENT_NODE));
    digester.addSetNext("eml/dataset/intellectualRights/para", "parseIntellectualRights");
    digester.addCallMethod("eml/dataset/methods/methodStep/description/para", "addMethodStep", 1);
    digester.addCallParam("eml/dataset/methods/methodStep/description/para", 0);
    digester.addBeanPropertySetter("eml/dataset/methods/sampling/studyExtent/description/para", "studyExtent");
    digester.addBeanPropertySetter("eml/dataset/methods/sampling/samplingDescription/para",
            "sampleDescription");
    digester.addBeanPropertySetter("eml/dataset/methods/qualityControl/description/para", "qualityControl");
    digester.addBeanPropertySetter("eml/dataset/distribution/online/url", "distributionUrl");
    digester.addBeanPropertySetter("eml/dataset/purpose/para", "purpose");
    digester.addBeanPropertySetter("eml/dataset/maintenance/description/para", "updateFrequencyDescription");
    digester.addCallMethod("eml/dataset/maintenance/maintenanceUpdateFrequency", "setUpdateFrequency", 1);
    digester.addCallParam("eml/dataset/maintenance/maintenanceUpdateFrequency", 0);
    // GBIF additional-metadata section
    digester.addCallMethod("eml/additionalMetadata/metadata/gbif/citation", "setCitation", 2);
    digester.addCallParam("eml/additionalMetadata/metadata/gbif/citation", 0);
    digester.addCallParam("eml/additionalMetadata/metadata/gbif/citation", 1, "identifier");
    digester.addCallMethod("eml/additionalMetadata/metadata/gbif/specimenPreservationMethod",
            "addSpecimenPreservationMethod", 1);
    digester.addCallParam("eml/additionalMetadata/metadata/gbif/specimenPreservationMethod", 0);
    digester.addBeanPropertySetter("eml/additionalMetadata/metadata/gbif/resourceLogoUrl", "logoUrl");
    digester.addBeanPropertySetter("eml/additionalMetadata/metadata/gbif/hierarchyLevel", "hierarchyLevel");
    digester.addCallMethod("eml/dataset/pubDate", "setPubDateAsString", 1);
    digester.addCallParam("eml/dataset/pubDate", 0);

    digester.addCallMethod("eml/additionalMetadata/metadata/gbif/dateStamp", "setDateStamp", 1);
    digester.addCallParam("eml/additionalMetadata/metadata/gbif/dateStamp", 0);

    // agent (person/organisation) sub-trees share one rule set, parameterised
    // by pattern and by the EML setter to invoke
    addAgentRules(digester, "eml/dataset/creator", "addCreator");
    addAgentRules(digester, "eml/dataset/metadataProvider", "addMetadataProvider");
    addAgentRules(digester, "eml/dataset/contact", "addContact");
    addAgentRules(digester, "eml/dataset/associatedParty", "addAssociatedParty");
    // remaining sections each register their own rule group
    addKeywordRules(digester);
    addBibliographicCitations(digester);
    addGeographicCoverageRules(digester);
    addTemporalCoverageRules(digester);
    addLivingTimePeriodRules(digester);
    addFormationPeriodRules(digester);
    addTaxonomicCoverageRules(digester);
    addProjectRules(digester);
    addCollectionRules(digester);
    addPhysicalDataRules(digester);
    addJGTICuratorialIUnit(digester);

    // now parse and return the EML; the stream is always closed, as promised
    // in the javadoc above
    try {
        digester.parse(xml);
    } finally {
        xml.close();
    }

    return eml;
}