List of usage examples for org.xml.sax SAXException printStackTrace
public void printStackTrace()
From source file:i5.las2peer.services.mobsos.SurveyService.java
@POST @Consumes(MediaType.APPLICATION_JSON)/*from w w w .j a v a2s. com*/ @Path("surveys/{id}/responses") @Summary("submit response data to given survey.") @ApiResponses(value = { @ApiResponse(code = 200, message = "Survey response submitted successfully."), @ApiResponse(code = 400, message = "Survey response invalid -or- questionnaire form invalid. Cause: ..."), @ApiResponse(code = 404, message = "Survey does not exist -or- No questionnaire defined for survey."), @ApiResponse(code = 400, message = "Survey response already submitted."), }) public HttpResponse submitSurveyResponseJSON(@PathParam("id") int id, @ContentParam String answerJSON) { Date now = new Date(); String onAction = "submitting response to survey " + id; try { // retrieve survey by id; HttpResponse rs = getSurvey(id); if (rs.getStatus() != 200) { return rs; } JSONObject s = (JSONObject) JSONValue.parse(rs.getResult()); // check if survey expired/not started SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); df.setTimeZone(TimeZone.getTimeZone("GMT")); Date start = df.parse((String) s.get("start")); Date end = df.parse((String) s.get("end")); if (now.getTime() > end.getTime()) { HttpResponse resp = new HttpResponse("Cannot submit response. Survey expired."); resp.setStatus(403); return resp; } else if (now.getTime() < start.getTime()) { HttpResponse resp = new HttpResponse("Cannot submit response. 
Survey has not begun, yet."); resp.setStatus(403); return resp; } // check for questionnaire form int qid = Integer.parseInt(s.get("qid") + ""); if (qid == -1) { HttpResponse result = new HttpResponse("No questionnaire defined for survey " + id + "!"); result.setStatus(404); return result; } // retrieve questionnaire form for survey to do answer validation HttpResponse r = downloadQuestionnaireForm(qid); if (200 != r.getStatus()) { // if questionnaire form does not exist, pass on response containing error status return r; } Document form; JSONObject answer; // parse form to XML document incl. validation try { form = validateQuestionnaireData(r.getResult()); } catch (SAXException e) { HttpResponse result = new HttpResponse("Questionnaire form is invalid! Cause: " + e.getMessage()); result.setStatus(400); return result; } try { //System.out.println(answerJSON); answer = (JSONObject) JSONValue.parseWithException(answerJSON); } catch (ParseException e) { HttpResponse result = new HttpResponse( "Survey response is not valid JSON! Cause: " + e.getMessage()); result.setStatus(400); return result; } JSONObject answerFieldTable; // validate if answer matches form. try { answerFieldTable = validateResponse(form, answer); } catch (IllegalArgumentException e) { HttpResponse result = new HttpResponse("Survey response is invalid! 
Cause: " + e.getMessage()); result.setStatus(400); return result; } // after all validation finally persist survey response in database int surveyId = id; String sub = (String) getActiveUserInfo().get("sub"); if (getActiveAgent().getId() == getActiveNode().getAnonymous().getId()) { sub += now.getTime(); } Connection conn = null; PreparedStatement stmt = null; ResultSet rset = null; try { conn = dataSource.getConnection(); stmt = conn.prepareStatement( "insert into " + jdbcSchema + ".response(uid,sid,qkey,qval,time) values (?,?,?,?,?)"); Iterator<String> it = answerFieldTable.keySet().iterator(); while (it.hasNext()) { String qkey = it.next(); String qval = "" + answerFieldTable.get(qkey); stmt.setString(1, sub); stmt.setInt(2, surveyId); stmt.setString(3, qkey); stmt.setString(4, qval); stmt.setTimestamp(5, new Timestamp(now.getTime())); stmt.addBatch(); } stmt.executeBatch(); HttpResponse result = new HttpResponse("Response to survey " + id + " submitted successfully."); result.setStatus(200); return result; } catch (SQLException | UnsupportedOperationException e) { if (0 <= e.getMessage().indexOf("Duplicate")) { HttpResponse result = new HttpResponse("Survey response already submitted!"); result.setStatus(409); return result; } else { e.printStackTrace(); return internalError(onAction); } } finally { try { if (rset != null) rset.close(); } catch (Exception e) { e.printStackTrace(); return internalError(onAction); } try { if (stmt != null) stmt.close(); } catch (Exception e) { e.printStackTrace(); return internalError(onAction); } try { if (conn != null) conn.close(); } catch (Exception e) { e.printStackTrace(); return internalError(onAction); } } } catch (Exception e) { e.printStackTrace(); return internalError(onAction); } }
From source file:net.sourceforge.seqware.webservice.resources.tables.ProcessIDResource.java
/** {@inheritDoc} */ @Override//from w w w. jav a 2s . c o m public Representation put(Representation rep) { authenticate(); Representation toreturn = null; if (rep.getMediaType().equals(MediaType.APPLICATION_XML)) { JaxbObject<Processing> jo = new JaxbObject<Processing>(); Processing p = null; try { String text = rep.getText(); p = (Processing) XmlTools.unMarshal(jo, new Processing(), text); } catch (IOException e) { e.printStackTrace(); throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, e); } catch (SAXException ex) { ex.printStackTrace(); throw new ResourceException(Status.CLIENT_ERROR_UNPROCESSABLE_ENTITY, ex); } try { Log.info("Updating Processing " + p.getSwAccession()); // Move the Hibernate calls before the direct DB access so that // the authentication of this user is checked by the Hibernate layer ProcessingService ps = BeanFactory.getProcessingServiceBean(); Processing processing = (Processing) testIfNull(ps.findBySWAccession(p.getSwAccession())); Log.debug("Checking processing permission for " + registration.getEmailAddress()); processing.givesPermission(registration); if (p.getOwner() != null) { processing.setOwner(BeanFactory.getRegistrationServiceBean() .findByEmailAddress(p.getOwner().getEmailAddress())); } else { processing.setOwner(registration); } processing.setStatus(p.getStatus() == null ? 
"pending" : p.getStatus()); processing.setTaskGroup(p.isTaskGroup()); if (p.getRunStartTimestamp() != null) { processing.setRunStartTimestamp(p.getRunStartTimestamp()); } if (p.getRunStopTimestamp() != null) { processing.setRunStopTimestamp(p.getRunStopTimestamp()); } if (p.getFiles() != null) { if (processing.getFiles() == null) { processing.setFiles(p.getFiles()); } else { processing.getFiles().addAll(p.getFiles()); } } if (p.getIUS() != null) { HashSet<IUS> set = new HashSet<IUS>(); IUSService is = BeanFactory.getIUSServiceBean(); for (IUS i : p.getIUS()) { IUS newI = is.findBySWAccession(i.getSwAccession()); if (newI != null && newI.givesPermission(registration)) { set.add(newI); } else if (newI == null) { Log.info("Could not be found " + i); } } if (processing.getIUS() == null) { processing.setIUS(set); } else { processing.getIUS().addAll(set); } } if (p.getLanes() != null) { HashSet<Lane> set = new HashSet<Lane>(); LaneService ls = BeanFactory.getLaneServiceBean(); for (Lane l : p.getLanes()) { Lane newL = ls.findBySWAccession(l.getSwAccession()); if (newL != null && newL.givesPermission(registration)) { set.add(newL); } else if (newL == null) { Log.info("Could not be found " + l); } } if (processing.getLanes() == null) { processing.setLanes(set); } else { processing.getLanes().addAll(set); } } if (p.getSamples() != null) { HashSet<Sample> set = new HashSet<Sample>(); SampleService ss = BeanFactory.getSampleServiceBean(); for (Sample s : p.getSamples()) { Sample newS = ss.findBySWAccession(s.getSwAccession()); if (newS != null && newS.givesPermission(registration)) { set.add(newS); } else if (newS == null) { Log.info("Could not be found " + s); } } if (processing.getSamples() == null) { processing.setSamples(set); } else { processing.getSamples().addAll(set); } } if (p.getSequencerRuns() != null) { HashSet<SequencerRun> set = new HashSet<SequencerRun>(); SequencerRunService srs = BeanFactory.getSequencerRunServiceBean(); for (SequencerRun sr : 
p.getSequencerRuns()) { SequencerRun newSR = srs.findBySWAccession(sr.getSwAccession()); if (newSR != null && newSR.givesPermission(registration)) { set.add(newSR); } else if (newSR == null) { Log.info("Could not be found " + sr); } } if (processing.getSequencerRuns() == null) { processing.setSequencerRuns(set); } else { processing.getSequencerRuns().addAll(set); } } if (p.getStudies() != null) { HashSet<Study> set = new HashSet<Study>(); StudyService srs = BeanFactory.getStudyServiceBean(); for (Study sr : p.getStudies()) { Study newS = srs.findBySWAccession(sr.getSwAccession()); if (newS != null && newS.givesPermission(registration)) { set.add(newS); } else if (newS == null) { Log.info("Could not be found " + sr); } } if (processing.getSequencerRuns() == null) { processing.setStudies(set); } else { processing.getStudies().addAll(set); } } if (p.getChildren() != null || p.getParents() != null) { HashSet<Processing> childSet = new HashSet<Processing>(); for (Processing proc : p.getChildren()) { Processing newProc = ps.findBySWAccession(proc.getSwAccession()); if (newProc != null && newProc.givesPermission(registration)) { childSet.add(newProc); } else if (newProc == null) { Log.info("Could not be found " + proc); } } if (processing.getChildren() == null) { processing.setChildren(childSet); } else { processing.getChildren().addAll(childSet); } HashSet<Processing> parentSet = new HashSet<Processing>(); for (Processing proc : p.getParents()) { Processing newProc = ps.findBySWAccession(proc.getSwAccession()); if (newProc != null && newProc.givesPermission(registration)) { parentSet.add(newProc); } else if (newProc == null) { Log.info("Could not be found " + proc); } } if (processing.getParents() == null) { processing.setParents(parentSet); } else { processing.getParents().addAll(parentSet); } } if (p.getWorkflowRun() != null && (processing.getWorkflowRun() == null || p.getWorkflowRun().getSwAccession() != processing.getWorkflowRun().getSwAccession())) { 
WorkflowRunService wrs = BeanFactory.getWorkflowRunServiceBean(); WorkflowRun newWr = wrs.findBySWAccession(p.getWorkflowRun().getSwAccession()); if (newWr != null && newWr.givesPermission(registration)) { processing.setWorkflowRun(newWr); } else if (newWr == null) { Log.info("Could not be found " + p.getWorkflowRun()); } } if (p.getWorkflowRunByAncestorWorkflowRunId() != null && (processing.getWorkflowRunByAncestorWorkflowRunId() == null || p.getWorkflowRunByAncestorWorkflowRunId().getSwAccession() != processing .getWorkflowRunByAncestorWorkflowRunId().getSwAccession())) { WorkflowRunService wrs = BeanFactory.getWorkflowRunServiceBean(); WorkflowRun newWr = wrs .findBySWAccession(p.getWorkflowRunByAncestorWorkflowRunId().getSwAccession()); if (newWr != null && newWr.givesPermission(registration)) { processing.setWorkflowRunByAncestorWorkflowRunId(newWr); } else if (newWr == null) { Log.info("Could not be found " + p.getWorkflowRunByAncestorWorkflowRunId()); } } if (p.getProcessingAttributes() != null && !p.getProcessingAttributes().isEmpty()) { //SEQWARE-1577 - AttributeAnnotator cascades deletes when annotating // processing.getProcessingAttributes().clear(); for (ProcessingAttribute pa : p.getProcessingAttributes()) { pa.setProcessing(processing); processing.getProcessingAttributes().add(pa); } } ps.update(registration, processing); //Direct DB calls if (p.getFiles() != null) { addNewFiles(p); } if (p.getIUS() != null) { addNewIUSes(p); } if (p.getLanes() != null) { addNewLanes(p); } if (p.getSequencerRuns() != null) { addNewSequencerRuns(p); } if (p.getStudies() != null) { addNewStudies(p); } if (p.getExperiments() != null) { addNewExperiments(p); } if (p.getSamples() != null) { addNewSamples(p); } if (p.getChildren() != null || p.getParents() != null) { addNewRelationships(p); } if (p.getWorkflowRun() != null) { MetadataDB mdb = DBAccess.get(); try { ReturnValue ret = mdb.update_processing_workflow_run(p.getProcessingId(), p.getWorkflowRun().getSwAccession()); 
if (ret.getExitStatus() != ReturnValue.SUCCESS) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Error in Update Processing Workflow Run with error " + ret.getExitStatus()); } } finally { this.closeConnectionStatementResultSet(mdb, null); // this should be redundant DBAccess.close(); } } if (p.getWorkflowRunByAncestorWorkflowRunId() != null) { MetadataDB mdb = DBAccess.get(); try { mdb.add_workflow_run_ancestor(p.getWorkflowRunByAncestorWorkflowRunId().getSwAccession(), p.getProcessingId()); } finally { this.closeConnectionStatementResultSet(mdb, null); // this should be redundant DBAccess.close(); } } ReturnValue newProcessing = Processing.clone(p), ret; MetadataDB mdb = DBAccess.get(); try { ret = DBAccess.get().update_processing_event(p.getProcessingId(), newProcessing); } finally { this.closeConnectionStatementResultSet(mdb, null); // this should be redundant DBAccess.close(); } if (ret.getExitStatus() != ReturnValue.SUCCESS) { throw new ResourceException(Status.SERVER_ERROR_INTERNAL, "Updating the Processing failed with error " + ret.getExitStatus()); } Hibernate3DtoCopier copier = new Hibernate3DtoCopier(); Document line = XmlTools.marshalToDocument(jo, copier.hibernate2dto(processing)); toreturn = XmlTools.getRepresentation(line); getResponse().setEntity(toreturn); getResponse().setStatus(Status.SUCCESS_CREATED); } catch (SecurityException e) { getResponse().setStatus(Status.CLIENT_ERROR_FORBIDDEN, e); } catch (Exception e) { e.printStackTrace(); getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, e); } finally { DBAccess.close(); } } else { throw new ResourceException(Status.CLIENT_ERROR_UNSUPPORTED_MEDIA_TYPE); } return toreturn; }
From source file:net.sourceforge.seqware.webservice.resources.tables.WorkflowRunIDResource.java
/**
 * {@inheritDoc}
 *
 * Updates an existing WorkflowRun from an XML representation and returns the
 * updated entity as XML.
 *
 * @return the XML representation of the updated workflow run, or {@code null}
 *         when an error status was set on the response instead
 */
@Override
public Representation put(Representation entity) {
    Representation toreturn = null;
    if (entity.getMediaType().equals(MediaType.APPLICATION_XML)) {
        WorkflowRun newWR = null;
        JaxbObject<WorkflowRun> jo = new JaxbObject<>();
        try {
            String text = entity.getText();
            Log.debug(text);
            newWR = (WorkflowRun) XmlTools.unMarshal(jo, new WorkflowRun(), text);
        } catch (SAXException ex) {
            ex.printStackTrace();
            throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, ex);
        } catch (IOException e) {
            e.printStackTrace();
            throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, e);
        }
        try {
            WorkflowRun wr = updateWorkflowRun(newWR);
            Hibernate3DtoCopier copier = new Hibernate3DtoCopier();
            Document line = XmlTools.marshalToDocument(jo, copier.hibernate2dto(wr));
            toreturn = XmlTools.getRepresentation(line);
            getResponse().setEntity(toreturn);
            getResponse().setLocationRef(getRequest().getRootRef() + "/workflowruns/" + newWR.getSwAccession());
            getResponse().setStatus(Status.SUCCESS_CREATED);
        } catch (SecurityException e) {
            getResponse().setStatus(Status.CLIENT_ERROR_FORBIDDEN, e);
        } catch (SQLException ex) {
            ex.printStackTrace();
            throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, ex);
        } catch (Exception e) {
            e.printStackTrace();
            getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, e);
        } finally {
            // BUGFIX: the original threw a ResourceException from this finally block's
            // catch clause, which would mask the method's real outcome (the return
            // value or an exception already in flight). Record the failure instead.
            try {
                entity.exhaust();
            } catch (IOException ex) {
                ex.printStackTrace();
            }
            entity.release();
            DBAccess.close();
        }
    } else {
        throw new ResourceException(Status.CLIENT_ERROR_UNSUPPORTED_MEDIA_TYPE);
    }
    return toreturn;
}
From source file:org.alfresco.repo.transfer.report.TransferReporterImpl.java
/** * Create a new transfer report of success * //from w w w . j a va2 s . c o m * @return NodeRef the node ref of the new transfer report */ public NodeRef createTransferReport(String transferName, Transfer transfer, TransferTarget target, TransferDefinition definition, List<TransferEvent> events, File snapshotFile) { Map<QName, Serializable> properties = new HashMap<QName, Serializable>(); String title = transferName; String description = "Transfer Report - target: " + target.getName(); String name = transferName + ".xml"; properties.put(ContentModel.PROP_NAME, name); properties.put(ContentModel.PROP_TITLE, title); properties.put(ContentModel.PROP_DESCRIPTION, description); ChildAssociationRef ref = nodeService.createNode(target.getNodeRef(), ContentModel.ASSOC_CONTAINS, QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, name), TransferModel.TYPE_TRANSFER_REPORT, properties); ContentWriter writer = contentService.getWriter(ref.getChildRef(), ContentModel.PROP_CONTENT, true); writer.setLocale(Locale.getDefault()); writer.setMimetype(MimetypeMap.MIMETYPE_XML); writer.setEncoding(DEFAULT_ENCODING); // final XMLTransferReportWriter reportWriter = new XMLTransferReportWriter(); BufferedWriter bufferedWriter = new BufferedWriter(new OutputStreamWriter(writer.getContentOutputStream())); try { reportWriter.startTransferReport(DEFAULT_ENCODING, bufferedWriter); // Header reportWriter.writeTarget(target); // Definition of transfer reportWriter.writeDefinition(definition); // Events of transfer reportWriter.writeTransferEvents(events); /** * Write the node summary details to the transfer report */ TransferManifestProcessor processor = new TransferManifestProcessor() { public void processTransferManifestNode(TransferManifestNormalNode node) { try { reportWriter.writeNodeSummary(node); } catch (SAXException error) { error.printStackTrace(); } } public void processTransferManifestNode(TransferManifestDeletedNode node) { try { reportWriter.writeNodeSummary(node); } catch 
(SAXException error) { error.printStackTrace(); } } public void processTransferManifiestHeader(TransferManifestHeader header) { /* NO-OP */ } public void startTransferManifest() { /* NO-OP */ } public void endTransferManifest() { /* NO-OP */ } }; /** * Step 3: wire up the manifest reader to a manifest processor */ SAXParserFactory saxParserFactory = SAXParserFactory.newInstance(); SAXParser parser; parser = saxParserFactory.newSAXParser(); XMLTransferManifestReader reader = new XMLTransferManifestReader(processor); /** * Step 4: start the magic Give the manifest file to the manifest reader */ try { parser.parse(snapshotFile, reader); } catch (IOException error) { //TODO temp code error.printStackTrace(); return null; } reportWriter.endTransferReport(); return ref.getChildRef(); } catch (SAXException se) { //TODO Temp code return null; } catch (ParserConfigurationException error) { // TODO temp code error.printStackTrace(); return null; } finally { try { bufferedWriter.close(); } catch (IOException error) { error.printStackTrace(); } } }
From source file:org.apache.cayenne.tools.DbImporterTaskTest.java
@SuppressWarnings("unchecked") private void verifyResult(File map, File mapFileCopy) { try {// w w w.java 2 s . c o m FileReader control = new FileReader(map.getAbsolutePath() + "-result"); FileReader test = new FileReader(mapFileCopy); DetailedDiff diff = new DetailedDiff(new Diff(control, test)); if (!diff.similar()) { for (Difference d : ((List<Difference>) diff.getAllDifferences())) { System.out.println("-------------------------------------------"); System.out.println(d.getTestNodeDetail().getNode()); System.out.println(d.getControlNodeDetail().getValue()); } fail(diff.toString()); } } catch (SAXException e) { e.printStackTrace(); fail(); } catch (IOException e) { e.printStackTrace(); fail(); } }
From source file:org.apache.struts.tiles.xmlDefinition.I18nFactorySet.java
/** * Parse specified xml file and add definition to specified definitions set. * This method is used to load several description files in one instances list. * If filename exists and definition set is <code>null</code>, create a new set. Otherwise, return * passed definition set (can be <code>null</code>). * @param servletContext Current servlet context. Used to open file. * @param filename Name of file to parse. * @param xmlDefinitions Definitions set to which definitions will be added. If null, a definitions * set is created on request./* www . j av a2 s . co m*/ * @return XmlDefinitionsSet The definitions set created or passed as parameter. * @throws DefinitionsFactoryException On errors parsing file. */ protected XmlDefinitionsSet parseXmlFile(ServletContext servletContext, String filename, XmlDefinitionsSet xmlDefinitions) throws DefinitionsFactoryException { try { InputStream input = servletContext.getResourceAsStream(filename); // Try to load using real path. // This allow to load config file under websphere 3.5.x // Patch proposed Houston, Stephen (LIT) on 5 Apr 2002 if (null == input) { try { input = new java.io.FileInputStream(servletContext.getRealPath(filename)); } catch (Exception e) { } } // If the config isn't in the servlet context, try the class loader // which allows the config files to be stored in a jar if (input == null) { input = getClass().getResourceAsStream(filename); } // If still nothing found, this mean no config file is associated if (input == null) { if (log.isDebugEnabled()) { log.debug("Can't open file '" + filename + "'"); } return xmlDefinitions; } // Check if parser already exist. // Doesn't seem to work yet. //if( xmlParser == null ) if (true) { xmlParser = new XmlParser(); xmlParser.setValidating(isValidatingParser); } // Check if definition set already exist. 
if (xmlDefinitions == null) { xmlDefinitions = new XmlDefinitionsSet(); } xmlParser.parse(input, xmlDefinitions); } catch (SAXException ex) { if (log.isDebugEnabled()) { log.debug("Error while parsing file '" + filename + "'."); ex.printStackTrace(); } throw new DefinitionsFactoryException("Error while parsing file '" + filename + "'. " + ex.getMessage(), ex); } catch (IOException ex) { throw new DefinitionsFactoryException( "IO Error while parsing file '" + filename + "'. " + ex.getMessage(), ex); } return xmlDefinitions; }
From source file:org.apache.tika.sax.CTAKESContentHandler.java
/** * Deserializes XML-based output from cTAKES to jCas object. * @param stream of XML file./*from ww w. j a v a 2 s . c o m*/ * @param jcas {@see jCas} object used to keep the jcas from XML file. */ public static void deserialize(InputStream stream, JCas jcas) { try { XmiCasDeserializer.deserialize(new BufferedInputStream(stream), jcas.getCas()); } catch (FileNotFoundException fnfe) { // TODO Auto-generated catch block fnfe.printStackTrace(); } catch (SAXException ioe) { // TODO Auto-generated catch block ioe.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } finally { try { stream.close(); } catch (IOException ioe) { // TODO Auto-generated catch block ioe.printStackTrace(); } } }
From source file:org.apache.tiles.xmlDefinition.I18nFactorySet.java
/** * Parse specified xml file and add definition to specified definitions set. * This method is used to load several description files in one instances list. * If filename exists and definition set is <code>null</code>, create a new set. Otherwise, return * passed definition set (can be <code>null</code>). * @param servletContext Current servlet context. Used to open file. * @param filename Name of file to parse. * @param xmlDefinitions Definitions set to which definitions will be added. If null, a definitions * set is created on request./*from ww w . j a v a 2s . co m*/ * @return XmlDefinitionsSet The definitions set created or passed as parameter. * @throws DefinitionsFactoryException On errors parsing file. */ private XmlDefinitionsSet parseXmlFile(ServletContext servletContext, String filename, XmlDefinitionsSet xmlDefinitions) throws DefinitionsFactoryException { try { InputStream input = servletContext.getResourceAsStream(filename); // Try to load using real path. // This allow to load config file under websphere 3.5.x // Patch proposed Houston, Stephen (LIT) on 5 Apr 2002 if (null == input) { try { input = new java.io.FileInputStream(servletContext.getRealPath(filename)); } catch (Exception e) { } } // If still nothing found, this mean no config file is associated if (input == null) { if (log.isDebugEnabled()) { log.debug("Can't open file '" + filename + "'"); } return xmlDefinitions; } // Check if parser already exist. // Doesn't seem to work yet. //if( xmlParser == null ) if (true) { xmlParser = new XmlParser(); xmlParser.setValidating(isValidatingParser); } // Check if definition set already exist. if (xmlDefinitions == null) { xmlDefinitions = new XmlDefinitionsSet(); } xmlParser.parse(input, xmlDefinitions); } catch (SAXException ex) { if (log.isDebugEnabled()) { log.debug("Error while parsing file '" + filename + "'."); ex.printStackTrace(); } throw new DefinitionsFactoryException("Error while parsing file '" + filename + "'. 
" + ex.getMessage(), ex); } catch (IOException ex) { throw new DefinitionsFactoryException( "IO Error while parsing file '" + filename + "'. " + ex.getMessage(), ex); } return xmlDefinitions; }
From source file:org.archive.crawler.migrate.MigrateH1to3Tool.java
public void instanceMain(String[] args) throws Exception { if (args.length != 2) { printHelp();// w w w.j a v a 2s. c o m return; } String sourceOrderXmlFileArg = args[0]; String destinationH3JobDirArg = args[1]; File sourceOrderXmlFile = new File(sourceOrderXmlFileArg); if (!sourceOrderXmlFile.isFile()) { System.err.println("ERROR sourceOrderXmlFileArg is not a file: " + sourceOrderXmlFileArg); System.exit(1); } File destinationH3JobDir = new File(destinationH3JobDirArg); org.archive.util.FileUtils.ensureWriteableDirectory(destinationH3JobDir); System.out.println("H1 source: " + sourceOrderXmlFile.getAbsolutePath()); System.out.println("H3 destination: " + destinationH3JobDir.getAbsolutePath()); System.out.print("Migrating settings..."); InputStream inStream = getClass() .getResourceAsStream("/org/archive/crawler/migrate/migrate-template-crawler-beans.cxml"); String template = IOUtils.toString(inStream); inStream.close(); Map<String, String> migrateH1toH3Map = getMigrateMap(); try { sourceOrderXmlDom = DOCUMENT_BUILDER.parse(sourceOrderXmlFile); } catch (SAXException e) { System.err.println("ERROR caught exception parsing input file: " + e.getMessage() + "\n"); e.printStackTrace(); } Map<String, String> h1simpleSettings = flattenH1Order(sourceOrderXmlDom); List<String> notApplicable = new ArrayList<String>(); List<String> needsAttention = new ArrayList<String>(); int migrated = 0; StringBuilder sb = new StringBuilder(); for (String key : h1simpleSettings.keySet()) { String beanPath = migrateH1toH3Map.get(key); String value = h1simpleSettings.get(key); System.out.print("."); if (beanPath == null) { // no equivalence rule needsAttention.add(key + " " + value); continue; } if (beanPath.startsWith("$")) { // rule indicates not-available/not-applicable notApplicable.add(key + " " + value); continue; } if (beanPath.startsWith("*")) { // TODO: needs special handling if (beanPath.equals("*metadata.userAgentTemplate")) { splitH1userAgent(value, sb); migrated += 2; } else { 
needsAttention.add(key + " " + value); } continue; } if (beanPath.startsWith("^")) { // uppercase to new enum-style value = value.toUpperCase(); beanPath = beanPath.substring(1); } sb.append(beanPath).append("=").append(value).append("\n"); migrated++; } System.out.println(); System.out.println(); // patch all overrides derived from H1 into H3 template String beansCxml = template.replace("###MIGRATE_OVERRIDES###", sb.toString()); File targetBeansXmlFile = new File(destinationH3JobDir, "crawler-beans.cxml"); FileUtils.writeStringToFile(targetBeansXmlFile, beansCxml); File sourceSeedsTxtFile = new File(sourceOrderXmlFile.getParentFile(), "seeds.txt"); File destinationSeedsTxtFile = new File(destinationH3JobDir, "seeds.txt"); if (!sourceSeedsTxtFile.isFile()) { System.err.println("ERROR sourceSeedsTxtFile not found: " + sourceSeedsTxtFile); System.exit(1); } FileUtils.copyFile(sourceSeedsTxtFile, destinationSeedsTxtFile); System.out.println(notApplicable.size() + " settings skipped as not-applicable"); System.out.println("These are probably harmless, but if the following settings were"); System.out.println("important to your crawl process, investigate other options."); listProblems(notApplicable); System.out.println(); System.out.println(needsAttention.size() + " settings may need attention"); System.out.println("Please review your original crawl and the created H3 job, for each"); System.out.println("of the following, and manually update as needed."); listProblems(needsAttention); System.out.println(); System.out.println(migrated + " H1 settings successfully migrated to H3 configuration"); System.out.println(); System.out.println("Review your converted crawler-beans.cxml at:"); System.out.println(targetBeansXmlFile.getAbsolutePath()); }
From source file:org.azkfw.crawler.config.CrawlerConfig.java
/**
 * Parses a crawler configuration XML file into a {@link CrawlerConfig}.
 *
 * @param aFile path of the configuration file to parse
 * @return the parsed configuration, or {@code null} when parsing fails
 */
public static CrawlerConfig parse(final String aFile) {
    CrawlerConfig result = null;
    try {
        Digester digester = new Digester();
        // Root element creates the config object itself.
        digester.addRule("crawler", new ObjectCreateRule(CrawlerConfig.class));
        // Top-level singleton sections.
        registerBeanRules(digester, "crawler/controller", CrawlerControllerConfig.class, "setController");
        registerBeanRules(digester, "crawler/logger", CrawlerLoggerConfig.class, "setLogger");
        registerBeanRules(digester, "crawler/manager", CrawlerManagerConfig.class, "setManager");
        // <threads> is a plain list container: created without property mapping.
        digester.addRule("crawler/threads", new ObjectCreateRule(ArrayList.class));
        digester.addRule("crawler/threads", new SetNextRule("setThreads"));
        // Per-thread configuration, with nested task/schedule and their parameters.
        registerBeanRules(digester, "crawler/threads/thread", CrawlerThreadConfig.class, "add");
        registerBeanRules(digester, "crawler/threads/thread/task", CrawlerTaskConfig.class, "setTask");
        registerBeanRules(digester, "crawler/threads/thread/task/parameter", CrawlerParameterConfig.class,
                "addParameter");
        registerBeanRules(digester, "crawler/threads/thread/schedule", CrawlerScheduleConfig.class,
                "setSchedule");
        registerBeanRules(digester, "crawler/threads/thread/schedule/parameter", CrawlerParameterConfig.class,
                "addParameter");
        result = (CrawlerConfig) digester.parse(new File(aFile));
    } catch (SAXException ex) {
        ex.printStackTrace();
    } catch (IOException ex) {
        ex.printStackTrace();
    }
    return result;
}

/**
 * Registers the create / set-properties / set-next rule triple shared by most
 * configuration elements. Rule order matters to Digester and matches the
 * original registration order exactly.
 */
private static void registerBeanRules(final Digester digester, final String pattern, final Class<?> type,
        final String setterName) {
    digester.addRule(pattern, new ObjectCreateRule(type));
    digester.addRule(pattern, new SetPropertiesRule());
    digester.addRule(pattern, new SetNextRule(setterName));
}