List of usage examples for the javax.sql.rowset.serial.SerialBlob(Blob) constructor
public SerialBlob(Blob blob) throws SerialException, SQLException

Constructs a SerialBlob object that is a serialized version of the given Blob object.
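The constructor reads the given Blob's bytes eagerly, so the resulting SerialBlob stays usable (and serializable) after the originating connection is closed. Most examples below use the byte[] constructor instead, so here is a minimal sketch of both forms; the jdbc:h2:mem URL and the images table are assumptions for illustration (the H2 driver must be on the classpath), not part of the API.

import java.sql.Blob;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import javax.sql.rowset.serial.SerialBlob;

public class SerialBlobConstructorSketch {
    public static void main(String[] args) throws Exception {
        // hypothetical in-memory database and table, for illustration only
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement st = conn.createStatement()) {
            st.execute("CREATE TABLE images(id INT, data BLOB)");
            st.execute("INSERT INTO images VALUES (1, X'CAFEBABE')");
            try (ResultSet rs = st.executeQuery("SELECT data FROM images WHERE id = 1")) {
                rs.next();
                Blob driverBlob = rs.getBlob(1);                  // locator tied to this connection
                SerialBlob detached = new SerialBlob(driverBlob); // SerialBlob(Blob): materialized copy
                System.out.println(detached.length() + " bytes copied"); // 4 bytes
            }
        }
        // the byte[] constructor used by most of the examples below
        SerialBlob fromBytes = new SerialBlob(new byte[] { 1, 2, 3 });
        System.out.println(fromBytes.length()); // 3
    }
}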
From source file:au.com.ish.derbydump.derbydump.main.DumpTest.java

@Parameterized.Parameters(name = "{0}")
public static Collection<Object[]> setupTestMatrix() throws Exception {
    List<Object[]> result = new ArrayList<Object[]>();

    //testing numbers (BIGINT, DECIMAL, REAL, SMALLINT, INTEGER, DOUBLE)
    {
        //standard set of numbers
        String[] columns = new String[] { "c1 BIGINT", "c2 DECIMAL(10,2)", "c3 REAL", "c4 SMALLINT",
                "c5 INTEGER", "c6 DOUBLE" };
        Object[] row1 = new Object[] { new BigInteger("12"), new BigDecimal("12.12"), new Float("12.1"),
                Integer.valueOf(12), Integer.valueOf(24), Double.valueOf(12.12) };
        String validOutput1 = "(12,12.12,12.1,12,24,12.12),";
        Object[] row2 = new Object[] { new BigInteger("42"), new BigDecimal("42.12"), new Float("42.14"),
                Integer.valueOf(42), Integer.valueOf(64), Double.valueOf(42.14) };
        String validOutput2 = "(42,42.12,42.14,42,64,42.14),";
        Object[] row3 = new Object[] { new BigInteger("42"), new BigDecimal("42"), new Float("42"),
                Integer.valueOf(42), Integer.valueOf(64), Double.valueOf(42) };
        String validOutput3 = "(42,42.00,42.0,42,64,42.0),";
        Object[] row4 = new Object[] { new BigInteger("42"), new BigDecimal("42.1234"), new Float("42.1434"),
                Integer.valueOf(42), Integer.valueOf(64), Double.valueOf(42.1234) };
        String validOutput4 = "(42,42.12,42.1434,42,64,42.1234),";
        Object[] row5 = new Object[] { BigDecimal.ZERO, BigDecimal.ZERO, new Float("0"), Integer.valueOf(0),
                Integer.valueOf(0), Double.valueOf(0) };
        String validOutput5 = "(0,0.00,0.0,0,0,0.0),";
        //test nulls
        Object[] row6 = new Object[] { null, null, null, null, null, null };
        String validOutput6 = "(NULL,NULL,NULL,NULL,NULL,NULL);";
        Object[] values = new Object[] { row1, row2, row3, row4, row5, row6 };
        String[] validOutput = new String[] { validOutput1, validOutput2, validOutput3, validOutput4,
                validOutput5, validOutput6 };
        result.add(new Object[] { "testNumbers", null, columns, values, validOutput, false, false });
    }
    //testing strings
    {
        String[] columns = new String[] { "c1 VARCHAR(20)", "c2 VARCHAR(20)", "c3 VARCHAR(20)" };
        //test normal characters
        Object[] row1 = new Object[] { "123", "abc", "" };
        String validOutput1 = "('123','abc',''),";
        //test nulls
        Object[] row2 = new Object[] { "%", null, "" };
        String validOutput2 = "('%',NULL,''),";
        //test quotes and tabs
        Object[] row3 = new Object[] { "'test'", "\"test\"", "\t" };
        String validOutput3 = "('\\'test\\'','\"test\"','\\t'),";
        //test new line chars
        Object[] row4 = new Object[] { "\n", "\r", "\n\r" };
        String validOutput4 = "('\\n','\\r','\\n\\r');";
        Object[] values = new Object[] { row1, row2, row3, row4 };
        String[] validOutput = new String[] { validOutput1, validOutput2, validOutput3, validOutput4 };
        result.add(new Object[] { "testStrings", null, columns, values, validOutput, false, false });
    }
    //testing dates
    {
        String[] columns = new String[] { "c1 TIMESTAMP", "c2 TIMESTAMP" };
        // test standard dates
        Calendar c = Calendar.getInstance(TimeZone.getDefault());
        c.set(Calendar.YEAR, 2013);
        c.set(Calendar.MONTH, 5);
        c.set(Calendar.DAY_OF_MONTH, 6);
        c.set(Calendar.HOUR_OF_DAY, 11);
        c.set(Calendar.MINUTE, 10);
        c.set(Calendar.SECOND, 10);
        c.set(Calendar.MILLISECOND, 11);
        Calendar c2 = (Calendar) c.clone();
        c2.add(Calendar.DATE, -5000);
        Object[] row1 = new Object[] { c.getTime(), c2.getTime() };
        String validOutput1 = "('2013-06-06 11:10:10.011','1999-09-28 11:10:10.011'),";
        Object[] row2 = new Object[] { "2012-07-07 08:54:33", "1999-09-09 10:04:10" };
        String validOutput2 = "('2012-07-07 08:54:33.0','1999-09-09 10:04:10.0'),";
        Object[] row3 = new Object[] { null, null };
        String validOutput3 = "(NULL,NULL);";
        Object[] values = new Object[] { row1, row2, row3 };
        String[] validOutput = new String[] { validOutput1, validOutput2, validOutput3 };
        result.add(new Object[] { "testDates", null, columns, values, validOutput, false, false });
    }
    //testing CLOB
    {
        String[] columns = new String[] { "c1 CLOB" };
        Object[] row1 = new Object[] { "<clob value here>" };
        String validOutput1 = "('<clob value here>'),";
        Object[] row2 = new Object[] { null };
        String validOutput2 = "(NULL);";
        Object[] values = new Object[] { row1, row2 };
        String[] validOutput = new String[] { validOutput1, validOutput2 };
        result.add(new Object[] { "testClob", null, columns, values, validOutput, false, false });
    }
    //testing BLOB
    {
        String[] columns = new String[] { "c1 BLOB" };
        Object[] row1 = new Object[] { getTestImage() };
        Blob serialBlob = new SerialBlob(IOUtils.toByteArray(getTestImage()));
        String validOutput1 = "(" + Column.processBinaryData(serialBlob) + "),";
        Object[] row2 = new Object[] { null };
        String validOutput2 = "(NULL);";
        Object[] values = new Object[] { row1, row2 };
        String[] validOutput = new String[] { validOutput1, validOutput2 };
        result.add(new Object[] { "testBlob", null, columns, values, validOutput, false, false });
    }
    //testing skipping table
    {
        String[] columns = new String[] { "c1 VARCHAR(5)" };
        Object[] row1 = new Object[] { "123" };
        String validOutput1 = "";
        Object[] row2 = new Object[] { null };
        String validOutput2 = "(NULL);";
        Object[] values = new Object[] { row1, row2 };
        String[] validOutput = new String[] { validOutput1, validOutput2 };
        result.add(new Object[] { "testSkip", null, columns, values, validOutput, true, false });
    }
    //testing renaming table
    {
        String[] columns = new String[] { "c1 VARCHAR(5)" };
        Object[] row1 = new Object[] { "123" };
        String validOutput1 = "('123'),";
        Object[] row2 = new Object[] { null };
        String validOutput2 = "(NULL);";
        Object[] values = new Object[] { row1, row2 };
        String[] validOutput = new String[] { validOutput1, validOutput2 };
        result.add(new Object[] { "testRename", "testRenameNew", columns, values, validOutput, false, false });
    }
    //testing empty table
    {
        String[] columns = new String[] { "c1 VARCHAR(5)" };
        Object[] values = new Object[] { new Object[] {} };
        String[] validOutput = new String[] {};
        result.add(new Object[] { "testEmptyTable", null, columns, values, validOutput, true, false });
    }
    //testing truncate table
    {
        String[] columns = new String[] { "c1 VARCHAR(5)" };
        Object[] values = new Object[] { new Object[] {} };
        String[] validOutput = new String[] {};
        result.add(new Object[] { "testTruncateTable", null, columns, values, validOutput, true, true });
    }
    return result;
}
From source file:com.playright.servlet.DataController.java
private static CoverageData getCoverageDateFromRequest(HttpServletRequest request) throws ServletException {
    CoverageData cd = new CoverageData();
    try {
        if (!"".equalsIgnoreCase(request.getParameter("id")) && request.getParameter("id") != null) {
            cd.setId(Integer.parseInt(request.getParameter("id")));
        }
        SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy");
        java.util.Date ud = sdf.parse(request.getParameter("newsDate"));
        cd.setNewsDate(new java.sql.Date(ud.getTime()));
        cd.setNewspaper(request.getParameter("newspaper"));
        cd.setHeadline(request.getParameter("headline"));
        cd.setLanguage(request.getParameter("language"));
        cd.setEdition(request.getParameter("edition"));
        cd.setSupplement(request.getParameter("supplement"));
        cd.setSource(request.getParameter("source"));
        if (!"".equalsIgnoreCase(request.getParameter("pageNo")) && request.getParameter("pageNo") != null) {
            cd.setPageNo(Integer.parseInt(request.getParameter("pageNo")));
        }
        if (!"".equalsIgnoreCase(request.getParameter("height")) && request.getParameter("height") != null) {
            cd.setHeight(Integer.parseInt(request.getParameter("height")));
        }
        if (!"".equalsIgnoreCase(request.getParameter("width")) && request.getParameter("width") != null) {
            cd.setWidth(Integer.parseInt(request.getParameter("width")));
        }
        if (!"".equalsIgnoreCase(request.getParameter("totalArticleSize"))
                && request.getParameter("totalArticleSize") != null) {
            cd.setTotalArticleSize(Integer.parseInt(request.getParameter("totalArticleSize")));
        }
        if (!"".equalsIgnoreCase(request.getParameter("circulationFigure"))
                && request.getParameter("circulationFigure") != null) {
            cd.setCirculationFigure(Integer.parseInt(request.getParameter("circulationFigure")));
        }
        if (!"".equalsIgnoreCase(request.getParameter("journalistFactor"))
                && request.getParameter("journalistFactor") != null) {
            cd.setJournalistFactor(Integer.parseInt(request.getParameter("journalistFactor")));
        }
        if (!"".equalsIgnoreCase(request.getParameter("quantitativeAve"))
                && request.getParameter("quantitativeAve") != null) {
            cd.setQuantitativeAve(new BigDecimal(request.getParameter("quantitativeAve")));
        }
        if (!"".equalsIgnoreCase(request.getParameter("imageExists"))
                && request.getParameter("imageExists") != null) {
            cd.setImageExists(request.getParameter("imageExists"));
        }
        Blob b = null;
        String fileName = "";
        String contentType = "";
        try {
            Part filePart = request.getPart("image");
            InputStream fileContent = filePart.getInputStream();
            byte[] bytes = IOUtils.toByteArray(fileContent);
            b = new SerialBlob(bytes);
            fileName = filePart.getSubmittedFileName();
            contentType = filePart.getContentType();
        } catch (IOException ex) {
            Logger.getLogger(DataController.class.getName()).log(Level.SEVERE, null, ex);
        }
        if (b != null && b.length() != 0) {
            cd.setImageBlob(b);
            cd.setImageFileName(fileName);
            cd.setImageType(contentType);
        }
    } catch (ParseException ex) {
        Logger.getLogger(DataController.class.getName()).log(Level.SEVERE, null, ex);
    } catch (SQLException ex) {
        Logger.getLogger(DataController.class.getName()).log(Level.SEVERE, null, ex);
    }
    return cd;
}
From source file:net.sf.taverna.t2.provenance.lineageservice.EventProcessor.java
/**
 * note: this method can be called as part of a recursion on sub-workflows
 * @param df
 * @param dataflowID the UUID for the entire dataflow (may be a sub-dataflow)
 * @param externalName the external name of the dataflow. Null if this is top level, not null if a sub-dataflow
 * @return the wfInstanceRef for this workflow structure
 */
@SuppressWarnings("unchecked")
public String processDataflowStructure(Dataflow df, String dataflowID, String externalName) {
    String localWfInstanceID = getWfInstanceID();
    dataflowDepth++;
    try {
        List<Var> vars = new ArrayList<Var>();

        // check whether we already have this WF in the DB
        List<String> wfNames = null;
        try {
            wfNames = pq.getAllWFnames();
        } catch (SQLException e) {
            logger.warn("Problem processing dataflow structure for " + dataflowID, e);
        }

        if (wfNames != null && wfNames.contains(dataflowID)) { // already in the DB
            // logger.info("workflow structure with ID "+dataflowID+" is in the DB -- clearing static portion");
            // clearing the portion of the static DB that pertains to this specific WF.
            // it is going to be rewritten right away in the rest of this method
            // this is simpler to implement than selectively avoiding duplicate writes to the DB
            pw.clearDBStatic(dataflowID);
        } else {
            // logger.warn("new workflow structure with ID "+dataflowID);
        }

        // //////
        // add workflow ID -- this is NOT THE SAME AS the wfInstanceID
        // /////

        // this could be a nested workflow -- in this case, override its wfInstanceID with that of its parent
        String parentDataflow;
        if ((parentDataflow = wfNestingMap.get(dataflowID)) == null) {
            Element serializeDataflow = XMLSerializerRegistry.getInstance().getSerializer()
                    .serializeDataflow(df);
            String dataflowString = null;
            try {
                XMLOutputter outputter = new XMLOutputter();
                StringWriter stringWriter = new StringWriter();
                outputter.output(serializeDataflow, stringWriter);
                dataflowString = stringWriter.toString();
            } catch (java.io.IOException e) {
                logger.error("Could not serialise dataflow", e);
            }
            Blob blob = new SerialBlob(dataflowString.getBytes("UTF-8"));

            // this is a top level dataflow description
            pw.addWFId(dataflowID, null, externalName, blob); // set its dataflowID with no parent

            // localWfInstanceID = DUMMY_INSTANCE_ID; // CHECK ?? this was not set at all in this branch of the if-then
            if (getWfInstanceID() == null)
                setWfInstanceID(DUMMY_INSTANCE_ID);
        } else {
            Element serializeDataflow = XMLSerializerRegistry.getInstance().getSerializer()
                    .serializeDataflow(df);
            String dataflowString = null;
            try {
                XMLOutputter outputter = new XMLOutputter();
                StringWriter stringWriter = new StringWriter();
                outputter.output(serializeDataflow, stringWriter);
                dataflowString = stringWriter.toString();
            } catch (java.io.IOException e) {
                logger.error("Could not serialise dataflow", e);
            }
            Blob blob = new SerialBlob(dataflowString.getBytes("UTF-8"));

            // we are processing a nested workflow structure
            logger.debug("dataflow " + dataflowID + " with external name " + externalName
                    + " is nested within " + parentDataflow);
            pw.addWFId(dataflowID, parentDataflow, externalName, blob); // set its dataflowID along with its parent

            // override wfInstanceID to point to top level -- UNCOMMENTED PM 9/09 CHECK
            localWfInstanceID = pq.getRuns(parentDataflow, null).get(0).getInstanceID();
            // logger.debug("overriding nested WFRef "+getWfInstanceID()+" with parent WFRef "+localWfInstanceID);
        }

        pw.addWFInstanceId(dataflowID, localWfInstanceID); // wfInstanceID stripped by stripWfInstanceHeader() above

        // //////
        // add processors along with their variables
        // /////
        List<? extends Processor> processors = df.getProcessors();
        for (Processor p : processors) {
            // logger.info("adding processor "+p.getLocalName());
            String pName = p.getLocalName();

            // CHECK get type of first activity and set this as the type of the processor itself
            List<? extends Activity<?>> activities = p.getActivityList();
            String pType = null;
            if (activities != null && !activities.isEmpty()) {
                pType = activities.get(0).getClass().getCanonicalName();
            }
            pw.addProcessor(pName, pType, dataflowID, false); // false: not a top level processor

            // ///
            // add all input ports for this processor as input variables
            // ///
            List<? extends ProcessorInputPort> inputs = p.getInputPorts();
            for (ProcessorInputPort ip : inputs) {
                Var inputVar = new Var();
                inputVar.setPName(pName);
                inputVar.setWfInstanceRef(dataflowID);
                inputVar.setVName(ip.getName());
                inputVar.setTypeNestingLevel(ip.getDepth());
                inputVar.setInput(true);
                // logger.info("processDataflowStructure: adding input var "+pName+":"+ip.getName());
                vars.add(inputVar);
            }

            // ///
            // add all output ports for this processor as output variables
            // ///
            List<? extends ProcessorOutputPort> outputs = p.getOutputPorts();
            for (ProcessorOutputPort op : outputs) {
                Var outputVar = new Var();
                outputVar.setPName(pName);
                outputVar.setWfInstanceRef(dataflowID);
                outputVar.setVName(op.getName());
                outputVar.setTypeNestingLevel(op.getDepth());
                outputVar.setInput(false);
                vars.add(outputVar);
            }

            // check for nested structures: if the activity is DataflowActivity
            // then this processor is a nested workflow
            // make an entry into wfNesting map with its ID and recurse on the nested workflow
            for (Activity a : activities) {
                if (a.getClass().getCanonicalName().contains("DataflowActivity")) {
                    Dataflow nested = (Dataflow) a.getConfiguration();
                    logger.debug("RECURSION ON nested workflow: " + p.getLocalName() + " with id: "
                            + nested.getInternalIdentier() + " from " + externalName + " at depth "
                            + dataflowDepth);
                    wfNestingMap.put(nested.getInternalIdentier(), dataflowID); // child -> parent

                    //////////////
                    /// RECURSIVE CALL
                    //////////////
                    processDataflowStructure(nested, nested.getInternalIdentier(), p.getLocalName());

                    // PM added 5/10
                    dataflowDepth--;

                    // List<? extends Processor> procs = nested.getProcessors();
                    // for (Processor nestedP : procs) {
                    //     System.out.println("recursion on nested processor: " + nestedP.getLocalName());
                    // }
                }
            }
        } // end for each processor

        // ////
        // add inputs to entire dataflow
        // ////
        String pName = INPUT_CONTAINER_PROCESSOR; // overridden -- see below

        // check whether we are processing a nested workflow. in this case
        // the input vars are not assigned to the INPUT processor but to the containing dataflow
        if (externalName != null) { // override the default if we are nested or some external name is provided
            pName = externalName;
        }

        List<? extends DataflowInputPort> inputPorts = df.getInputPorts();
        for (DataflowInputPort ip : inputPorts) {
            Var inputVar = new Var();
            inputVar.setPName(pName);
            inputVar.setWfInstanceRef(dataflowID);
            inputVar.setVName(ip.getName());
            inputVar.setTypeNestingLevel(ip.getDepth());
            inputVar.setInput(true); // CHECK PM modified 11/08 -- input vars are actually outputs of input processors...
            vars.add(inputVar);
        }

        // ////
        // add outputs of entire dataflow
        // ////
        pName = OUTPUT_CONTAINER_PROCESSOR; // overridden -- see below

        // check whether we are processing a nested workflow. in this case
        // the output vars are not assigned to the OUTPUT processor but to the containing dataflow
        if (externalName != null) { // we are nested
            pName = externalName;
        }

        List<? extends DataflowOutputPort> outputPorts = df.getOutputPorts();
        for (DataflowOutputPort op : outputPorts) {
            Var outputVar = new Var();
            outputVar.setPName(pName);
            outputVar.setWfInstanceRef(dataflowID);
            outputVar.setVName(op.getName());
            outputVar.setTypeNestingLevel(op.getDepth());
            outputVar.setInput(false); // CHECK PM modified 11/08 -- output vars are actually outputs of output processors...
            vars.add(outputVar);
        }

        pw.addVariables(vars, dataflowID);

        // ////
        // add arc records using the dataflow links
        // retrieving the processor names requires navigating from links to
        // source/sink and from there to the processors
        // ////
        List<? extends Datalink> links = df.getLinks();
        for (Datalink l : links) {
            // TODO cover the case of arcs from an input and to an output to the entire dataflow
            String sourcePname = null;
            String sinkPname = null;

            if (l.getSource() instanceof ProcessorOutputPort) {
                sourcePname = ((ProcessorOutputPort) l.getSource()).getProcessor().getLocalName();
            } else {
                // System.out.println("found link from dataflow input");
            }
            if (l.getSink() instanceof ProcessorInputPort) {
                sinkPname = ((ProcessorInputPort) l.getSink()).getProcessor().getLocalName();
            } else {
                // System.out.println("found link to dataflow output");
            }

            if (sourcePname != null && sinkPname != null) {
                // System.out.println("adding regular internal arc");
                pw.addArc(l.getSource().getName(), sourcePname, l.getSink().getName(), sinkPname, dataflowID);
            } else if (sourcePname == null) { // link is from dataflow input or subflow input
                if (externalName != null) { // link from subflow input
                    sourcePname = externalName;
                } else {
                    sourcePname = INPUT_CONTAINER_PROCESSOR;
                }
                // Ian added this logic since there were some null sinkPnameRefs with merge ports
                if (sinkPname == null) { // link is to dataflow output
                    if (externalName != null) { // link from subflow input
                        sinkPname = externalName;
                    } else {
                        sinkPname = OUTPUT_CONTAINER_PROCESSOR;
                    }
                }
                // System.out.println("adding arc from dataflow input");
                pw.addArc(l.getSource().getName(), sourcePname, l.getSink().getName(), sinkPname, dataflowID);
            } else if (sinkPname == null) { // link is to dataflow output
                if (externalName != null) { // link from subflow input
                    sinkPname = externalName;
                } else {
                    sinkPname = OUTPUT_CONTAINER_PROCESSOR;
                }
                // Ian added this bit at the same time as the null sinkPnameRef logic above - hope it is correct
                if (sourcePname == null) { // link is from dataflow input or subflow input
                    if (externalName != null) { // link from subflow input
                        sourcePname = externalName;
                    } else {
                        sourcePname = INPUT_CONTAINER_PROCESSOR;
                    }
                }
                // System.out.println("adding arc to dataflow output");
                pw.addArc(l.getSource().getName(), sourcePname, l.getSink().getName(), sinkPname, dataflowID);
            }
        }
        // logger.info("completed processing dataflow " + dataflowID);
    } catch (Exception e) {
        logger.error("Problem processing provenance for dataflow", e);
    }
    // logger.debug("wfInstanceID at the end of processDataflowStructure: "+getWfInstanceID());
    return dataflowID;
}
From source file:jp.go.nict.langrid.management.web.model.service.ServiceModelUtil.java
private static InterfaceDefinitionModel makeInterfaceModel(ServiceInterfaceDefinition entity)
        throws ServiceManagerException {
    InterfaceDefinitionModel model = new InterfaceDefinitionModel();
    model.setProtocolId(entity.getProtocolId());
    try {
        byte[] b = IOUtils.toByteArray(entity.getDefinition().getBinaryStream());
        model.setDefinition(new SerialBlob(b));
    } catch (IOException e) {
        throw new ServiceManagerException(e);
    } catch (SQLException e) {
        throw new ServiceManagerException(e);
    }
    return model;
}
From source file:gov.medicaid.services.impl.ProviderEnrollmentServiceBean.java
/**
 * Saves the attachment blob contents.
 *
 * @param content the content to be saved
 * @return the content id
 * @throws IOException if the stream cannot be saved
 * @throws SQLException if content cannot be transformed to a blob
 * @throws SerialException if content cannot be transformed to a blob
 */
private String saveAttachmentContent(InputStream content) throws IOException, SerialException, SQLException {
    String contentId = UUID.randomUUID().toString();
    BinaryContent binaryContent = new BinaryContent();
    binaryContent.setContentId(contentId);
    binaryContent.setContent(new SerialBlob(IOUtils.toByteArray(content)));
    getEm().persist(binaryContent);
    return contentId;
}
From source file:lasige.steeldb.jdbc.BFTRowSet.java
/**
 * Sets the designated column in either the current row or the insert
 * row of this <code>CachedRowSetImpl</code> object with the given
 * <code>java.sql.Blob</code> value.
 *
 * This method updates a column value in either the current row or
 * the insert row of this rowset, but it does not update the
 * database. If the cursor is on a row in the rowset, the
 * method {@link #updateRow} must be called to update the database.
 * If the cursor is on the insert row, the method {@link #insertRow}
 * must be called, which will insert the new row into both this rowset
 * and the database. Both of these methods must be called before the
 * cursor moves to another row.
 *
 * @param columnIndex the first column is <code>1</code>, the second
 *        is <code>2</code>, and so on; must be <code>1</code> or larger
 *        and equal to or less than the number of columns in this rowset
 * @param b the new column <code>Blob</code> value
 * @throws SQLException if (1) the given column index is out of bounds,
 *         (2) the cursor is not on one of this rowset's rows or its
 *         insert row, or (3) this rowset is
 *         <code>ResultSet.CONCUR_READ_ONLY</code>
 */
public void updateBlob(int columnIndex, Blob b) throws SQLException {
    // sanity check
    checkIndex(columnIndex);
    // make sure the cursor is on a valid row
    checkCursor();

    // SerialBlob will help in getting the byte array and storing it.
    // We need to be checking DatabaseMetaData.locatorsUpdateCopy()
    // or through RowSetMetaData.locatorsUpdateCopy()
    if (dbmslocatorsUpdateCopy) {
        getCurrentRow().setColumnObject(columnIndex, new SerialBlob(b));
    } else {
        throw new SQLException(resBundle.handleGetObject("cachedrowsetimpl.opnotsupp").toString());
    }
}
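As the guard on dbmslocatorsUpdateCopy above suggests, this override only takes effect when the DBMS reports that it copies locator data on update. Here is a minimal hedged sketch of driving updateBlob through the standard javax.sql.rowset API; the JDBC URL, table, and column layout are hypothetical:

import javax.sql.rowset.CachedRowSet;
import javax.sql.rowset.RowSetProvider;
import javax.sql.rowset.serial.SerialBlob;

public class UpdateBlobSketch {
    public static void main(String[] args) throws Exception {
        CachedRowSet crs = RowSetProvider.newFactory().createCachedRowSet();
        crs.setUrl("jdbc:h2:mem:demo"); // hypothetical datasource
        crs.setCommand("SELECT id, data FROM images");
        crs.execute();
        if (crs.next()) {
            // wrap the replacement bytes; updateBlob stores a SerialBlob copy in the row
            crs.updateBlob(2, new SerialBlob(new byte[] { 9, 9, 9 }));
            crs.updateRow();     // update the rowset's current row
            crs.acceptChanges(); // propagate the change to the database
        }
    }
}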
From source file:org.apache.taverna.provenance.lineageservice.EventProcessor.java
// the original assigned null to serializeDataflow and discarded the serializer's
// result (the FIXME below); assigning it here avoids the NullPointerException.
// The throws clause is added so the checked exceptions of getBytes("UTF-8") and
// the SerialBlob constructor compile.
private Blob serialize(Dataflow df) throws SQLException, UnsupportedEncodingException {
    Element serializeDataflow = xmlSerializer.serializeDataflow(df); // FIXME
    String dataflowString = null;
    try {
        XMLOutputter outputter = new XMLOutputter();
        StringWriter stringWriter = new StringWriter();
        outputter.output(serializeDataflow, stringWriter);
        dataflowString = stringWriter.toString();
    } catch (java.io.IOException e) {
        logger.error("Could not serialise dataflow", e); // FIXME Bad Exception handling!
    }
    return new SerialBlob(dataflowString.getBytes("UTF-8"));
}
From source file:org.batoo.jpa.jdbc.AbstractColumn.java
/**
 * {@inheritDoc}
 */
@Override
public Object convertValue(Connection connection, final Object value) {
    if (value == null) {
        return null;
    }
    if (this.temporalType != null) {
        switch (this.temporalType) {
        case DATE:
            if (value instanceof java.sql.Date) {
                return value;
            }
            if (value instanceof Date) {
                return new java.sql.Date(((Date) value).getTime());
            }
            return new java.sql.Date(((Calendar) value).getTimeInMillis());
        case TIME:
            if (value instanceof java.sql.Time) {
                return value;
            }
            if (value instanceof Date) {
                return new java.sql.Time(((Date) value).getTime());
            }
            return new java.sql.Time(((Calendar) value).getTimeInMillis());
        case TIMESTAMP:
            if (value instanceof java.sql.Timestamp) {
                return value;
            }
            if (value instanceof Date) {
                return new java.sql.Timestamp(((Date) value).getTime());
            }
            return new java.sql.Timestamp(((Calendar) value).getTimeInMillis());
        }
    }
    if (this.numberType != null) {
        return ReflectHelper.convertNumber((Number) value, this.numberType);
    }
    if (this.enumType != null) {
        final Enum<?> enumValue = (Enum<?>) value;
        if (this.enumType == EnumType.ORDINAL) {
            return enumValue.ordinal();
        }
        return enumValue.name();
    }
    if (this.lob) {
        try {
            if (this.javaType == String.class) {
                return new SerialClob(((String) value).toCharArray());
            } else if (this.javaType == char[].class) {
                return new SerialClob((char[]) value);
            } else if (this.javaType == byte[].class) {
                return new SerialBlob((byte[]) value);
            } else {
                final ByteArrayOutputStream os = new ByteArrayOutputStream();
                final ObjectOutputStream oos = new ObjectOutputStream(os);
                try {
                    oos.writeObject(value);
                } finally {
                    oos.close();
                }
                return new SerialBlob(os.toByteArray());
            }
        } catch (final Exception e) {
            throw new PersistenceException("Cannot set parameter", e);
        }
    } else {
        return value;
    }
}
From source file:org.batoo.jpa.jdbc.ValueConverter.java
/**
 * convert java objects to jdbc friendly
 *
 * @param value
 *            jdbc raw value
 * @param javaType
 *            type of value
 * @param temporalType
 *            temporal type
 * @param enumType
 *            enum type
 * @param lob
 *            is Lob
 * @return jdbc friendly value
 * @since 2.0.1
 */
public static Object toJdbc(Object value, Class<?> javaType, TemporalType temporalType, EnumType enumType,
        boolean lob) {
    if (value == null) {
        return null;
    }
    if (temporalType != null) {
        switch (temporalType) {
        case DATE:
            if (value instanceof java.sql.Date) {
                return value;
            }
            if (value instanceof Date) {
                return new java.sql.Date(((Date) value).getTime());
            }
            return new java.sql.Date(((Calendar) value).getTimeInMillis());
        case TIME:
            if (value instanceof java.sql.Time) {
                return value;
            }
            if (value instanceof Date) {
                return new java.sql.Time(((Date) value).getTime());
            }
            return new java.sql.Time(((Calendar) value).getTimeInMillis());
        case TIMESTAMP:
            if (value instanceof java.sql.Timestamp) {
                return value;
            }
            if (value instanceof Date) {
                return new java.sql.Timestamp(((Date) value).getTime());
            }
            return new java.sql.Timestamp(((Calendar) value).getTimeInMillis());
        }
    }
    if (Number.class.isAssignableFrom(javaType)) {
        return ReflectHelper.convertNumber((Number) value, javaType);
    }
    if (enumType != null) {
        final Enum<?> enumValue = (Enum<?>) value;
        if (enumType == EnumType.ORDINAL) {
            return enumValue.ordinal();
        }
        return enumValue.name();
    }
    if (lob) {
        try {
            if (javaType == String.class) {
                return new SerialClob(((String) value).toCharArray());
            } else if (javaType == char[].class) {
                return new SerialClob((char[]) value);
            } else if (javaType == byte[].class) {
                return new SerialBlob((byte[]) value);
            } else {
                final ByteArrayOutputStream os = new ByteArrayOutputStream();
                final ObjectOutputStream oos = new ObjectOutputStream(os);
                try {
                    oos.writeObject(value);
                } finally {
                    oos.close();
                }
                return new SerialBlob(os.toByteArray());
            }
        } catch (final Exception e) {
            throw new PersistenceException("Cannot set parameter", e);
        }
    } else {
        return value;
    }
}
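Both this method and AbstractColumn.convertValue above fall back to Java serialization for arbitrary Lob-mapped objects, writing them through an ObjectOutputStream into a SerialBlob. For clarity, here is a hedged sketch of that pattern together with the matching read path; the toBlob/fromBlob helpers are illustrative stand-ins, not part of Batoo JPA:

import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.sql.Blob;
import java.util.HashMap;
import javax.sql.rowset.serial.SerialBlob;

public class LobRoundTripSketch {
    // illustrative helper mirroring the converters' else-branch above
    static Blob toBlob(Object value) throws Exception {
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(os)) {
            oos.writeObject(value);
        }
        return new SerialBlob(os.toByteArray());
    }

    // illustrative read path: deserialize the object from the blob's binary stream
    static Object fromBlob(Blob blob) throws Exception {
        try (ObjectInputStream ois = new ObjectInputStream(blob.getBinaryStream())) {
            return ois.readObject();
        }
    }

    public static void main(String[] args) throws Exception {
        HashMap<String, Integer> original = new HashMap<>();
        original.put("answer", 42);
        Blob blob = toBlob(original);   // any Serializable object works here
        Object copy = fromBlob(blob);
        System.out.println(copy);       // {answer=42}
    }
}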
From source file:org.jumpmind.db.platform.postgresql.PostgreSqlDatabasePlatform.java
@Override
public Object[] getObjectValues(BinaryEncoding encoding, String[] values, Column[] orderedMetaData,
        boolean useVariableDates, boolean fitToColumn) {
    Object[] objectValues = super.getObjectValues(encoding, values, orderedMetaData, useVariableDates,
            fitToColumn);
    for (int i = 0; i < orderedMetaData.length; i++) {
        if (orderedMetaData[i] != null && orderedMetaData[i].getMappedTypeCode() == Types.BLOB
                && objectValues[i] != null) {
            try {
                objectValues[i] = new SerialBlob((byte[]) objectValues[i]);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }
    return objectValues;
}