List of usage examples for org.joda.time.DateTime.getMonthOfYear()
public int getMonthOfYear()
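Before the examples, a note on the contract: Joda-Time numbers months from 1 (January) through 12 (December), unlike java.util.Calendar, which numbers them from 0. A minimal standalone sketch (the date below is purely illustrative):

import org.joda.time.DateTime;

public class GetMonthOfYearDemo {
    public static void main(String[] args) {
        // Months are 1-based in Joda-Time: January = 1 ... December = 12.
        DateTime dt = new DateTime(2014, 7, 15, 10, 30); // illustrative value
        System.out.println(dt.getMonthOfYear()); // prints 7
    }
}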
From source file:org.apereo.portal.portlets.statistics.BaseStatisticsReportController.java
License:Apache License
/** Build the aggregation {@link DataTable} */
protected final DataTable buildAggregationReport(F form) throws TypeMismatchException {
    //Pull data out of form for per-group fetching
    final AggregationInterval interval = form.getInterval();
    final DateMidnight start = form.getStart();
    final DateMidnight end = form.getEnd();

    final DateTime startDateTime = start.toDateTime();
    //Use a query end of the end date at 23:59:59
    final DateTime endDateTime = end.plusDays(1).toDateTime().minusSeconds(1);

    //Get the list of DateTimes used on the X axis in the report
    final List<DateTime> reportTimes = this.intervalHelper.getIntervalStartDateTimesBetween(interval,
            startDateTime, endDateTime, maxIntervals);

    final Map<D, SortedSet<T>> groupedAggregations = createColumnDiscriminatorMap(form);

    //Determine the ValueType of the date/time column. Use the most specific column type possible
    final ValueType dateTimeColumnType;
    if (interval.isHasTimePart()) {
        //If start/end are the same day just display the time
        if (startDateTime.toDateMidnight().equals(endDateTime.toDateMidnight())) {
            dateTimeColumnType = ValueType.TIMEOFDAY;
        }
        //interval has time data and start/end are on different days, show full date time
        else {
            dateTimeColumnType = ValueType.DATETIME;
        }
    }
    //interval is date only
    else {
        dateTimeColumnType = ValueType.DATE;
    }

    //Setup the date/time column description
    final ColumnDescription dateTimeColumn;
    switch (dateTimeColumnType) {
    case TIMEOFDAY: {
        dateTimeColumn = new ColumnDescription("time", dateTimeColumnType, "Time");
        break;
    }
    default: {
        dateTimeColumn = new ColumnDescription("date", dateTimeColumnType, "Date");
    }
    }

    final DataTable table = new JsonDataTable();
    table.addColumn(dateTimeColumn);

    //Setup columns in the DataTable
    final Set<D> columnGroups = groupedAggregations.keySet();
    for (final D columnMapping : columnGroups) {
        final Collection<ColumnDescription> columnDescriptions = this.getColumnDescriptions(columnMapping, form);
        table.addColumns(columnDescriptions);
    }

    //Query for all aggregation data in the time range for all groups. Only the
    //interval and discriminator data is used from the keys.
    final Set<K> keys = createAggregationsQueryKeyset(columnGroups, form);
    final BaseAggregationDao<T, K> baseAggregationDao = this.getBaseAggregationDao();
    final Collection<T> aggregations = baseAggregationDao.getAggregations(startDateTime, endDateTime, keys,
            extractGroupsArray(columnGroups));

    //Organize the results by group and sort them chronologically by adding them to the sorted set
    for (final T aggregation : aggregations) {
        final D discriminator = aggregation.getAggregationDiscriminator();
        final SortedSet<T> results = groupedAggregations.get(discriminator);
        results.add(aggregation);
    }

    //Build Map from discriminator column mapping to result iterator to allow putting results into
    //the correct column AND the correct time slot in the column
    Comparator<? super D> comparator = getDiscriminatorComparator();
    final Map<D, PeekingIterator<T>> groupedAggregationIterators = new TreeMap<D, PeekingIterator<T>>(comparator);
    for (final Entry<D, SortedSet<T>> groupedAggregationEntry : groupedAggregations.entrySet()) {
        groupedAggregationIterators.put(groupedAggregationEntry.getKey(),
                Iterators.peekingIterator(groupedAggregationEntry.getValue().iterator()));
    }

    /*
     * Populate the data, filling in blank spots. The full list of interval DateTimes is used to create every
     * row in the query range. Then each column's iterator is peeked to see whether it has data for that row;
     * gaps are filled in with a null aggregation.
     */
    for (final DateTime rowTime : reportTimes) {
        // create the row
        final TableRow row = new TableRow();

        // add the date to the first cell
        final Value dateTimeValue;
        switch (dateTimeColumnType) {
        case DATE: {
            dateTimeValue = new DateValue(rowTime.getYear(), rowTime.getMonthOfYear() - 1, rowTime.getDayOfMonth());
            break;
        }
        case TIMEOFDAY: {
            dateTimeValue = new TimeOfDayValue(rowTime.getHourOfDay(), rowTime.getMinuteOfHour(), 0);
            break;
        }
        default: {
            dateTimeValue = new DateTimeValue(rowTime.getYear(), rowTime.getMonthOfYear() - 1,
                    rowTime.getDayOfMonth(), rowTime.getHourOfDay(), rowTime.getMinuteOfHour(), 0, 0);
            break;
        }
        }
        row.addCell(new TableCell(dateTimeValue));

        for (final PeekingIterator<T> groupedAggregationIteratorEntry : groupedAggregationIterators.values()) {
            List<Value> values = null;

            if (groupedAggregationIteratorEntry.hasNext()) {
                final T aggr = groupedAggregationIteratorEntry.peek();
                if (rowTime.equals(aggr.getDateTime())) {
                    //Data is for the correct time slot, advance the iterator
                    groupedAggregationIteratorEntry.next();
                    values = createRowValues(aggr, form);
                }
            }

            //Gap in the data, fill it in using a null aggregation
            if (values == null) {
                values = createRowValues(null, form);
            }

            //Add the values to the row
            for (final Value value : values) {
                row.addCell(value);
            }
        }

        table.addRow(row);
    }

    return table;
}
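Note the rowTime.getMonthOfYear() - 1 above: the visualization DateValue/DateTimeValue cells built in this report take a zero-based month (the java.util.Calendar convention), while getMonthOfYear() is one-based, hence the offset. A small sketch of just that conversion, using a hypothetical helper name:

import org.joda.time.DateTime;

final class MonthIndex {
    private MonthIndex() {
    }

    /** Converts Joda-Time's 1-based month to a 0-based index (January -> 0, December -> 11). */
    static int zeroBasedMonth(DateTime dt) {
        return dt.getMonthOfYear() - 1;
    }
}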
From source file:org.ash.history.CalendarH.java
License:Open Source License
/**
 * Gets the end value of time selection
 *
 * @return the value
 */
long getEndSelectionPlus2659() {
    DateTime endDaySelect = new DateTime(getEndSelection());
    DateTime endDaySelectPlus2659 = new DateTime(endDaySelect.getYear(), endDaySelect.getMonthOfYear(),
            endDaySelect.getDayOfMonth(), 23, 59, 59, 999);

    if (endDaySelectPlus2659.getMillis() < getEndBDB()) {
        return endDaySelectPlus2659.getMillis();
    } else {
        return getEndBDB();
    }
}
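The same end-of-day instant can be derived without re-listing the year/month/day fields; a sketch assuming Joda-Time's field properties, with an illustrative millisecond value standing in for getEndSelection():

import org.joda.time.DateTime;

public class EndOfDaySketch {
    public static void main(String[] args) {
        long endSelection = System.currentTimeMillis(); // stands in for getEndSelection()
        DateTime endDaySelect = new DateTime(endSelection);
        // Equivalent to new DateTime(year, monthOfYear, dayOfMonth, 23, 59, 59, 999)
        DateTime endOfDay = endDaySelect.millisOfDay().withMaximumValue();
        System.out.println(endOfDay);
    }
}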
From source file:org.ash.history.CalendarH.java
License:Open Source License
/**
 * Get fragged days for Calendar.
 *
 * @param begin0
 * @param end0
 * @return
 */
private long[] getFraggedDays(long begin0, long end0) {
    DateTime begin = new DateTime(begin0);
    DateTime end = new DateTime(end0);

    DateTime beginTmp = new DateTime(begin);
    DateTime beginDayPlus0000 = new DateTime(beginTmp.getYear(), beginTmp.getMonthOfYear(),
            beginTmp.getDayOfMonth(), 0, 0, 0, 0);

    DateTime endTmp = new DateTime(end);
    DateTime endPlus2359 = new DateTime(endTmp.getYear(), endTmp.getMonthOfYear(),
            endTmp.getDayOfMonth(), 23, 59, 59, 999);

    Interval interval = new Interval(beginDayPlus0000, endPlus2359);
    Days days = Days.daysIn(interval);
    int daysBetween = days.getDays();

    long[] flaggedDates = new long[daysBetween + 1];
    DateTime tmp = new DateTime(begin);

    // Load days.
    for (int i = 0; i <= daysBetween; i++) {
        DateTime tmp1 = tmp.plusDays(i);
        flaggedDates[i] = tmp1.getMillis();
    }

    return flaggedDates;
}
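For the midnight value assembled above from getYear()/getMonthOfYear()/getDayOfMonth(), Joda-Time 2.x also offers withTimeAtStartOfDay(); a sketch under that assumption, with an illustrative instant:

import org.joda.time.DateTime;

public class StartOfDaySketch {
    public static void main(String[] args) {
        DateTime begin = new DateTime(System.currentTimeMillis()); // illustrative instant
        // Same day at 00:00:00.000, without re-assembling the date fields by hand.
        DateTime beginDayPlus0000 = begin.withTimeAtStartOfDay();
        System.out.println(beginDayPlus0000);
    }
}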
From source file:org.assertj.jodatime.api.DateTimeAssert.java
License:Apache License
/**
 * Returns true if both datetimes are in the same year and month, false otherwise.
 *
 * @param actual the actual datetime. expected not to be null
 * @param other the other datetime. expected not to be null
 * @return true if both datetimes are in the same year and month, false otherwise
 */
private static boolean haveSameYearAndMonth(DateTime actual, DateTime other) {
    return haveSameYear(actual, other) && actual.getMonthOfYear() == other.getMonthOfYear();
}
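A field-by-field equivalent of the comparison above can be sketched with org.joda.time.YearMonth; this is an alternative for illustration, not the library's own implementation:

import org.joda.time.DateTime;
import org.joda.time.YearMonth;

public class SameYearAndMonthSketch {
    /** True when both instants fall in the same year and month. */
    static boolean haveSameYearAndMonth(DateTime actual, DateTime other) {
        return new YearMonth(actual.getYear(), actual.getMonthOfYear())
                .equals(new YearMonth(other.getYear(), other.getMonthOfYear()));
    }

    public static void main(String[] args) {
        System.out.println(haveSameYearAndMonth(
                new DateTime(2000, 1, 1, 0, 0), new DateTime(2000, 1, 31, 12, 0))); // true
    }
}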
From source file:org.attribyte.wp.model.Site.java
License:Apache License
/**
 * Builds the permalink for a post from this site.
 * @param post The post.
 * @return The permalink string.
 * @see <a href="https://codex.wordpress.org/Using_Permalinks">https://codex.wordpress.org/Using_Permalinks</a>
 */
public String buildPermalink(final Post post) {
    final String authorSlug = post.author != null ? Strings.nullToEmpty(post.author.slug) : "";
    final List<TaxonomyTerm> categories = post.categories();
    final Term categoryTerm = categories.size() > 0 ? categories.get(0).term : defaultCategory;
    final String category = categoryTerm != null ? categoryTerm.slug : "";
    final String post_id = Long.toString(post.id);
    final DateTime publishTime = new DateTime(post.publishTimestamp);
    final String year = Integer.toString(publishTime.getYear());
    final String monthnum = String.format("%02d", publishTime.getMonthOfYear());
    final String day = String.format("%02d", publishTime.getDayOfMonth());
    final String hour = String.format("%02d", publishTime.getHourOfDay());
    final String minute = String.format("%02d", publishTime.getMinuteOfHour());
    final String second = String.format("%02d", publishTime.getSecondOfMinute());
    final String path = permalinkStructure.replace("%year%", year).replace("%monthnum%", monthnum)
            .replace("%day%", day).replace("%hour%", hour).replace("%minute%", minute)
            .replace("%second%", second).replace("%post_id%", post_id).replace("%postname%", post.slug)
            .replace("%category%", category).replace("%author%", authorSlug);
    return baseURL + path;
}
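Because getMonthOfYear() returns an int between 1 and 12, the zero-padding above relies on String.format; an equivalent (a sketch, not the project's code) is to let a Joda DateTimeFormatter print the field directly:

import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

public class PermalinkFieldsSketch {
    public static void main(String[] args) {
        DateTime publishTime = new DateTime(2016, 3, 7, 9, 5, 2); // illustrative value
        DateTimeFormatter monthFormat = DateTimeFormat.forPattern("MM");
        String monthnum = monthFormat.print(publishTime);                       // "03"
        String viaField = String.format("%02d", publishTime.getMonthOfYear());  // also "03"
        System.out.println(monthnum.equals(viaField));                          // true
    }
}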
From source file:org.conqat.engine.bugzilla.lib.Bug.java
License:Apache License
/** Get milliseconds of an enumeration field that is holding a date. */
public long getMilliSeconds(EBugzillaField field) {
    // TODO (BH): Why variable here?
    long milliSeconds = 0;

    // TODO (BH): I would invert the condition and return/throw here to
    // reduce the nesting.
    if (fields.get(field) != null) {
        // TODO (BH): Why store value and overwrite in next line? You could
        // also move this outside of the if and use the variable in the if
        // expression.
        String bugzillaDate = StringUtils.EMPTY_STRING;
        bugzillaDate = fields.get(field);

        // TODO (BH): Make constants from these pattern
        Pattern todayPattern = Pattern.compile("[0-9]{2}:[0-9]{2}:[0-9]{2}");
        Pattern lastWeekPattern = Pattern.compile("[A-Z][a-z][a-z] [0-9]{2}:[0-9]{2}");
        Pattern anyDatePattern = Pattern.compile("[0-9]{4}-[0-9]{2}-[0-9]{2}");

        // TODO (BH): Variables only used once. Inline?
        Matcher todayMatcher = todayPattern.matcher(bugzillaDate);
        Matcher lastWeekMatcher = lastWeekPattern.matcher(bugzillaDate);
        Matcher anyDateMatcher = anyDatePattern.matcher(bugzillaDate);

        if (anyDateMatcher.matches()) {
            // TODO (BH): Make this a constant?
            DateTimeFormatter dateTimeFormatter = DateTimeFormat.forPattern("yyyy-MM-dd");
            // TODO (BH): Directly return?
            milliSeconds = dateTimeFormatter.parseDateTime(bugzillaDate).getMillis();
        } else if (lastWeekMatcher.matches()) {
            DateTime lastWeekDate = new DateTime(Chronic.parse(bugzillaDate).getBeginCalendar().getTime());
            // Since jchronic parses the Bugzilla format exactly seven days
            // too late, we need to subtract those 7 days.
            // TODO (BH): Directly return?
            milliSeconds = lastWeekDate.minusDays(7).getMillis();
        } else if (todayMatcher.matches()) {
            DateTime todayDate = new DateTime();
            // TODO (BH): Make this a constant?
            DateTimeFormatter dateTimeFormatter = DateTimeFormat.forPattern("HH:mm:ss");
            DateTime fieldDate = dateTimeFormatter.parseDateTime(bugzillaDate);
            // TODO (BH): Directly return?
            milliSeconds = new DateTime(todayDate.getYear(), todayDate.getMonthOfYear(),
                    todayDate.getDayOfMonth(), fieldDate.getHourOfDay(), fieldDate.getMinuteOfHour(),
                    fieldDate.getSecondOfMinute()).getMillis();
        } else {
            // TODO (BH): I think this is not a good way of handling this
            // error as the argument might be valid, but the data is just
            // not good. Better use a checked exception, such as
            // ConQATException.
            throw new IllegalArgumentException("Field is not a Bugzilla date.");
        }
    } else {
        // TODO (BH): I think this is not a good way of handling this error
        // as the argument might be valid, but the data is just not present.
        // Better use a checked exception, such as ConQATException.
        throw new IllegalArgumentException("Argument is not a Bugzilla field.");
    }
    return milliSeconds;
}
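The "today" branch above combines the current date fields (year, getMonthOfYear(), day) with a parsed time-of-day. A sketch of that combination using withTime, assuming Joda-Time 2.x and an illustrative input string:

import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;

public class TodayTimestampSketch {
    public static void main(String[] args) {
        String bugzillaDate = "14:03:22"; // illustrative "today" value
        DateTime fieldDate = DateTimeFormat.forPattern("HH:mm:ss").parseDateTime(bugzillaDate);
        // Same result as new DateTime(today.getYear(), today.getMonthOfYear(), today.getDayOfMonth(), h, m, s)
        long millis = new DateTime()
                .withTime(fieldDate.getHourOfDay(), fieldDate.getMinuteOfHour(), fieldDate.getSecondOfMinute(), 0)
                .getMillis();
        System.out.println(millis);
    }
}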
From source file:org.countandra.utils.CountandraUtils.java
License:Apache License
private void denormalizedIncrement(Mutator<String> m, String category, String ptimeDimensions,
        String denormalizedKey, long time, int value) {
    DateTime dt = new DateTime(time);
    DateTime dtm = new DateTime(dt.getYear(), dt.getMonthOfYear(), dt.getDayOfMonth(), dt.getHourOfDay(),
            dt.getMinuteOfHour());
    DateTime dtH = new DateTime(dt.getYear(), dt.getMonthOfYear(), dt.getDayOfMonth(), dt.getHourOfDay(), 0);
    DateTime dtD = new DateTime(dt.getYear(), dt.getMonthOfYear(), dt.getDayOfMonth(), 0, 0);
    DateTime dtM = new DateTime(dt.getYear(), dt.getMonthOfYear(), 1, 0, 0);
    DateTime dtY = new DateTime(dt.getYear(), 1, 1, 0, 0);

    String[] timeDimensions = ptimeDimensions.split(",");

    for (int i = 0; i < timeDimensions.length; i++) {
        switch (hshSupportedTimeDimensions.get(timeDimensions[i])) {
        case MINUTELY:
            incrementCounter(m, category, denormalizedKey, TimeDimension.MINUTELY.getSCode(), dtm.getMillis(), value);
            break;
        case HOURLY:
            incrementCounter(m, category, denormalizedKey, TimeDimension.HOURLY.getSCode(), dtH.getMillis(), value);
            break;
        case DAILY:
            incrementCounter(m, category, denormalizedKey, TimeDimension.DAILY.getSCode(), dtD.getMillis(), value);
            break;
        case MONTHLY:
            incrementCounter(m, category, denormalizedKey, TimeDimension.MONTHLY.getSCode(), dtM.getMillis(), value);
            break;
        case YEARLY:
            incrementCounter(m, category, denormalizedKey, TimeDimension.YEARLY.getSCode(), dtY.getMillis(), value);
            break;
        case ALLTIME:
            incrementCounter(m, category, denormalizedKey, TimeDimension.ALLTIME.getSCode(), 0L, value);
            break;
        }
    }
}
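The cascade of constructors above truncates one instant to minute, hour, day, month, and year boundaries. Joda-Time's field properties can produce the same buckets; a sketch (not the CountAndra code itself), with an illustrative event time:

import org.joda.time.DateTime;

public class TimeBucketsSketch {
    public static void main(String[] args) {
        DateTime dt = new DateTime(System.currentTimeMillis()); // the event time
        DateTime dtm = dt.minuteOfHour().roundFloorCopy(); // minute bucket
        DateTime dtH = dt.hourOfDay().roundFloorCopy();    // hour bucket
        DateTime dtD = dt.dayOfMonth().roundFloorCopy();   // day bucket
        DateTime dtM = dt.monthOfYear().roundFloorCopy();  // month bucket
        DateTime dtY = dt.year().roundFloorCopy();         // year bucket
        System.out.println(dtm + " " + dtH + " " + dtD + " " + dtM + " " + dtY);
    }
}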
From source file:org.dataconservancy.packaging.gui.presenter.impl.PackageMetadataPresenterImpl.java
License:Apache License
private void setExistingValues() {
    if (getController().getPackageState().hasPackageMetadataValues()) {
        view.clearAllFields();

        if (!Util.isEmptyOrNull(getController().getPackageState().getPackageName())) {
            view.getPackageNameField().getPropertyInput()
                    .setText(getController().getPackageState().getPackageName());
        }

        if (getController().getPackageState()
                .getPackageMetadataValues(GeneralParameterNames.DOMAIN_PROFILE) != null
                && !getController().getPackageState()
                        .getPackageMetadataValues(GeneralParameterNames.DOMAIN_PROFILE).isEmpty()
                && !Util.isEmptyOrNull(getController().getPackageState()
                        .getPackageMetadataValues(GeneralParameterNames.DOMAIN_PROFILE).get(0))) {
            URI domainProfileURI = null;
            try {
                domainProfileURI = new URI(getController().getPackageState()
                        .getPackageMetadataValues(GeneralParameterNames.DOMAIN_PROFILE).get(0));
            } catch (URISyntaxException e) {
                view.getErrorLabel().setText(TextFactory.getText(ErrorKey.DOMAIN_PROFILE_PARSE_ERROR));
                view.getErrorLabel().setVisible(true);
                view.scrollToTop();
            }

            if (domainProfileURI != null) {
                for (Map.Entry<String, URI> idEntry : domainProfileIdMap.entrySet()) {
                    if (idEntry.getValue().equals(domainProfileURI)) {
                        view.getDomainProfilesComboBox().setValue(idEntry.getKey());
                        break;
                    }
                }
            }
        }

        view.getAllDynamicFields().stream().filter(
                node -> getController().getPackageState().getPackageMetadataValues(node.getId()) != null)
                .forEach(node -> {
                    if (node instanceof TextPropertyBox) {
                        if (node.getUserData() == null
                                || !((String) node.getUserData()).equalsIgnoreCase("repeatable")) {
                            ((TextPropertyBox) node).getPropertyInput().setText(getController()
                                    .getPackageState().getPackageMetadataValues(node.getId()).get(0));
                        }
                    } else if (node instanceof DatePropertyBox) {
                        DateTime date = DateUtility.parseDateString(getController().getPackageState()
                                .getPackageMetadataValues(node.getId()).get(0));
                        if (date != null) {
                            ((DatePropertyBox) node).getPropertyInput().setValue(
                                    LocalDate.of(date.getYear(), date.getMonthOfYear(), date.getDayOfMonth()));
                        }
                    } else if (node instanceof VBox) {
                        getController().getPackageState().getPackageMetadataValues(node.getId()).stream()
                                .filter(value -> !Util.isEmptyOrNull(value)).forEach(value -> ((VBox) node)
                                        .getChildren().add(new RemovableLabel(value, (VBox) node)));
                    }
                });
    }
}
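When bridging to java.time as above, no index shift is needed: both getMonthOfYear() and java.time.LocalDate.of use 1-based months. A small sketch of the conversion (assumes Java 8+, illustrative date):

import java.time.LocalDate;

import org.joda.time.DateTime;

public class JodaToJavaTimeSketch {
    public static void main(String[] args) {
        DateTime date = new DateTime(2017, 11, 23, 8, 45); // illustrative parsed value
        // Both APIs use 1-12 for months, so the fields map across directly.
        LocalDate localDate = LocalDate.of(date.getYear(), date.getMonthOfYear(), date.getDayOfMonth());
        System.out.println(localDate); // 2017-11-23
    }
}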
From source file:org.dataconservancy.ui.services.CollectionActivityServiceImpl.java
License:Apache License
/**
 * returns creation activity for a collection
 * @param collection
 * @return activity
 */
private Activity retrieveCreationActivityForCollection(Collection collection) {
    DateTime depositDate = collection.getDepositDate();
    Activity activity = new Activity();
    Person actor = userService.get(collection.getDepositorId());
    activity.setActor(actor);
    activity.setDateTimeOfOccurrence(new DateTime(depositDate.getYear(), depositDate.getMonthOfYear(),
            depositDate.getDayOfMonth(), depositDate.getHourOfDay(), depositDate.getMinuteOfHour(),
            depositDate.getSecondOfMinute()));
    activity.setType(Activity.Type.COLLECTION_DEPOSIT);
    activity.setCount(1);
    return activity;
}
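Rebuilding the DateTime from its fields, as above, effectively drops the milliseconds of the deposit date. A sketch of the same truncation done directly on the instant, assuming that is the only intent:

import org.joda.time.DateTime;

public class SecondPrecisionSketch {
    public static void main(String[] args) {
        DateTime depositDate = new DateTime(); // stands in for collection.getDepositDate()
        // Equivalent to new DateTime(year, monthOfYear, day, hour, minute, second): millis become 0.
        DateTime occurrence = depositDate.withMillisOfSecond(0);
        System.out.println(occurrence);
    }
}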
From source file:org.ecocean.servlet.importer.ImportExcelMetadata.java
License:Open Source License
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { //set up for response response.setContentType("text/html"); PrintWriter out = response.getWriter(); boolean locked = false; String context = "context0"; context = ServletUtilities.getContext(request); Shepherd myShepherd = new Shepherd(context); myShepherd.beginDBTransaction();// w w w.jav a 2 s . co m AssetStore assetStore = AssetStore.getDefault(myShepherd); myShepherd.commitDBTransaction(); System.out.println("\n\nStarting ImportExcelMetadata servlet..."); //setup data dir String rootWebappPath = getServletContext().getRealPath("/"); File webappsDir = new File(rootWebappPath).getParentFile(); File shepherdDataDir = new File(webappsDir, CommonConfiguration.getDataDirectoryName(context)); if (!shepherdDataDir.exists()) { shepherdDataDir.mkdirs(); } File tempSubdir = new File(webappsDir, "temp"); if (!tempSubdir.exists()) { tempSubdir.mkdirs(); } System.out.println("\n\n Finished directory creation..."); String fileName = "None"; StringBuffer messages = new StringBuffer(); boolean successfullyWroteFile = false; File finalFile = new File(tempSubdir, "temp.csv"); try { MultipartParser mp = new MultipartParser(request, (CommonConfiguration.getMaxMediaSizeInMegabytes(context) * 1048576)); Part part; while ((part = mp.readNextPart()) != null) { String name = part.getName(); if (part.isParam()) { // it's a parameter part ParamPart paramPart = (ParamPart) part; String value = paramPart.getStringValue(); } if (part.isFile()) { FilePart filePart = (FilePart) part; fileName = ServletUtilities.cleanFileName(filePart.getFileName()); if (fileName != null) { System.out.println("ImportExcelMetadata is trying to upload file " + fileName); //File thisSharkDir = new File(encountersDir.getAbsolutePath() +"/"+ encounterNumber); //if(!thisSharkDir.exists()){thisSharkDir.mkdirs();} finalFile = new File(tempSubdir, fileName); filePart.writeTo(finalFile); successfullyWroteFile = true; System.out.println("\n\nImportExcelMetadata successfully uploaded the file!"); } } } try { if (successfullyWroteFile) { System.out.println("\n\n Starting Excel Metadata content import"); //OK, we have our CSV file //let's import CSVReader reader = new CSVReader(new FileReader(finalFile)); List<String[]> allLines = reader.readAll(); System.out.println("\n\n Read in the file!"); //let's detect the size of this array by reading the number of header columns in row 0 String[] headerNames = allLines.get(0); int numColumns = headerNames.length; int numRows = allLines.size(); //determine the Occurrence_ID column as it is at the end int occurrenceIDColumnNumber = -1; for (int g = 0; g < numColumns; g++) { if (headerNames[g].equals("Occurrence_ID")) { occurrenceIDColumnNumber = g; } } for (int i = 1; i < numRows; i++) { System.out.println("\n\n Processing row " + i); boolean newEncounter = true; boolean newShark = true; String[] line = allLines.get(i); boolean ok2import = true; Encounter enc = new Encounter(); myShepherd.beginDBTransaction(); //line[0] is the sample_ID String encNumber = line[0].trim(); if ((encNumber != null) && (!encNumber.equals(""))) { if (myShepherd.isEncounter(encNumber)) { enc = myShepherd.getEncounter(encNumber); newEncounter = false; } else { enc.setCatalogNumber(encNumber); enc.setState("approved"); } } else { ok2import = false; messages.append("<li>Row " + i + ": could not find sample/encounter ID in the first column of row " + i + ".</li>"); System.out.println( " Could not find sample/encounter ID in 
the first column of row " + i + "."); } //line[1] is the IndividualID String individualID = line[1].trim(); if (individualID != null) { enc.addComments("<p><em>" + request.getRemoteUser() + " on " + (new java.util.Date()).toString() + "</em><br>" + "Import SRGD process set marked individual to " + individualID + ".</p>"); //enc.setIndividualID(individualID); System.out.println( " Setting Individual ID for row " + i + ". Value: " + individualID); } //line[2] is the latitude String latitude = line[2].trim(); if ((latitude != null) && (!latitude.equals(""))) { try { enc.addComments("<p><em>" + request.getRemoteUser() + " on " + (new java.util.Date()).toString() + "</em><br>" + "Import SRGD process set latitude to " + latitude + ".</p>"); Double lat = new Double(latitude); enc.setDecimalLatitude(lat); System.out.println( " Setting latitude for row " + i + ". Value: " + latitude); } catch (NumberFormatException nfe) { messages.append("<li>Row " + i + " for sample ID " + enc.getCatalogNumber() + ": Latitude hit a NumberFormatException in row " + i + " and could not be imported. The listed value was: " + latitude + "</li>"); } } //line[3] is the latitude String longitude = line[3].trim(); if ((longitude != null) && (!longitude.equals(""))) { try { enc.addComments("<p><em>" + request.getRemoteUser() + " on " + (new java.util.Date()).toString() + "</em><br>" + "Import SRGD process set longitude to " + longitude + ".</p>"); Double longie = new Double(longitude); enc.setDecimalLongitude(longie); System.out.println( " Setting longitude for row " + i + ". Value: " + longitude); } catch (NumberFormatException nfe) { nfe.printStackTrace(); messages.append("<li>Row " + i + " for sample ID " + enc.getCatalogNumber() + ": Longitude hit a NumberFormatException in row " + i + " and could not be imported. The listed value was: " + longitude + "</li>"); } } //line[4] is the date_time String isoDate = line[4].trim(); if ((isoDate != null) && (!isoDate.equals(""))) { StringTokenizer tks = new StringTokenizer(isoDate, "-"); int numTokens = tks.countTokens(); DateTimeFormatter parser2 = ISODateTimeFormat.dateTimeParser(); enc.setMonth(-1); enc.setDay(-1); enc.setYear(-1); enc.setHour(-1); enc.setMinutes("00"); try { DateTime time = parser2.parseDateTime(isoDate); enc.setYear(time.getYear()); if (numTokens >= 2) { enc.setMonth(time.getMonthOfYear()); } if (numTokens >= 3) { enc.setDay(time.getDayOfMonth()); } if (isoDate.indexOf("T") != -1) { int minutes = time.getMinuteOfHour(); String minutes2 = (new Integer(minutes)).toString(); if ((time.getHourOfDay() != 0) && (minutes != 0)) { enc.setHour(time.getHourOfDay()); if (isoDate.indexOf(":") != -1) { enc.setMinutes(minutes2); } } } enc.addComments("<p><em>" + request.getRemoteUser() + " on " + (new java.util.Date()).toString() + "</em><br>" + "Import SRGD process set date to " + enc.getDate() + ".</p>"); System.out.println(" Set date for encounter: " + enc.getDate()); } catch (IllegalArgumentException iae) { iae.printStackTrace(); messages.append("<li>Row " + i + ": could not import the date and time for row: " + i + ". 
Cancelling the import for this row.</li>"); ok2import = false; } } //line[5] get locationID String locationID = line[5].trim(); if (line.length >= 6) { if ((locationID != null) && (!locationID.equals(""))) { enc.setLocationID(locationID); enc.addComments("<p><em>" + request.getRemoteUser() + " on " + (new java.util.Date()).toString() + "</em><br>" + "Import SRGD process set location ID to " + locationID + ".</p>"); System.out.println( " Setting location ID for row " + i + ". Value: " + locationID); } } //line[6] get sex String sex = line[6].trim(); if (line.length >= 7) { if ((sex != null) && (!sex.equals(""))) { if (sex.equals("M")) { enc.setSex("male"); } else if (sex.equals("F")) { enc.setSex("female"); } else { enc.setSex("unknown"); } System.out.println(" Setting sex for row " + i + ". Value: " + sex); enc.addComments("<p><em>" + request.getRemoteUser() + " on " + (new java.util.Date()).toString() + "</em><br>" + "Import SRGD process set sex to " + enc.getSex() + ".</p>"); } //line[occurrenceIDColumnNumber] get Occurrence_ID Occurrence occur = new Occurrence(); if (occurrenceIDColumnNumber != -1) { String occurID = line[occurrenceIDColumnNumber]; if (myShepherd.isOccurrence(occurID)) { occur = myShepherd.getOccurrence(occurID); boolean isNew = occur.addEncounter(enc); if (isNew) { occur.addComments("<p><em>" + request.getRemoteUser() + " on " + (new java.util.Date()).toString() + "</em><br>" + "Import SRGD process added encounter " + enc.getCatalogNumber() + ".</p>"); } } else { occur = new Occurrence(occurID, enc); occur.addComments("<p><em>" + request.getRemoteUser() + " on " + (new java.util.Date()).toString() + "</em><br>" + "Import SRGD process added encounter " + enc.getCatalogNumber() + ".</p>"); myShepherd.getPM().makePersistent(occur); } } } if (ok2import) { System.out.println(" ok2import"); myShepherd.commitDBTransaction(); if (newEncounter) { myShepherd.storeNewEncounter(enc, enc.getCatalogNumber()); } //before proceeding with haplotype and loci importing, we need to create the tissue sample myShepherd.beginDBTransaction(); Encounter enc3 = myShepherd.getEncounter(encNumber); TissueSample ts = new TissueSample(encNumber, ("sample_" + encNumber)); if (myShepherd.isTissueSample(("sample_" + encNumber), encNumber)) { ts = myShepherd.getTissueSample(("sample_" + encNumber), encNumber); } else { myShepherd.getPM().makePersistent(ts); enc3.addTissueSample(ts); } System.out.println(" Added TissueSample."); //let's set genetic Sex if ((sex != null) && (!sex.equals(""))) { SexAnalysis sexDNA = new SexAnalysis( ("analysis_" + enc3.getCatalogNumber() + "_sex"), sex, enc3.getCatalogNumber(), ("sample_" + enc3.getCatalogNumber())); if (myShepherd.isGeneticAnalysis(ts.getSampleID(), encNumber, ("analysis_" + enc3.getCatalogNumber() + "_sex"), "SexAnalysis")) { sexDNA = myShepherd.getSexAnalysis(ts.getSampleID(), encNumber, ("analysis_" + enc3.getCatalogNumber() + "_sex")); sexDNA.setSex(sex); } else { ts.addGeneticAnalysis(sexDNA); myShepherd.getPM().makePersistent(sexDNA); } enc3.addComments("<p><em>" + request.getRemoteUser() + " on " + (new java.util.Date()).toString() + "</em><br />" + "Import SRGD process added or updated genetic sex analysis " + sexDNA.getAnalysisID() + " for tissue sample " + ts.getSampleID() + ".<br />" + sexDNA.getHTMLString()); } System.out.println(" Added genetic sex."); //line[7] get haplotype if (line.length >= 8) { String haplo = line[7].trim(); if ((haplo != null) && (!haplo.equals(""))) { //TBD check id this analysis already exists System.out.println(" 
Starting haplotype."); MitochondrialDNAAnalysis mtDNA = new MitochondrialDNAAnalysis( ("analysis_" + enc3.getCatalogNumber()), haplo, enc3.getCatalogNumber(), ("sample_" + enc3.getCatalogNumber())); if (myShepherd.isGeneticAnalysis(ts.getSampleID(), encNumber, ("analysis_" + enc3.getCatalogNumber()), "MitochondrialDNA")) { mtDNA = myShepherd.getMitochondrialDNAAnalysis(ts.getSampleID(), encNumber, ("analysis_" + enc3.getCatalogNumber())); mtDNA.setHaplotype(haplo); System.out.println(" Haplotype reset."); } else { ts.addGeneticAnalysis(mtDNA); myShepherd.getPM().makePersistent(mtDNA); System.out.println(" Added new haplotype."); } enc3.addComments("<p><em>" + request.getRemoteUser() + " on " + (new java.util.Date()).toString() + "</em><br />" + "Import SRGD process added or updated mitochondrial DNA analysis (haplotype) " + mtDNA.getAnalysisID() + " for tissue sample " + ts.getSampleID() + ".<br />" + mtDNA.getHTMLString()); System.out.println(" Added haplotype."); } else { System.out.println(" Did NOT add haplotype."); } } ArrayList<Locus> loci = new ArrayList<Locus>(); //loci value import if (line.length >= 9) { for (int f = 8; f < numColumns; f++) { if (line.length > (f + 2)) { String l1 = line[f].trim(); String l2 = line[f + 1].trim(); String locusName = headerNames[f].replaceAll("L_", ""); System.out.println(" Loaded loci name."); //verify that we're looking at the right loci and everything matches up nicely if ((l1 != null) && (l2 != null) && (!l1.equals("")) && (!l2.equals("")) && (!locusName.equals("")) && (headerNames[f].trim().toLowerCase().startsWith("l_")) && (headerNames[f + 1].trim().toLowerCase().startsWith("l_")) && (headerNames[f].trim().toLowerCase() .equals(headerNames[f + 1].trim().toLowerCase()))) { //get allele values Integer intA = new Integer(l1); Integer intB = new Integer(l2); Locus myLocus = new Locus(locusName, intA, intB); loci.add(myLocus); } f++; } } } //TBD check if this analysis already exists if (loci.size() > 0) { System.out.println(" Found msMarkers!!!!!!!!!!!!1"); MicrosatelliteMarkersAnalysis microAnalysis = new MicrosatelliteMarkersAnalysis( (ts.getSampleID() + "_msMarkerAnalysis"), ts.getSampleID(), enc.getCatalogNumber(), loci); if (myShepherd.isGeneticAnalysis(ts.getSampleID(), encNumber, (ts.getSampleID() + "_msMarkerAnalysis"), "MicrosatelliteMarkers")) { microAnalysis = myShepherd.getMicrosatelliteMarkersAnalysis(ts.getSampleID(), encNumber, (ts.getSampleID() + "_msMarkerAnalysis")); microAnalysis.setLoci(loci); } else { ts.addGeneticAnalysis(microAnalysis); myShepherd.getPM().makePersistent(microAnalysis); } System.out.println(" Added ms markers."); enc3.addComments("<p><em>" + request.getRemoteUser() + " on " + (new java.util.Date()).toString() + "</em><br />" + "Import SRGD process added or updated microsatellite markers of analysis " + microAnalysis.getAnalysisID() + " for tissue sample " + ts.getSampleID() + ".<br />" + microAnalysis.getHTMLString()); } myShepherd.commitDBTransaction(); if (!individualID.equals("")) { MarkedIndividual indie = new MarkedIndividual(); myShepherd.beginDBTransaction(); Encounter enc2 = myShepherd.getEncounter(encNumber); if (myShepherd.isMarkedIndividual(individualID)) { indie = myShepherd.getMarkedIndividual(individualID); newShark = false; } else { //indie.setIndividualID(individualID); FIXME !!! 
} //OK to generically add it as the addEncounter() method will ignore it if already added to marked individual indie.addEncounter(enc2); if ((indie.getSex() == null) || ((enc2.getSex() != null) && (indie.getSex() != enc2.getSex()))) { indie.setSex(enc2.getSex()); indie.addComments("<p><em>" + request.getRemoteUser() + " on " + (new java.util.Date()).toString() + "</em><br>" + "Import SRGD process set sex to " + enc2.getSex() + ".</p>"); } if ((indie.getHaplotype() == null) && (enc2.getHaplotype() != null)) { indie.doNotSetLocalHaplotypeReflection(enc2.getHaplotype()); } indie.refreshDependentProperties(); indie.addComments("<p><em>" + request.getRemoteUser() + " on " + (new java.util.Date()).toString() + "</em><br>" + "Import SRGD process added encounter " + enc2.getCatalogNumber() + ".</p>"); myShepherd.commitDBTransaction(); if (newShark) { myShepherd.storeNewMarkedIndividual(indie); } } } else { myShepherd.rollbackDBTransaction(); } //out.println("Imported row: "+line); } } else { locked = true; System.out.println("ImportSRGD: For some reason the import failed without exception."); } } catch (Exception le) { locked = true; myShepherd.rollbackDBTransaction(); myShepherd.closeDBTransaction(); le.printStackTrace(); } if (!locked) { myShepherd.commitDBTransaction(); myShepherd.closeDBTransaction(); out.println(ServletUtilities.getHeader(request)); out.println( "<p><strong>Success!</strong> I have successfully uploaded and imported your SRGD CSV file.</p>"); if (messages.toString().equals("")) { messages.append("None"); } out.println("<p>The following error messages were reported during the import process:<br /><ul>" + messages + "</ul></p>"); out.println("<p><a href=\"appadmin/import.jsp\">Return to the import page</a></p>"); out.println(ServletUtilities.getFooter(context)); } } catch (IOException lEx) { lEx.printStackTrace(); out.println(ServletUtilities.getHeader(request)); out.println( "<strong>Error:</strong> I was unable to upload your SRGD CSV. Please contact the webmaster about this message."); out.println(ServletUtilities.getFooter(context)); } catch (NullPointerException npe) { npe.printStackTrace(); out.println(ServletUtilities.getHeader(request)); out.println("<strong>Error:</strong> I was unable to import SRGD data as no file was specified."); out.println(ServletUtilities.getFooter(context)); } out.close(); }