Example usage for org.joda.time Period Period

Introduction

This page collects usage examples for the org.joda.time Period constructor.

Prototype

private Period(int[] values, PeriodType type) 

Document

Constructor used when we trust ourselves.
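
The constructor listed above is not public; the examples below reach Period through its public constructors instead, most often Period(ReadableInstant, ReadableInstant) for the gap between two dates and Period(long startMillis, long endMillis) for timing a block of work (see the Backfill and Binaries examples further down). A minimal orientation sketch of the two-instant form, with dates assumed for illustration:

import org.joda.time.DateTime;
import org.joda.time.Period;

public class PeriodConstructionSketch {
    public static void main(String[] args) {
        // Period between two instants: the form most of the examples below use
        DateTime start = new DateTime(2015, 1, 1, 0, 0);
        DateTime end = new DateTime(2015, 3, 15, 12, 30);
        Period period = new Period(start, end);

        // Default toString() is ISO-8601, e.g. P2M2WT12H30M for the dates above
        System.out.println(period);
    }
}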

Usage

From source file:jiajiechen.countdown.CountDownGUI.java

License:Open Source License

private void doRefresh() {
    DateTime d = new DateTime();
    time.setText(DateTimeFormat.longDateTime().print(d));
    // TableRowModel<String> lm = new DefaultListModel<String>();
    DefaultTableModel tableModel = (DefaultTableModel) table.getModel();
    while (tableModel.getRowCount() < countdown.size()) {
        tableModel.addRow(new Object[] { "", "" }); // add an empty row for each countdown entry
    }
    Integer row = 0;
    for (Entry<String, DateTime> e : countdown.entrySet()) {
        DateTime itemdate = e.getValue();
        Period period = new Period(d, itemdate);
        PeriodFormatter pf = new PeriodFormatterBuilder().appendYears().appendSuffix("y ").appendMonths()
                .appendSuffix("m ").appendWeeks().appendSuffix("w ").appendDays().appendSuffix("d ")
                .appendHours().appendSuffix("h ").appendMinutes().appendSuffix("m ").appendSeconds()
                .appendSuffix("s ").toFormatter();
        tableModel.setValueAt(e.getKey(), row, 0);
        tableModel.setValueAt(pf.print(period), row, 1);
        row++;
    }

}
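
The suffix-based formatter above is rebuilt for every table row on every refresh; since PeriodFormatter is immutable and thread-safe, it could be built once and reused. A minimal standalone sketch of the same format with fixed dates (dates assumed for illustration):

import org.joda.time.DateTime;
import org.joda.time.Period;
import org.joda.time.format.PeriodFormatter;
import org.joda.time.format.PeriodFormatterBuilder;

public class CountdownFormatSketch {
    // Same builder chain as doRefresh(), created once because the formatter is immutable
    private static final PeriodFormatter PF = new PeriodFormatterBuilder()
            .appendYears().appendSuffix("y ").appendMonths().appendSuffix("m ")
            .appendWeeks().appendSuffix("w ").appendDays().appendSuffix("d ")
            .appendHours().appendSuffix("h ").appendMinutes().appendSuffix("m ")
            .appendSeconds().appendSuffix("s ").toFormatter();

    public static void main(String[] args) {
        DateTime now = new DateTime(2015, 1, 1, 0, 0);
        DateTime target = new DateTime(2015, 2, 11, 5, 0);
        Period period = new Period(now, target);

        // Expected output: "1m 1w 3d 5h " (by default, zero fields are omitted
        // when any other field is non-zero)
        System.out.println(PF.print(period));
    }
}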

From source file:jongo.rest.xstream.Usage.java

License:Open Source License

/**
 * Calculates the time Jongo has been running and returns a string representing it, e.g. "2 days 10 hours...".
 * @return a string with the uptime.
 */
public String getUptime() {
    Period period = new Period(this.start, new DateTime());
    return PeriodFormat.getDefault().print(period);
}
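
For reference, PeriodFormat.getDefault() is a word-based, locale-sensitive formatter. A minimal sketch with a fixed period (field values assumed for illustration):

import org.joda.time.Period;
import org.joda.time.format.PeriodFormat;

public class UptimeFormatSketch {
    public static void main(String[] args) {
        // years, months, weeks, days, hours, minutes, seconds, millis
        Period period = new Period(0, 0, 0, 2, 10, 30, 0, 0);

        // In an English locale this prints roughly "2 days, 10 hours and 30 minutes"
        System.out.println(PeriodFormat.getDefault().print(period));
    }
}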

From source file:mobi.daytoday.DayToDay.DateWrap.java

License:Apache License

/**
 * Finds the difference between the two dates and expresses it in natural
 * language.
 * 
 * @param dateOne
 *          - date in DATE_FORMAT format
 * @param dateTwo
 *          - date in DATE_FORMAT format
 * @return time between the two given dates in natural terms
 * @throws Exception
 *           - if there is any error
 */
public static String naturalInterval(String dateOne, String dateTwo) throws Exception {
    DateTime firstDate = dtForm.parseDateTime(dateOne);
    DateTime secondDate = dtForm.parseDateTime(dateTwo);

    Period period = new Period(firstDate, secondDate);

    PeriodFormatter formatter = new PeriodFormatterBuilder().appendYears().appendSuffix(" year ", " years ")
            .appendMonths().appendSuffix(" month ", " months ").appendWeeks().appendSuffix(" week ", " weeks ")
            .appendDays().appendSuffix(" day", " days").printZeroNever().toFormatter();

    if (formatter.print(period).matches(".*-.*")) {
        return formatter.print(period.negated());
    }

    return formatter.print(period);
}
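
If the first date is after the second, the period's field values come out negative; the check above looks for a "-" in the printed text and re-prints the negated period so the result reads naturally. A minimal sketch of that behaviour with explicit dates (the formatter chain is copied from above, the dates are assumed):

import org.joda.time.DateTime;
import org.joda.time.Period;
import org.joda.time.format.PeriodFormatter;
import org.joda.time.format.PeriodFormatterBuilder;

public class NaturalIntervalSketch {
    public static void main(String[] args) {
        DateTime firstDate = new DateTime(2015, 6, 1, 0, 0);
        DateTime secondDate = new DateTime(2015, 5, 1, 0, 0); // earlier, so the period is negative

        Period period = new Period(firstDate, secondDate);

        PeriodFormatter formatter = new PeriodFormatterBuilder()
                .appendYears().appendSuffix(" year ", " years ")
                .appendMonths().appendSuffix(" month ", " months ")
                .appendWeeks().appendSuffix(" week ", " weeks ")
                .appendDays().appendSuffix(" day", " days")
                .printZeroNever().toFormatter();

        System.out.println(formatter.print(period));           // typically "-1 months "
        System.out.println(formatter.print(period.negated())); // "1 month "
    }
}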

From source file:net.longfalcon.newsj.Backfill.java

License:Open Source License

private void backfillGroup(NewsClient nntpClient, Group group) {
    System.out.println("Processing  " + group.getName());
    try {
        long startLoop = System.currentTimeMillis();
        NewsgroupInfo newsgroupInfo = new NewsgroupInfo();
        boolean exists = nntpClient.selectNewsgroup(group.getName(), newsgroupInfo);
        if (!exists) {
            System.out.println("Could not select group (bad name?): " + group.getName());
            return;
        }

        int backfillTarget = group.getBackfillTarget();
        long targetPost = dayToPost(nntpClient, group, backfillTarget, true);
        long localFirstRecord = group.getFirstRecord();
        long localLastRecord = group.getLastRecord();
        if (localFirstRecord == 0 || localLastRecord == 0) {
            _log.warn("Group " + group.getName()
                    + " has invalid numbers.  Have you run update on it?  Have you set the backfill days amount?");
            return;
        }
        Period daysServerHasPeriod = new Period(
                postDate(nntpClient, newsgroupInfo.getFirstArticleLong(), false),
                postDate(nntpClient, newsgroupInfo.getLastArticleLong(), false));
        Period localDaysPeriod = new Period(postDate(nntpClient, localFirstRecord, false), new DateTime());
        _log.info(String.format(
                "Group %s: server has %s - %s, or %s.\nLocal first = %s (%s). Backfill target of %s days is post %s",
                newsgroupInfo.getNewsgroup(), newsgroupInfo.getFirstArticleLong(),
                newsgroupInfo.getLastArticleLong(), _periodFormatter.print(daysServerHasPeriod),
                localFirstRecord, _periodFormatter.print(localDaysPeriod), backfillTarget, targetPost));

        if (targetPost >= localFirstRecord) { //if our estimate comes back with stuff we already have, finish
            _log.info("Nothing to do, we already have the target post.");
            return;
        }
        //get first and last part numbers from newsgroup
        if (targetPost < newsgroupInfo.getFirstArticleLong()) {
            _log.warn(
                    "WARNING: Backfill came back as before server's first.  Setting targetpost to server first.");
            targetPost = newsgroupInfo.getFirstArticleLong();
        }
        //calculate total number of parts
        long total = localFirstRecord - targetPost;
        boolean done = false;
        //set first and last, moving the window back by MESSAGE_BUFFER
        long last = localFirstRecord - 1L;
        long first = last - FetchBinaries.MESSAGE_BUFFER + 1L; //set initial "chunk"
        if (targetPost > first) {
            first = targetPost;
        }
        while (!done) {
            TransactionStatus transaction = transactionManager.getTransaction(
                    new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRED));
            _log.info(String.format("Getting %s parts (%s in queue)", last - first + 1, first - targetPost));
            fetchBinaries.scan(nntpClient, group, first, last, "backfill", false); // TODO add support for compressed headers

            group.setFirstRecord(first);
            group.setLastUpdated(new Date());
            groupDAO.update(group);
            if (first == targetPost) {
                done = true;
            } else {
                //Keep going: set new last, new first, check for last chunk.
                last = first - 1;
                first = last - FetchBinaries.MESSAGE_BUFFER + 1;
                if (targetPost > first) {
                    first = targetPost;
                }
            }
            transactionManager.commit(transaction);
        }
        DateTime firstRecordPostDate = postDate(nntpClient, first, false);

        Date firstRecordPostDateDate = null;
        if (firstRecordPostDate != null) {
            firstRecordPostDateDate = firstRecordPostDate.toDate();
        }
        group.setFirstRecordPostdate(firstRecordPostDateDate);
        group.setLastUpdated(new Date());
        groupDAO.update(group);

        Period groupTime = new Period(startLoop, System.currentTimeMillis());
        _log.info("Group processed in " + _periodFormatter.print(groupTime));
    } catch (Exception e) {
        _log.error(e, e);
    }
}
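
The Period(long startMillis, long endMillis) form at the end of backfillGroup is used throughout these classes as a simple stopwatch around a unit of work. A minimal sketch of that pattern; the class's _periodFormatter field is not part of this listing, so PeriodFormat.getDefault() stands in for it here:

import org.joda.time.Period;
import org.joda.time.format.PeriodFormat;
import org.joda.time.format.PeriodFormatter;

public class ElapsedTimeSketch {
    // Assumed stand-in for the _periodFormatter field, which is not shown in the source
    private static final PeriodFormatter PERIOD_FORMATTER = PeriodFormat.getDefault();

    public static void main(String[] args) throws InterruptedException {
        long startLoop = System.currentTimeMillis();

        Thread.sleep(1500); // stand-in for the real work (selecting and scanning the group)

        Period groupTime = new Period(startLoop, System.currentTimeMillis());
        // Prints something like "Group processed in 1 second and 500 milliseconds"
        System.out.println("Group processed in " + PERIOD_FORMATTER.print(groupTime));
    }
}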

From source file:net.longfalcon.newsj.Backfill.java

License:Open Source License

private int daysOld(DateTime then) {
    DateTime now = DateTime.now();
    if (then == null || then.isAfter(now)) {
        return 0;
    } else {
        Period period = new Period(then, now);
        return period.getDays();
    }
}
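
One thing worth noting about daysOld: Period.getDays() returns only the days field of the period, so with the default PeriodType anything beyond a few weeks rolls into the months and weeks fields and the returned value stays small. If a total day count is what is actually wanted, Days.daysBetween is the usual Joda-Time alternative. A small comparison sketch (dates assumed):

import org.joda.time.DateTime;
import org.joda.time.Days;
import org.joda.time.Period;

public class DaysOldSketch {
    public static void main(String[] args) {
        DateTime then = new DateTime(2015, 1, 1, 0, 0);
        DateTime now = new DateTime(2015, 3, 10, 0, 0);

        Period period = new Period(then, now);
        // The gap is 2 months, 1 week, 2 days, so getDays() returns only the days field: 2
        System.out.println(period.getDays());

        // Total day count between the two instants: 68
        System.out.println(Days.daysBetween(then, now).getDays());
    }
}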

From source file:net.longfalcon.newsj.Binaries.java

License:Open Source License

public void updateGroup(NewsClient nntpClient, Group group) {
    System.out.println("Processing  " + group.getName());
    try {
        NewsgroupInfo newsgroupInfo = new NewsgroupInfo();
        boolean exists = nntpClient.selectNewsgroup(group.getName(), newsgroupInfo);
        if (!exists) {
            System.out.println("Could not select group (bad name?): " + group.getName());
            return;
        }

        //Attempt to repair any missing parts before grabbing new ones
        partRepair(nntpClient, group);

        //Get first and last part numbers from newsgroup
        long firstArticle = newsgroupInfo.getFirstArticleLong();
        long lastArticle = newsgroupInfo.getLastArticleLong();
        long groupLastArticle = lastArticle; // this is to hold the true last article in the group

        // For new newsgroups - determine here how far you want to go back.
        if (group.getLastRecord() == 0) {
            if (newGroupScanByDays) {
                firstArticle = backfill.dayToPost(nntpClient, group, newGroupDaysToScan, true);
                if (firstArticle == 0) {
                    _log.warn("Skipping group: " + group.getName());
                    return;
                }
            } else {
                if (firstArticle > (lastArticle - newGroupMsgsToScan)) {
                    // fewer than newGroupMsgsToScan articles available; keep firstArticle as-is
                } else {
                    firstArticle = lastArticle - newGroupMsgsToScan;
                }
            }
            DateTime firstRecordPostDate = backfill.postDate(nntpClient, firstArticle, false);
            // update group record
            group.setFirstRecord(firstArticle);
            group.setFirstRecordPostdate(firstRecordPostDate.toDate());
            updateGroupModel(group);
        } else {
            firstArticle = group.getLastRecord() + 1;
        }

        // Generate postdates for first and last records, for those that upgraded
        if ((group.getFirstRecordPostdate() == null || group.getLastRecordPostdate() == null)
                && (group.getLastRecord() != 0 && group.getFirstRecord() != 0)) {
            DateTime groupFirstPostDate = backfill.postDate(nntpClient, group.getFirstRecord(), false);
            group.setFirstRecordPostdate(groupFirstPostDate.toDate());
            DateTime groupLastPostDate = backfill.postDate(nntpClient, group.getLastRecord(), false);
            group.setLastRecordPostdate(groupLastPostDate.toDate());
            updateGroupModel(group);
        }

        // Deactivate empty groups
        if ((lastArticle - firstArticle) <= 5) {
            /*group.setActive(false);
            group.setLastUpdated(new Date());
            updateGroupModel(group);*/
            // todo: enable when "empty group" is more clearly understood
        }

        // Calculate total number of parts
        long totalParts = groupLastArticle - firstArticle + 1;

        // If total is bigger than 0 it means we have new parts in the newsgroup
        if (totalParts > 0) {
            _log.info(String.format("Group %s has %d new parts.", group.getName(), totalParts));
            _log.info(String.format("First: %d Last: %d Local last: %d", firstArticle, lastArticle,
                    group.getLastRecord()));
            if (group.getLastRecord() == 0) {
                _log.info("New group starting with "
                        + (newGroupScanByDays ? newGroupDaysToScan + " days" : newGroupMsgsToScan + " messages")
                        + " worth.");
            }

            boolean done = false;

            long startLoopTime = System.currentTimeMillis();
            while (!done) {

                if (totalParts > messageBuffer) {
                    if (firstArticle + messageBuffer > groupLastArticle) {
                        lastArticle = groupLastArticle;
                    } else {
                        lastArticle = firstArticle + messageBuffer;
                    }
                }

                _log.info(String.format("Getting %d parts (%d to %d) - %d in queue",
                        lastArticle - firstArticle + 1, firstArticle, lastArticle,
                        groupLastArticle - lastArticle));

                //get headers from newsgroup
                long lastId = 0;
                try {
                    lastId = fetchBinaries.scan(nntpClient, group, firstArticle, lastArticle, "update",
                            compressedHeaders); // magic string
                } catch (Exception e) {
                    _log.error(e.toString(), e);
                }
                if (lastId == 0) {
                    // scan failed - skip group
                    return;
                }

                group.setLastRecord(lastArticle);
                group.setLastUpdated(new Date());
                updateGroupModel(group);

                if (lastArticle == groupLastArticle) {
                    done = true;
                } else {
                    lastArticle = lastId;
                    firstArticle = lastArticle + 1;
                }

            }

            DateTime lastRecordPostDate = backfill.postDate(nntpClient, lastArticle, false);
            // DEBUG REMOVE
            if (lastRecordPostDate == null) {
                _log.error("retrying backfill.postDate(nntpClient, " + lastArticle + ", true)");
                lastRecordPostDate = backfill.postDate(nntpClient, lastArticle, true);
                if (lastRecordPostDate == null) {
                    lastRecordPostDate = new DateTime();
                }
            }
            group.setLastRecordPostdate(lastRecordPostDate.toDate());
            group.setLastUpdated(new Date());
            updateGroupModel(group);
            Period loopTime = new Period(startLoopTime, System.currentTimeMillis());
            _log.info(String.format("Group processed in %s seconds", _periodFormatter.print(loopTime)));
        }
    } catch (IOException e) {
        _log.error(e.toString(), e);
    }

}

From source file:net.longfalcon.newsj.Binaries.java

License:Open Source License

@Transactional(propagation = Propagation.REQUIRES_NEW)
private void partRepair(NewsClient nntpClient, Group group) throws IOException {
    List<PartRepair> partRepairList = partRepairDAO.findByGroupIdAndAttempts(group.getId(), 5, true);
    long partsRepaired = 0;
    long partsFailed = 0;

    int partRepairListSize = partRepairList.size();
    if (partRepairListSize > 0) {
        _log.info("Attempting to repair " + partRepairListSize + " parts...");

        //loop through each part to group into ranges
        Map<Long, Long> ranges = new LinkedHashMap<>();
        long lastNum = 0;
        long lastPart = 0;
        for (PartRepair partRepair : partRepairList) {
            long partRepairNumberId = partRepair.getNumberId();
            if ((lastNum + 1) == partRepairNumberId) {
                ranges.put(lastPart, partRepairNumberId);
            } else {
                lastPart = partRepairNumberId;
                ranges.put(lastPart, partRepairNumberId);
            }
            lastNum = partRepairNumberId;
        }

        //download missing parts in ranges
        long startLoopTime = System.currentTimeMillis();
        for (Map.Entry<Long, Long> entry : ranges.entrySet()) {

            long partFrom = entry.getKey();
            long partTo = entry.getValue();

            _log.info("repairing " + partFrom + " to " + partTo);

            //get article from newsgroup
            fetchBinaries.scan(nntpClient, group, partFrom, partTo, "partrepair", compressedHeaders);

            //check if the articles were added
            List<Long> articlesRange = ArrayUtil.rangeList(partFrom, partTo);
            // This complete clusterf*ck is due to the Part table lacking a groupId column.
            // TODO: add a groupId column to Part table!!!!
            List<PartRepair> partRepairs = partRepairDAO.findByGroupIdAndNumbers(group.getId(), articlesRange);
            for (PartRepair partRepair : partRepairs) {
                List<Part> partList = partDAO.findByNumberAndGroupId(partRepair.getNumberId(), group.getId());
                Part part = partList.isEmpty() ? null : partList.get(0);
                if (part != null && partRepair.getNumberId() == part.getNumber()) {
                    partsRepaired++;

                    //article was added, delete from partrepair
                    // May need to be stored for later to prevent modification
                    _log.info("part " + part.getNumber() + " successfully added");
                    partRepairDAO.deletePartRepair(partRepair);
                } else {
                    partsFailed++;

                    //article was not added, increment attempts
                    int attempts = partRepair.getAttempts();
                    partRepair.setAttempts(attempts + 1);
                    _log.info("part " + partRepair.getNumberId() + " was not added");
                    partRepairDAO.updatePartRepair(partRepair);
                }
            }
        }
        Period repairLoopTime = new Period(startLoopTime, System.currentTimeMillis());
        _log.info(partsRepaired + " parts repaired.");
        _log.info("repair took " + _periodFormatter.print(repairLoopTime));
    }

    //remove articles that we can't fetch after 5 attempts
    //change to HQL Delete?
    List<PartRepair> partRepairsToDelete = partRepairDAO.findByGroupIdAndAttempts(group.getId(), 5, false);
    for (PartRepair partRepair : partRepairsToDelete) {
        partRepairDAO.deletePartRepair(partRepair);
    }
}

From source file:net.longfalcon.newsj.CategoryService.java

License:Open Source License

public int determineCategory(long groupId, final String releaseName) {
    tmpCat = 0;
    long startTime = System.currentTimeMillis();
    Group group = groupDAO.findGroupByGroupId(groupId);
    String groupName = group.getName();
    //
    // Try and determine based on group - First Pass
    //
    Pattern pattern = Pattern.compile("alt\\.binaries\\.ath", Pattern.CASE_INSENSITIVE);
    Matcher matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isConsole(releaseName)) {
            return tmpCat;
        }
        if (isPC(releaseName)) {
            return tmpCat;
        }
        if (isMovie(releaseName)) {
            return tmpCat;
        }
        if (isMusic(releaseName)) {
            return tmpCat;
        }
    }

    pattern = Pattern.compile("alt\\.binaries\\.b4e", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isPC(releaseName)) {
            return tmpCat;
        }
        if (isEBook(releaseName)) {
            return tmpCat;
        }
    }

    pattern = Pattern.compile("anime", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find())
        return CAT_MISC_ANIME;

    pattern = Pattern.compile("alt\\.binaries\\..*?audiobook\\.*?", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find())
        return CAT_MUSIC_AUDIOBOOK;

    pattern = Pattern.compile("lossless|flac", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        return CAT_MUSIC_LOSSLESS;
    }

    pattern = Pattern.compile("alt\\.binaries\\.sounds.*?|alt\\.binaries\\.mp3.*?|alt\\.binaries\\..*\\.mp3",
            Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isMusic(releaseName)) {
            return tmpCat;
        }
        return CAT_MUSIC_MP3;
    }

    pattern = Pattern.compile("alt\\.binaries\\.console\\.ps3", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find())
        return CAT_GAME_PS3;

    pattern = Pattern.compile("alt\\.binaries\\.games\\.xbox*", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isConsole(releaseName)) {
            return tmpCat;
        }
        if (isTV(releaseName)) {
            return tmpCat;
        }
        if (isMovie(releaseName)) {
            return tmpCat;
        }
    }

    pattern = Pattern.compile("alt\\.binaries\\.games", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isConsole(releaseName)) {
            return tmpCat;
        }
        return CAT_PC_GAMES;
    }

    pattern = Pattern.compile("alt\\.binaries\\.games\\.wii", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isConsole(releaseName)) {
            return tmpCat;
        }
        return CAT_GAME_WII;
    }

    pattern = Pattern.compile("alt\\.binaries\\.dvd.*?", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isXxx(releaseName)) {
            return tmpCat;
        }
        if (isTV(releaseName)) {
            return tmpCat;
        }
        if (isMovie(releaseName)) {
            return tmpCat;
        }
    }

    pattern = Pattern.compile("alt\\.binaries\\.hdtv*|alt\\.binaries\\.x264", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isXxx(releaseName)) {
            return tmpCat;
        }
        if (isTV(releaseName)) {
            return tmpCat;
        }
        if (isMovie(releaseName)) {
            return tmpCat;
        }
    }

    pattern = Pattern.compile("alt\\.binaries\\.classic\\.tv.*?", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        return CAT_TV_SD;
    }

    pattern = Pattern.compile("alt\\.binaries\\.e-book*?", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        return CAT_MISC_EBOOK;
    }

    pattern = Pattern.compile("alt\\.binaries\\.comics.*?", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        return CAT_MISC_COMICS;
    }

    pattern = Pattern.compile("alt\\.binaries\\.cores.*?", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isXxx(releaseName)) {
            return tmpCat;
        }
        if (isMovie(releaseName)) {
            return tmpCat;
        }
        if (isConsole(releaseName)) {
            return tmpCat;
        }
        if (isPC(releaseName)) {
            return tmpCat;
        }
    }

    pattern = Pattern.compile("alt\\.binaries\\.cd.image|alt\\.binaries\\.audio\\.warez",
            Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isPC(releaseName)) {
            return tmpCat;
        }
        return CAT_PC_0DAY;
    }

    pattern = Pattern.compile("alt\\.binaries\\.sony\\.psp", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find())
        return CAT_GAME_PSP;

    pattern = Pattern.compile("alt\\.binaries\\.nintendo\\.ds|alt\\.binaries\\.games\\.nintendods",
            Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        return CAT_GAME_NDS;
    }

    pattern = Pattern.compile("alt\\.binaries\\.mpeg\\.video\\.music", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        return CAT_MUSIC_VIDEO;
    }

    pattern = Pattern.compile("alt\\.binaries\\.mac", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        return CAT_PC_MAC;
    }

    pattern = Pattern.compile("linux", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        return CAT_PC_ISO;
    }

    pattern = Pattern.compile("alt\\.binaries\\.ipod\\.videos\\.tvshows", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find())
        return CAT_TV_OTHER;

    pattern = Pattern.compile("alt\\.binaries\\.documentaries", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isXxx(releaseName)) {
            return tmpCat;
        }
        if (isTV(releaseName)) {
            return tmpCat;
        }
        return CAT_TV_SD;
    }

    pattern = Pattern.compile("alt\\.binaries\\.tv\\.swedish", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        return CAT_TV_FOREIGN;
    }

    pattern = Pattern.compile("alt\\.binaries\\.erotica\\.divx", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        return CAT_XXX_XVID;
    }

    pattern = Pattern.compile("alt\\.binaries\\.mma|alt\\.binaries\\.multimedia\\.sports.*?",
            Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        return CAT_TV_SPORT;
    }

    pattern = Pattern.compile("alt\\.binaries\\.b4e$", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isPC(releaseName)) {
            return tmpCat;
        }
    }

    pattern = Pattern.compile("alt\\.binaries\\.warez\\.smartphone", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find())
        if (isPC(releaseName)) {
            return tmpCat;
        }

    pattern = Pattern.compile("alt\\.binaries\\.warez\\.ibm\\-pc\\.0\\-day|alt\\.binaries\\.warez",
            Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isConsole(releaseName)) {
            return tmpCat;
        }
        if (isEBook(releaseName)) {
            return tmpCat;
        }
        if (isXxx(releaseName)) {
            return tmpCat;
        }
        if (isPC(releaseName)) {
            return tmpCat;
        }
        if (isTV(releaseName)) {
            return tmpCat;
        }
        return CAT_PC_0DAY;
    }

    pattern = Pattern.compile("alt\\.binaries\\.(teevee|multimedia|tv|tvseries)", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isXxx(releaseName)) {
            return tmpCat;
        }
        if (isTV(releaseName)) {
            return tmpCat;
        }
        if (isForeignTV(releaseName)) {
            return tmpCat;
        }
        return CAT_TV_OTHER;
    }

    pattern = Pattern.compile("erotica", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isXxx(releaseName)) {
            return tmpCat;
        }
        return CAT_XXX_XVID;
    }

    pattern = Pattern.compile(
            "alt\\.binaries\\.movies\\.xvid|alt\\.binaries\\.movies\\.divx|alt\\.binaries\\.movies",
            Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isConsole(releaseName)) {
            return tmpCat;
        }
        if (isXxx(releaseName)) {
            return tmpCat;
        }
        if (isTV(releaseName)) {
            return tmpCat;
        }
        if (isMovie(releaseName)) {
            return tmpCat;
        }
        return CAT_MOVIE_SD;
    }

    pattern = Pattern.compile("wmvhd", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isXxx(releaseName)) {
            return tmpCat;
        }
        if (isTV(releaseName)) {
            return tmpCat;
        }
        if (isMovie(releaseName)) {
            return tmpCat;
        }
    }

    pattern = Pattern.compile("inner\\-sanctum", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isPC(releaseName)) {
            return tmpCat;
        }
        if (isEBook(releaseName)) {
            return tmpCat;
        }
        return CAT_MUSIC_MP3;
    }

    pattern = Pattern.compile("alt\\.binaries\\.x264", Pattern.CASE_INSENSITIVE);
    matcher = pattern.matcher(groupName);
    if (matcher.find()) {
        if (isXxx(releaseName)) {
            return tmpCat;
        }
        if (isTV(releaseName)) {
            return tmpCat;
        }
        if (isMovie(releaseName)) {
            return tmpCat;
        }
        return CAT_MOVIE_OTHER;
    }

    //
    // if a category hasn't been set yet, then try against all
    // functions and if still nothing, return CAT_MISC.
    //
    if (isXxx(releaseName)) {
        return tmpCat;
    }
    if (isPC(releaseName)) {
        return tmpCat;
    }
    if (isTV(releaseName)) {
        return tmpCat;
    }
    if (isMovie(releaseName)) {
        return tmpCat;
    }
    if (isConsole(releaseName)) {
        return tmpCat;
    }
    if (isMusic(releaseName)) {
        return tmpCat;
    }
    if (isEBook(releaseName)) {
        return tmpCat;
    }

    Period determineCatTime = new Period(startTime, System.currentTimeMillis());
    _log.info("Determining category for " + releaseName + " took " + _periodFormatter.print(determineCatTime));

    return CAT_MISC;
}

From source file:net.longfalcon.newsj.FetchBinaries.java

License:Open Source License

@Transactional(propagation = Propagation.REQUIRED, isolation = Isolation.READ_COMMITTED)
public long scan(NewsClient nntpClient, Group group, long firstArticle, long lastArticle, String type,
        boolean compressedHeaders) throws IOException {
    // this is a hack - tx is not working ATM
    TransactionStatus transaction = transactionManager
            .getTransaction(new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRED));

    long startHeadersTime = System.currentTimeMillis();

    long maxNum = 0;
    Map<String, Message> messages = new LinkedHashMap<>(MESSAGE_BUFFER + 1);

    Iterable<NewsArticle> articlesIterable = null;
    try {
        if (compressedHeaders) {
            _log.warn("Compressed Headers setting not currently functional");
            articlesIterable = nntpClient.iterateArticleInfo(firstArticle, lastArticle);
        } else {
            articlesIterable = nntpClient.iterateArticleInfo(firstArticle, lastArticle);
        }
    } catch (IOException e) {
        _log.error(e.toString());
        if (nntpClient.getReplyCode() == 400) {
            _log.info("NNTP connection timed out. Reconnecting...");
            nntpClient = nntpConnectionFactory.getNntpClient();
            nntpClient.selectNewsgroup(group.getName());
            articlesIterable = nntpClient.iterateArticleInfo(firstArticle, lastArticle);
        }
    }

    Period headersTime = new Period(startHeadersTime, System.currentTimeMillis());

    Set<Long> rangeRequested = ArrayUtil.rangeSet(firstArticle, lastArticle);
    Set<Long> messagesReceived = new HashSet<>();
    Set<Long> messagesBlacklisted = new HashSet<>();
    Set<Long> messagesIgnored = new HashSet<>();
    Set<Long> messagesInserted = new HashSet<>();
    Set<Long> messagesNotInserted = new HashSet<>();

    // check error codes?

    long startUpdateTime = System.currentTimeMillis();

    if (articlesIterable != null) {
        for (NewsArticle article : articlesIterable) {
            long articleNumber = article.getArticleNumberLong();

            if (articleNumber == 0) {
                continue;
            }

            messagesReceived.add(articleNumber);

            Pattern pattern = Defaults.PARTS_SUBJECT_REGEX;
            String subject = article.getSubject();
            Matcher matcher = pattern.matcher(subject);
            if (ValidatorUtil.isNull(subject) || !matcher.find()) {
                // not a binary post most likely.. continue
                messagesIgnored.add(articleNumber);
                if (_log.isDebugEnabled()) {
                    _log.debug(String.format("Skipping message no# %s : %s", articleNumber, subject));
                }
                continue;
            }

            //Filter binaries based on black/white list
            if (isBlacklisted(article, group)) {
                messagesBlacklisted.add(articleNumber);
                continue;
            }
            String group1 = matcher.group(1);
            String group2 = matcher.group(2);
            if (ValidatorUtil.isNumeric(group1) && ValidatorUtil.isNumeric(group2)) {
                int currentPart = Integer.parseInt(group1);
                int maxParts = Integer.parseInt(group2);
                subject = (matcher.replaceAll("")).trim();

                if (!messages.containsKey(subject)) {
                    messages.put(subject, new Message(article, currentPart, maxParts));
                } else if (currentPart > 0) {
                    Message message = messages.get(subject);
                    String articleId = article.getArticleId();
                    String messageId = articleId.substring(1, articleId.length() - 1);
                    int size = article.getSize();
                    message.addPart(currentPart, messageId, articleNumber, size);
                    messages.put(subject, message);
                }
            }
        }

        long count = 0;
        long updateCount = 0;
        long partCount = 0;
        maxNum = lastArticle;

        // add all the requested then remove the ones we did receive.
        Set<Long> rangeNotRecieved = new HashSet<>();
        rangeNotRecieved.addAll(rangeRequested);
        rangeNotRecieved.removeAll(messagesReceived);

        if (!type.equals("partrepair")) {
            _log.info(String.format("Received %d articles of %d requested, %d blacklisted, %d not binaries",
                    messagesReceived.size(), lastArticle - firstArticle + 1, messagesBlacklisted.size(),
                    messagesIgnored.size()));
        }

        if (rangeNotRecieved.size() > 0) {
            switch (type) {
            case "backfill":
                // don't add missing articles
                break;
            case "partrepair":
            case "update":
            default:
                addMissingParts(rangeNotRecieved, group);
                break;
            }
            _log.info("Server did not return article numbers " + ArrayUtil.stringify(rangeNotRecieved));
        }

        if (!messages.isEmpty()) {

            long dbUpdateTime = 0;
            maxNum = firstArticle;
            //insert binaries and parts into database. when binary already exists; only insert new parts
            for (Map.Entry<String, Message> entry : messages.entrySet()) {
                String subject = entry.getKey();
                Message message = entry.getValue();

                Map<Integer, MessagePart> partsMap = message.getPartsMap();
                if (!ValidatorUtil.isNull(subject) && !partsMap.isEmpty()) {
                    String binaryHash = EncodingUtil
                            .md5Hash(subject + message.getFrom() + String.valueOf(group.getId()));
                    Binary binary = binaryDAO.findByBinaryHash(binaryHash);
                    if (binary == null) {
                        long startDbUpdateTime = System.currentTimeMillis();
                        binary = new Binary();
                        binary.setName(subject);
                        binary.setFromName(message.getFrom());
                        binary.setDate(message.getDate().toDate());
                        binary.setXref(message.getxRef());
                        binary.setTotalParts(message.getMaxParts());
                        binary.setGroupId(group.getId());
                        binary.setBinaryHash(binaryHash);
                        binary.setDateAdded(new Date());
                        binaryDAO.updateBinary(binary);
                        dbUpdateTime += (System.currentTimeMillis() - startDbUpdateTime);
                        count++;
                        if (count % 500 == 0) {
                            _log.info(String.format("%s bin adds...", count));
                        }
                    } else {
                        updateCount++;
                        if (updateCount % 500 == 0) {
                            _log.info(String.format("%s bin updates...", updateCount));
                        }
                    }

                    long binaryId = binary.getId();
                    if (binaryId == 0) {
                        throw new RuntimeException("ID for binary wasnt set.");
                    }

                    for (MessagePart messagePart : message.getPartsMap().values()) {
                        long articleNumber = messagePart.getArticleNumber();
                        maxNum = (articleNumber > maxNum) ? articleNumber : maxNum;
                        partCount++;
                        // create part - its possible some bugs are happening here.
                        Part part = new Part();
                        part.setBinaryId(binaryId);
                        part.setMessageId(messagePart.getMessageId());
                        part.setNumber(messagePart.getArticleNumber());
                        part.setPartNumber(messagePart.getPartNumber());
                        part.setSize(messagePart.getSize());
                        part.setDateAdded(new Date());
                        try {
                            long startDbUpdateTime = System.currentTimeMillis();
                            partDAO.updatePart(part);
                            dbUpdateTime += (System.currentTimeMillis() - startDbUpdateTime);
                            messagesInserted.add(messagePart.getArticleNumber());
                        } catch (Exception e) {
                            _log.error(e.toString());
                            messagesNotInserted.add(messagePart.getArticleNumber());
                        }

                    }
                }
            }
            //TODO: determine whether to add to missing articles if insert failed
            if (messagesNotInserted.size() > 0) {
                _log.warn("WARNING: Parts failed to insert");
                addMissingParts(messagesNotInserted, group);
            }
            Period dbUpdatePeriod = new Period(dbUpdateTime);
            _log.info("Spent " + _periodFormatter.print(dbUpdatePeriod) + " updating the db");
        }
        Period updateTime = new Period(startUpdateTime, System.currentTimeMillis());

        if (!type.equals("partrepair")) {
            _log.info(count + " new, " + updateCount + " updated, " + partCount + " parts.");
            _log.info(" " + _periodFormatter.print(headersTime) + " headers, "
                    + _periodFormatter.print(updateTime) + " update.");
        }
        transactionManager.commit(transaction);
        return maxNum;
    } else {
        _log.error("Error: Can't get parts from server (msgs not array)\n Skipping group");
        return 0;
    }

}

From source file:net.longfalcon.newsj.Nzb.java

License:Open Source License

private void _doWriteNZBforRelease(Release release, Directory nzbBaseDir) throws IOException, JAXBException {
    long releaseId = release.getId();
    String releaseGuid = release.getGuid();
    String releaseName = release.getName();
    long startTime = System.currentTimeMillis();

    Category category = release.getCategory();
    String categoryName = null;
    if (category != null) {
        categoryName = category.getTitle();
    }

    net.longfalcon.newsj.xml.Nzb nzbRoot = new net.longfalcon.newsj.xml.Nzb();
    nzbRoot.setXmlns(_XMLNS);
    Head head = new Head();
    List<Meta> metaElements = head.getMeta();
    Meta categoryMeta = new Meta();
    categoryMeta.setType("category");
    categoryMeta.setvalue(StringEscapeUtils.escapeXml11(categoryName));
    Meta nameMeta = new Meta();
    nameMeta.setType("name");
    nameMeta.setvalue(StringEscapeUtils.escapeXml11(releaseName));
    metaElements.add(categoryMeta);
    metaElements.add(nameMeta);
    nzbRoot.setHead(head);

    List<File> files = nzbRoot.getFile();
    List<Binary> binaries = binaryDAO.findBinariesByReleaseId(releaseId);
    for (Binary binary : binaries) {
        File fileElement = new File();
        fileElement.setPoster(StringEscapeUtils.escapeXml11(binary.getFromName()));
        fileElement.setDate(String.valueOf(binary.getDate().getTime()));
        String subjectString = String.format("%s (1/%s)", StringEscapeUtils.escapeXml11(binary.getName()),
                binary.getTotalParts());
        fileElement.setSubject(subjectString);

        Groups groupsElement = new Groups();
        List<Group> groups = groupsElement.getGroup();
        net.longfalcon.newsj.model.Group group = groupDAO.findGroupByGroupId(binary.getGroupId());
        Group groupElement = new Group();
        groupElement.setvalue(group.getName());
        groups.add(groupElement);

        // TODO: add XRef groups
        fileElement.setGroups(groupsElement);
        Segments segmentsElement = new Segments();
        List<Segment> segments = segmentsElement.getSegment();

        List<Object[]> messageIdSizePartNos = partDAO
                .findDistinctMessageIdSizeAndPartNumberByBinaryId(binary.getId());
        for (Object[] messageIdSizePartNo : messageIdSizePartNos) {
            // messageIdSizePartNo is {String,Long,Integer}
            Segment segment = new Segment();
            segment.setBytes(String.valueOf(messageIdSizePartNo[1]));
            segment.setNumber(String.valueOf(messageIdSizePartNo[2]));

            segment.setvalue(String.valueOf(messageIdSizePartNo[0]));
            segments.add(segment);
        }
        fileElement.setSegments(segmentsElement);

        files.add(fileElement);
    }

    long startFileWriteTime = System.currentTimeMillis();

    FsFile fileHandle = getNzbFileHandle(release, nzbBaseDir);
    Writer writer = new OutputStreamWriter(fileHandle.getOutputStream(), Charset.forName("UTF-8"));
    getMarshaller().marshal(nzbRoot, writer);
    writer.write(String.format("<!-- generated by NewsJ %s -->", config.getReleaseVersion()));
    writer.flush();
    writer.close();

    Period totalTimePeriod = new Period(startTime, System.currentTimeMillis());
    Period buildTimePeriod = new Period(startTime, startFileWriteTime);
    Period writeTimePeriod = new Period(startFileWriteTime, System.currentTimeMillis());
    _log.info(String.format("Wrote NZB for %s in %s;\n build time: %s write time: %s", releaseName,
            _periodFormatter.print(totalTimePeriod), _periodFormatter.print(buildTimePeriod),
            _periodFormatter.print(writeTimePeriod)));
}