Example usage for com.google.common.collect Table get

List of usage examples for com.google.common.collect Table get

Introduction

On this page you can find example usages of com.google.common.collect Table get.

Prototype

V get(@Nullable Object rowKey, @Nullable Object columnKey);

Document

Returns the value corresponding to the given row and column keys, or null if no such mapping exists.
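
A quick illustrative sketch of this behavior (not taken from any of the projects below; the table contents and key names are hypothetical), using Guava's HashBasedTable:

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

public class TableGetSketch {
    public static void main(String[] args) {
        // A Table maps a (rowKey, columnKey) pair to a single value.
        Table<String, String, Integer> sales = HashBasedTable.create();
        sales.put("2023", "Q1", 100);
        sales.put("2023", "Q2", 150);

        // get(rowKey, columnKey) returns the stored value for that cell...
        Integer q1 = sales.get("2023", "Q1"); // 100

        // ...or null when no such mapping exists.
        Integer q3 = sales.get("2023", "Q3"); // null

        System.out.println(q1 + ", " + q3);
    }
}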

Usage

From source file:paquete.AnalizadorSemantico.java

public Relacion Diferencia(NodoBase ni, NodoBase nd) {
    // Check that the two tables have the same number of columns
    String nombreRelacion = "";
    int li = ni.getRelacion().getAtributos().length;
    int ld = nd.getRelacion().getAtributos().length;
    String tipo[] = ni.getRelacion().getTipos();

    Table<Integer, String, Object> relacionTemporal = HashBasedTable.create();
    String columna[] = null;

    if (li == ld) {

        // Check that the column types are the same
        columna = new String[ni.getRelacion().getAtributos().length];
        for (int i = 0; i < ni.getRelacion().getAtributos().length; i++) {
            if (ni.getRelacion().getTipos()[i].compareTo(nd.getRelacion().getTipos()[i]) != 0) {
                System.err.print("La operacion diferencia requiere que los atributos posean el mismo dominio");
                System.exit(0);
            }
            // If the attributes have the same name in both relations, use it as the column name of the
            // new relation; otherwise concatenate the two names
            else if (ni.getRelacion().getAtributos()[i].compareTo(nd.getRelacion().getAtributos()[i]) == 0) {
                columna[i] = ni.getRelacion().getAtributos()[i];
            } else {
                columna[i] = ni.getRelacion().getAtributos()[i].concat(nd.getRelacion().getAtributos()[i]);

            }
        }

        Table<Integer, String, Object> r1 = HashBasedTable.create();
        Table<Integer, String, Object> r2 = HashBasedTable.create();

        for (int col = 0; col < columna.length; col++) {
            for (int fila = 0; fila < ni.getRelacion().getDatos().size()
                    / ni.getRelacion().getAtributos().length; fila++) {
                r1.put(fila, columna[col],
                        ni.getRelacion().getDatos().get(fila, ni.getRelacion().getAtributos()[col]));

            }

        }

        for (int col = 0; col < columna.length; col++) {
            for (int fila = 0; fila < nd.getRelacion().getDatos().size()
                    / nd.getRelacion().getAtributos().length; fila++) {
                r2.put(fila, columna[col],
                        nd.getRelacion().getDatos().get(fila, nd.getRelacion().getAtributos()[col]));

            }

        }
        boolean guardar = true;
        int j = 0;
        for (int i = 0; i < r1.size() / columna.length; i++) {

            Map<String, Object> t = r1.row(i);

            for (int h = 0; h < r2.size() / columna.length; h++) {

                Map<String, Object> t2 = r2.row(h);

                if (t.equals(t2)) {
                    guardar = false;
                    break;
                }
                guardar = true;
            }

            if (guardar) {
                for (int h = 0; h < columna.length; h++) {
                    relacionTemporal.put(j, columna[h], r1.get(i, columna[h]));
                }

                j++;

            }

        }

    }

    else {
        System.err.print(
                "La operacion Diferencia requiere que las relaciones involucradas posean el mismo numero de atributos");
        System.exit(0);
    }

    return new Relacion(nombreRelacion, columna, tipo, relacionTemporal);

}

From source file:i5.las2peer.services.recommender.librec.data.NetflixDataDAO.java

/**
 * Read data from the data file. Note that duplicate lines are not handled.
 *
 * @param cols
 *            the indexes of the relevant columns in the data file: {user, item, [rating, timestamp] (optional)},
 *            not used for the Netflix dataset.
 * @param binThold
 *            the threshold to binarize a rating. If a rating is greater than the threshold, the value will be 1;
 *            otherwise 0. To disable this feature, i.e., keep the original rating value, set the threshold to a
 *            negative value.
 * @return a sparse matrix storing all the relevant data
 */
public SparseMatrix[] readData(int[] cols, double binThold) throws Exception {

    Logs.info(String.format("Dataset: %s", Strings.last(dataPath, 38)));

    // Table {row-id, col-id, rate}
    Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
    // Table {row-id, col-id, timestamp}
    Table<Integer, Integer, Long> timeTable = HashBasedTable.create();
    // Map {col-id, multiple row-id}: used to quickly build the rating matrix
    Multimap<Integer, Integer> colMap = HashMultimap.create();

    File[] fileList = new File(dataPath).listFiles();

    setHeadline(true);
    for (File file : fileList) {
        BufferedReader br = FileIO.getReader(file);

        String line = null;
        minTimestamp = Long.MAX_VALUE;
        maxTimestamp = Long.MIN_VALUE;

        line = br.readLine();
        if (line == null) {
            continue;
        }
        String item = line.trim().split(":")[0];

        while ((line = br.readLine()) != null) {
            String[] data = line.trim().split("[ \t,]+");

            String user = data[0];
            Double rate = Double.valueOf(data[1]);

            // binarize the rating for item recommendation task
            if (binThold >= 0) {
                rate = rate > binThold ? 1.0 : 0.0;
            }

            scaleDist.add(rate);

            // inner id starting from 0
            int row = userIds.containsKey(user) ? userIds.get(user) : userIds.size();
            userIds.put(user, row);

            int col = itemIds.containsKey(item) ? itemIds.get(item) : itemIds.size();
            itemIds.put(item, col);

            dataTable.put(row, col, rate);
            colMap.put(col, row);

            // record rating's issuing time
            // convert to a timestamp (milliseconds since 1970-01-01)
            String dateStr = data[2]; // format e.g. 2005-09-06
            long timestamp = new SimpleDateFormat("yyyy-MM-dd").parse(dateStr).getTime();

            if (minTimestamp > timestamp)
                minTimestamp = timestamp;

            if (maxTimestamp < timestamp)
                maxTimestamp = timestamp;

            timeTable.put(row, col, timestamp);
        }
        br.close();
    }

    numRatings = scaleDist.size();
    ratingScale = new ArrayList<>(scaleDist.elementSet());
    Collections.sort(ratingScale);

    int numRows = numUsers(), numCols = numItems();

    // if min-rate = 0.0, shift the whole scale up by one step
    double minRate = ratingScale.get(0).doubleValue();
    double epsilon = minRate == 0.0 ? ratingScale.get(1).doubleValue() - minRate : 0;
    if (epsilon > 0) {
        // shift the scale up by one step
        for (int i = 0, im = ratingScale.size(); i < im; i++) {
            double val = ratingScale.get(i);
            ratingScale.set(i, val + epsilon);
        }
        // update data table
        for (int row = 0; row < numRows; row++) {
            for (int col = 0; col < numCols; col++) {
                if (dataTable.contains(row, col))
                    dataTable.put(row, col, dataTable.get(row, col) + epsilon);
            }
        }
    }

    String dateRange = "";
    if (cols.length >= 4)
        dateRange = String.format(", Timestamps = {%s, %s}", Dates.toString(minTimestamp),
                Dates.toString(maxTimestamp));

    Logs.debug("With Specs: {Users, {}} = {{}, {}, {}}, Scale = {{}}{}",
            (isItemAsUser ? "Users, Links" : "Items, Ratings"), numRows, numCols, numRatings,
            Strings.toString(ratingScale), dateRange);

    // build rating matrix
    rateMatrix = new SparseMatrix(numRows, numCols, dataTable, colMap);

    if (timeTable != null)
        timeMatrix = new SparseMatrix(numRows, numCols, timeTable, colMap);

    // release memory of data table
    dataTable = null;
    timeTable = null;

    return new SparseMatrix[] { rateMatrix, timeMatrix };
}

From source file:i5.las2peer.services.recommender.librec.data.CSVDataDAO.java

/**
 * Read data from the data file. Note that duplicate lines are not handled.
 *
 * @param cols
 *            the indexes of the relevant columns in the data file: {user, item, [rating, timestamp] (optional)}
 * @param binThold
 *            the threshold to binarize a rating. If a rating is greater than the threshold, the value will be 1;
 *            otherwise 0. To disable this feature, i.e., keep the original rating value, set the threshold to a
 *            negative value.
 * @return a sparse matrix storing all the relevant data
 */
public SparseMatrix[] readData(int[] cols, double binThold) throws Exception {

    Logs.info(String.format("Dataset: %s", Strings.last(dataPath, 38)));

    // Table {row-id, col-id, rate}
    Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
    // Table {row-id, col-id, timestamp}
    Table<Integer, Integer, Long> timeTable = null;
    // Map {col-id, multiple row-id}: used to quickly build the rating matrix
    Multimap<Integer, Integer> colMap = HashMultimap.create();

    BufferedReader br = FileIO.getReader(dataPath);
    String line = null;
    minTimestamp = Long.MAX_VALUE;
    maxTimestamp = Long.MIN_VALUE;
    while ((line = br.readLine()) != null) {
        if (isHeadline()) {
            setHeadline(false);
            continue;
        }

        String[] data = line.trim().split("[ \t,]+");

        if (data.length < 2) {
            Logs.error(String.format("Dataset: Cannot read line \"%s\"", line));
            continue;
        }

        String user = data[cols[0]];
        String item = data[cols[1]];
        Double rate = (cols.length >= 3 && data.length >= 3) ? Double.valueOf(data[cols[2]]) : 1.0;

        // binarize the rating for item recommendation task
        if (binThold >= 0)
            rate = rate > binThold ? 1.0 : 0.0;

        scaleDist.add(rate);

        // inner id starting from 0
        int row = userIds.containsKey(user) ? userIds.get(user) : userIds.size();
        userIds.put(user, row);

        int col = itemIds.containsKey(item) ? itemIds.get(item) : itemIds.size();
        itemIds.put(item, col);

        dataTable.put(row, col, rate);
        colMap.put(col, row);

        // record rating's issuing time
        if (cols.length >= 4 && data.length >= 4) {
            if (timeTable == null)
                timeTable = HashBasedTable.create();

            // convert to milliseconds
            long mms = 0L;
            try {
                mms = Long.parseLong(data[cols[3]]); // Long.parseLong cannot parse scientific notation such as "9.7323480e+008"
            } catch (NumberFormatException e) {
                mms = (long) Double.parseDouble(data[cols[3]]);
            }
            long timestamp = timeUnit.toMillis(mms);

            if (minTimestamp > timestamp)
                minTimestamp = timestamp;

            if (maxTimestamp < timestamp)
                maxTimestamp = timestamp;

            timeTable.put(row, col, timestamp);
        }

    }
    br.close();

    numRatings = scaleDist.size();
    ratingScale = new ArrayList<>(scaleDist.elementSet());
    Collections.sort(ratingScale);

    int numRows = numUsers(), numCols = numItems();

    // if min-rate = 0.0, shift the whole scale up by one step
    double minRate = ratingScale.get(0).doubleValue();
    double epsilon = minRate == 0.0 ? ratingScale.get(1).doubleValue() - minRate : 0;
    if (epsilon > 0) {
        // shift the scale up by one step
        for (int i = 0, im = ratingScale.size(); i < im; i++) {
            double val = ratingScale.get(i);
            ratingScale.set(i, val + epsilon);
        }
        // update data table
        for (int row = 0; row < numRows; row++) {
            for (int col = 0; col < numCols; col++) {
                if (dataTable.contains(row, col))
                    dataTable.put(row, col, dataTable.get(row, col) + epsilon);
            }
        }
    }

    String dateRange = "";
    if (cols.length >= 4)
        dateRange = String.format(", Timestamps = {%s, %s}", Dates.toString(minTimestamp),
                Dates.toString(maxTimestamp));

    Logs.debug("With Specs: {Users, {}} = {{}, {}, {}}, Scale = {{}}{}",
            (isItemAsUser ? "Users, Links" : "Items, Ratings"), numRows, numCols, numRatings,
            Strings.toString(ratingScale), dateRange);

    // build rating matrix
    rateMatrix = new SparseMatrix(numRows, numCols, dataTable, colMap);

    if (timeTable != null)
        timeMatrix = new SparseMatrix(numRows, numCols, timeTable, colMap);

    // release memory of data table
    dataTable = null;
    timeTable = null;

    return new SparseMatrix[] { rateMatrix, timeMatrix };
}

From source file:org.hawk.service.emf.EffectiveMetamodelRulesetSerializer.java

public EffectiveMetamodelRuleset load(Properties props) {
    final Map<Integer, String> incMetamodels = new TreeMap<>();
    final Table<Integer, Integer, String> incTypeTable = TreeBasedTable.create();
    final Table<Integer, Integer, ImmutableSet<String>> incSlotTable = TreeBasedTable.create();

    final Map<Integer, String> excMetamodels = new TreeMap<>();
    final Table<Integer, Integer, String> excTypeTable = TreeBasedTable.create();
    final Table<Integer, Integer, ImmutableSet<String>> excSlotTable = TreeBasedTable.create();

    for (String propName : props.stringPropertyNames()) {
        if (propName.startsWith(propertyPrefix)) {
            final String raw = propName.substring(propertyPrefix.length());
            boolean isIncludes;
            String unprefixed;
            if (raw.startsWith(INCLUDES_SUFFIX)) {
                isIncludes = true;
                unprefixed = raw.substring(INCLUDES_SUFFIX.length());
            } else if (raw.startsWith(EXCLUDES_SUFFIX)) {
                isIncludes = false;
                unprefixed = raw.substring(EXCLUDES_SUFFIX.length());
            } else {
                continue;
            }
            final String[] parts = unprefixed.split("[" + SEPARATOR + "]");

            final String propValue = props.getProperty(propName).trim();
            int iMetamodel, iType;
            switch (parts.length) {
            case 1: // prefix0 -> URI of the first metamodel
                iMetamodel = Integer.valueOf(parts[0]);
                String mmURI = propValue;
                if (isIncludes) {
                    incMetamodels.put(iMetamodel, mmURI);
                } else {
                    excMetamodels.put(iMetamodel, mmURI);
                }
                break;
            case 2: // prefix0.0 -> name of the first type of the first metamodel
                iMetamodel = Integer.valueOf(parts[0]);
                iType = Integer.valueOf(parts[1]);
                String type = propValue;
                if (isIncludes) {
                    incTypeTable.put(iMetamodel, iType, type);
                } else {
                    excTypeTable.put(iMetamodel, iType, type);
                }
                break;
            case 3: // prefix0.0.slots -> comma-separated slots for the first type of first metamodel (if not all)
                iMetamodel = Integer.valueOf(parts[0]);
                iType = Integer.valueOf(parts[1]);
                ImmutableSet<String> slots;
                if (propValue.length() > 0) {
                    slots = ImmutableSet.copyOf(propValue.split("[" + SEPARATOR + "]"));
                } else {
                    slots = ImmutableSet.of();
                }
                if (isIncludes) {
                    incSlotTable.put(iMetamodel, iType, slots);
                } else {
                    excSlotTable.put(iMetamodel, iType, slots);
                }
                break;
            default:
                throw new IllegalArgumentException(String
                        .format("Property %s should only have 1-3 parts, but has %d", propName, parts.length));
            }
        }
    }

    final EffectiveMetamodelRuleset ruleset = new EffectiveMetamodelRuleset();

    for (final Cell<Integer, Integer, ImmutableSet<String>> mmEntry : incSlotTable.cellSet()) {
        final String mmURI = incMetamodels.get(mmEntry.getRowKey());
        final String typeName = incTypeTable.get(mmEntry.getRowKey(), mmEntry.getColumnKey());
        final ImmutableSet<String> slots = mmEntry.getValue();

        if (EffectiveMetamodelRuleset.WILDCARD.equals(typeName)) {
            ruleset.include(mmURI);
        } else if (slots.contains(EffectiveMetamodelRuleset.WILDCARD)) {
            ruleset.include(mmURI, typeName);
        } else {
            ruleset.include(mmURI, typeName, slots);
        }
    }

    for (final Cell<Integer, Integer, ImmutableSet<String>> mmEntry : excSlotTable.cellSet()) {
        final String mmURI = excMetamodels.get(mmEntry.getRowKey());
        final String typeName = excTypeTable.get(mmEntry.getRowKey(), mmEntry.getColumnKey());
        final ImmutableSet<String> slots = mmEntry.getValue();

        if (EffectiveMetamodelRuleset.WILDCARD.equals(typeName)) {
            ruleset.exclude(mmURI);
        } else if (slots.contains(EffectiveMetamodelRuleset.WILDCARD)) {
            ruleset.exclude(mmURI, typeName);
        } else {
            ruleset.exclude(mmURI, typeName, slots);
        }
    }

    return ruleset;
}

From source file:i5.las2peer.services.recommender.librec.data.FilmTrustDataDAO.java

/**
 * Read data from the data file. Note that duplicate lines are not handled.
 *
 * @param cols
 *            the indexes of the relevant columns in the data file: {user, item, [rating, timestamp] (optional)}
 * @param binThold
 *            the threshold to binarize a rating. If a rating is greater than the threshold, the value will be 1;
 *            otherwise 0. To disable this feature, i.e., keep the original rating value, set the threshold to a
 *            negative value.
 * @return a sparse matrix storing all the relevant data
 */
public SparseMatrix[] readData(int[] cols, double binThold) throws Exception {

    Logs.info(String.format("Dataset: %s", Strings.last(dataPath, 38)));

    // Table {row-id, col-id, rate}
    Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
    // Table {row-id, col-id, timestamp}
    Table<Integer, Integer, Long> timeTable = null;
    // Map {col-id, multiple row-id}: used to quickly build the rating matrix
    Multimap<Integer, Integer> colMap = HashMultimap.create();

    BufferedReader br = FileIO.getReader(dataPath);
    String line = null;
    minTimestamp = Long.MAX_VALUE;
    maxTimestamp = Long.MIN_VALUE;
    while ((line = br.readLine()) != null) {
        if (isHeadline()) {
            setHeadline(false);
            continue;
        }

        String[] data = line.trim().split("[ \t,]+");

        String user = data[cols[0]];
        String item = data[cols[1]];
        Double rate = (cols.length >= 3 && data.length >= 3) ? Double.valueOf(data[cols[2]]) : 1.0;

        // binarize the rating for item recommendation task
        if (binThold >= 0)
            rate = rate > binThold ? 1.0 : 0.0;

        scaleDist.add(rate);

        // inner id starting from 0
        int row = userIds.containsKey(user) ? userIds.get(user) : userIds.size();
        userIds.put(user, row);

        int col = itemIds.containsKey(item) ? itemIds.get(item) : itemIds.size();
        itemIds.put(item, col);

        dataTable.put(row, col, rate);
        colMap.put(col, row);

        // record rating's issuing time
        if (cols.length >= 4 && data.length >= 4) {
            if (timeTable == null)
                timeTable = HashBasedTable.create();

            // convert to milliseconds
            long mms = 0L;
            try {
                mms = Long.parseLong(data[cols[3]]); // Long.parseLong cannot parse scientific notation such as "9.7323480e+008"
            } catch (NumberFormatException e) {
                mms = (long) Double.parseDouble(data[cols[3]]);
            }
            long timestamp = timeUnit.toMillis(mms);

            if (minTimestamp > timestamp)
                minTimestamp = timestamp;

            if (maxTimestamp < timestamp)
                maxTimestamp = timestamp;

            timeTable.put(row, col, timestamp);
        }

    }
    br.close();

    numRatings = scaleDist.size();
    ratingScale = new ArrayList<>(scaleDist.elementSet());
    Collections.sort(ratingScale);

    int numRows = numUsers(), numCols = numItems();

    // if min-rate = 0.0, shift the whole scale up by one step
    double minRate = ratingScale.get(0).doubleValue();
    double epsilon = minRate == 0.0 ? ratingScale.get(1).doubleValue() - minRate : 0;
    if (epsilon > 0) {
        // shift the scale up by one step
        for (int i = 0, im = ratingScale.size(); i < im; i++) {
            double val = ratingScale.get(i);
            ratingScale.set(i, val + epsilon);
        }
        // update data table
        for (int row = 0; row < numRows; row++) {
            for (int col = 0; col < numCols; col++) {
                if (dataTable.contains(row, col))
                    dataTable.put(row, col, dataTable.get(row, col) + epsilon);
            }
        }
    }

    String dateRange = "";
    if (cols.length >= 4)
        dateRange = String.format(", Timestamps = {%s, %s}", Dates.toString(minTimestamp),
                Dates.toString(maxTimestamp));

    Logs.debug("With Specs: {Users, {}} = {{}, {}, {}}, Scale = {{}}{}",
            (isItemAsUser ? "Users, Links" : "Items, Ratings"), numRows, numCols, numRatings,
            Strings.toString(ratingScale), dateRange);

    // build rating matrix
    rateMatrix = new SparseMatrix(numRows, numCols, dataTable, colMap);

    if (timeTable != null)
        timeMatrix = new SparseMatrix(numRows, numCols, timeTable, colMap);

    // release memory of data table
    dataTable = null;
    timeTable = null;

    return new SparseMatrix[] { rateMatrix, timeMatrix };
}

From source file:lcmc.crm.domain.CrmXml.java

void parseLrmResources(final String unameLowerCase, final Node lrmNode, final Collection<String> resList,
        final Map<String, ResourceAgent> resourceTypeMap, final Map<String, Map<String, String>> parametersMap,
        final Map<String, Set<String>> inLRMList, final Collection<String> orphanedList,
        final Table<String, String, Set<String>> failedClonesMap) {
    final Node lrmResourcesNode = XMLTools.getChildNode(lrmNode, "lrm_resources");
    final NodeList lrmResources = lrmResourcesNode.getChildNodes();
    for (int j = 0; j < lrmResources.getLength(); j++) {
        final Node rscNode = lrmResources.item(j);
        if ("lrm_resource".equals(rscNode.getNodeName())) {
            final String resId = XMLTools.getAttribute(rscNode, "id");
            final Pattern p = Pattern.compile("(.*):(\\d+)$");
            final Matcher m = p.matcher(resId);
            final String crmId;
            if (m.matches()) {
                crmId = m.group(1);
                Set<String> clones = failedClonesMap.get(unameLowerCase, crmId);
                if (clones == null) {
                    clones = new LinkedHashSet<String>();
                    failedClonesMap.put(unameLowerCase, crmId, clones);
                }
                clones.add(m.group(2));
            } else {
                crmId = resId;
            }
            if (!resourceTypeMap.containsKey(crmId)) {
                final String raClass = XMLTools.getAttribute(rscNode, "class");
                String provider = XMLTools.getAttribute(rscNode, "provider");
                if (provider == null) {
                    provider = ResourceAgent.HEARTBEAT_PROVIDER;
                }
                final String type = XMLTools.getAttribute(rscNode, "type");
                resourceTypeMap.put(crmId, getResourceAgent(type, provider, raClass));
                resList.add(crmId);
                parametersMap.put(crmId, new HashMap<String, String>());
                orphanedList.add(crmId);
            }
            /* it is in LRM */
            Set<String> inLRMOnHost = inLRMList.get(unameLowerCase);
            if (inLRMOnHost == null) {
                inLRMOnHost = new HashSet<String>();
                inLRMList.put(unameLowerCase, inLRMOnHost);
            }
            inLRMOnHost.add(crmId);
        }
    }
}

From source file:librec.data.DataDAO.java

/**
 * Read data from the data file. Note that duplicate lines are not handled.
 *
 * @param cols
 *            the indexes of the relevant columns in the data file: {user, item, [rating, timestamp] (optional)}
 * @param binThold
 *            the threshold to binarize a rating. If a rating is greater than the threshold, the value will be 1;
 *            otherwise 0. To disable this feature, i.e., keep the original rating value, set the threshold to a
 *            negative value.
 * @return a sparse matrix storing all the relevant data
 */
public SparseMatrix[] readData(int[] cols, double binThold) throws Exception {

    Logs.info(String.format("Dataset: %s", Strings.last(dataPath, 38)));

    // Table {row-id, col-id, rate}
    Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
    // Table {row-id, col-id, timestamp}
    Table<Integer, Integer, Long> timeTable = null;
    // Map {col-id, multiple row-id}: used to quickly build the rating matrix
    Multimap<Integer, Integer> colMap = HashMultimap.create();

    BufferedReader br = FileIO.getReader(dataPath);
    String line = null;
    minTimestamp = Long.MAX_VALUE;
    maxTimestamp = Long.MIN_VALUE;
    while ((line = br.readLine()) != null) {
        if (isHeadline()) {
            setHeadline(false);
            continue;
        }

        String[] data = line.trim().split("[ \t,]+");

        String user = data[cols[0]];
        String item = data[cols[1]];
        Double rate = (cols.length >= 3 && data.length >= 3) ? Double.valueOf(data[cols[2]]) : 1.0;
        //System.out.println(user + " @" + item+" @"+rate);
        // binarize the rating for item recommendation task
        if (binThold >= 0)
            rate = rate > binThold ? 1.0 : 0.0;

        scaleDist.add(rate);

        // inner id starting from 0
        int row = userIds.containsKey(user) ? userIds.get(user) : userIds.size();
        userIds.put(user, row);

        int col = itemIds.containsKey(item) ? itemIds.get(item) : itemIds.size();
        itemIds.put(item, col);

        dataTable.put(row, col, rate);
        colMap.put(col, row);

        // record rating's issuing time
        if (cols.length >= 4 && data.length >= 4) {
            if (timeTable == null)
                timeTable = HashBasedTable.create();

            // convert to milliseconds
            long mms = 0L;
            try {
                mms = Long.parseLong(data[cols[3]]); // Long.parseLong cannot parse scientific notation such as "9.7323480e+008"
            } catch (NumberFormatException e) {
                mms = (long) Double.parseDouble(data[cols[3]]);
            }
            long timestamp = timeUnit.toMillis(mms);

            if (minTimestamp > timestamp)
                minTimestamp = timestamp;

            if (maxTimestamp < timestamp)
                maxTimestamp = timestamp;

            timeTable.put(row, col, timestamp);
        }

    }
    br.close();

    numRatings = scaleDist.size();
    ratingScale = new ArrayList<>(scaleDist.elementSet());
    Collections.sort(ratingScale);

    int numRows = numUsers(), numCols = numItems();

    // if min-rate = 0.0, shift the whole scale up by one step
    double minRate = ratingScale.get(0).doubleValue();
    double epsilon = minRate == 0.0 ? ratingScale.get(1).doubleValue() - minRate : 0;
    if (epsilon > 0) {
        // shift the scale up by one step
        for (int i = 0, im = ratingScale.size(); i < im; i++) {
            double val = ratingScale.get(i);
            ratingScale.set(i, val + epsilon);
        }
        // update data table
        for (int row = 0; row < numRows; row++) {
            for (int col = 0; col < numCols; col++) {
                if (dataTable.contains(row, col))
                    dataTable.put(row, col, dataTable.get(row, col) + epsilon);
            }
        }
    }

    String dateRange = "";
    if (cols.length >= 4)
        dateRange = String.format(", Timestamps = {%s, %s}", Dates.toString(minTimestamp),
                Dates.toString(maxTimestamp));

    Logs.debug("With Specs: {Users, {}} = {{}, {}, {}}, Scale = {{}}{}",
            (isItemAsUser ? "Users, Links" : "Items, Ratings"), numRows, numCols, numRatings,
            Strings.toString(ratingScale), dateRange);

    // build rating matrix
    rateMatrix = new SparseMatrix(numRows, numCols, dataTable, colMap);

    if (timeTable != null)
        timeMatrix = new SparseMatrix(numRows, numCols, timeTable, colMap);

    // release memory of data table
    dataTable = null;
    timeTable = null;

    return new SparseMatrix[] { rateMatrix, timeMatrix };
}

From source file:edu.udo.scaffoldhunter.model.db.DbManagerHibernate.java

@Override
public Table<Scaffold, String, Integer> getStringDistribution(Scaffold root, Subset subset,
        PropertyDefinition propDef) throws DatabaseException {
    Preconditions.checkArgument(propDef.isStringProperty() && !propDef.isScaffoldProperty());

    Table<Scaffold, String, Integer> dist = HashBasedTable.create();
    Map<Integer, Scaffold> scaffolds = Maps.newHashMap();
    Session hibernateSession = null;

    for (Scaffold scaf : Scaffolds.getSubtreePreorderIterable(root)) {
        scaffolds.put(scaf.getId(), scaf);
    }
    try {
        hibernateSession = sessionFactory.getCurrentSession();
        hibernateSession.beginTransaction();
        Query query = hibernateSession.createQuery(
                "SELECT scaf.id, prop.value " + "FROM Scaffold AS scaf JOIN scaf.generationMolecules scafmol, "
                        + " MoleculeStringProperty prop, " + "Subset AS subs JOIN subs.molecules submol "
                        + "WHERE scafmol = prop.molecule " + "AND submol = scafmol " + "AND scaf.tree = :tree "
                        + "AND subs = :subset AND prop.type = :propdef");
        query.setParameter("tree", root.getTree());
        query.setParameter("subset", subset);
        query.setParameter("propdef", propDef);

        @SuppressWarnings("unchecked")
        List<Object[]> result = query.list();

        hibernateSession.getTransaction().commit();

        for (Object[] o : result) {
            Scaffold scaf = scaffolds.get(o[0]);
            String str = (String) o[1];
            if (scaf != null) {
                Integer i = dist.get(scaf, o[1]);
                if (i == null) {
                    dist.put(scaf, str, 1);
                } else {
                    dist.put(scaf, str, i + 1);
                }
            }
        }

        return dist;
    } catch (HibernateException ex) {
        ex.printStackTrace();
        closeAndRollBackErroneousSession(hibernateSession);
        throw new DatabaseException(ex);
    }
}

From source file:lcmc.crm.domain.CrmXml.java

private void parseTransientAttributes(final String uname, final Node transientAttrNode,
        final Table<String, String, String> failedMap, final Table<String, String, Set<String>> failedClonesMap,
        final Map<String, String> pingCountMap) {
    /* <instance_attributes> */
    final Node instanceAttrNode = XMLTools.getChildNode(transientAttrNode, "instance_attributes");
    /* <nvpair...> */
    if (instanceAttrNode != null) {
        final NodeList nvpairsRes;
        if (Tools.versionBeforePacemaker(host)) {
            /* <attributes> only until 2.1.4 */
            final Node attrNode = XMLTools.getChildNode(instanceAttrNode, "attributes");
            nvpairsRes = attrNode.getChildNodes();
        } else {
            nvpairsRes = instanceAttrNode.getChildNodes();
        }
        for (int j = 0; j < nvpairsRes.getLength(); j++) {
            final Node optionNode = nvpairsRes.item(j);
            if (optionNode.getNodeName().equals("nvpair")) {
                final String name = XMLTools.getAttribute(optionNode, "name");
                final String value = XMLTools.getAttribute(optionNode, "value");
                /* TODO: last-failure-" */
                if ("pingd".equals(name)) {
                    pingCountMap.put(uname, value);
                } else if (name.indexOf(FAIL_COUNT_PREFIX) == 0) {
                    final String resId = name.substring(FAIL_COUNT_PREFIX.length());
                    final String unameLowerCase = uname.toLowerCase(Locale.US);
                    failedMap.put(unameLowerCase, resId, value);
                    final Pattern p = Pattern.compile("(.*):(\\d+)$");
                    final Matcher m = p.matcher(resId);
                    if (m.matches()) {
                        final String crmId = m.group(1);
                        Set<String> clones = failedClonesMap.get(unameLowerCase, crmId);
                        if (clones == null) {
                            clones = new LinkedHashSet<String>();
                            failedClonesMap.put(unameLowerCase, crmId, clones);
                        }
                        clones.add(m.group(2));
                        failedMap.put(uname.toLowerCase(Locale.US), crmId, value);
                    }
                }
            }
        }
    }
}

From source file:com.recalot.model.rec.librec.DataDAO.java

/**
 * Read data from the data file. Note that duplicate lines are not handled.
 *
 * @return a sparse matrix storing all the relevant data
 */
public SparseMatrix[] readData(String... relationshipType) throws Exception {

    // Table {row-id, col-id, rate}
    Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
    // Table {row-id, col-id, timestamp}
    Table<Integer, Integer, Long> timeTable = null;
    // Table {row-id, col-id, relationship}
    Table<Integer, Integer, Double> relationshipTable = null;
    // Map {col-id, multiple row-id}: used to quickly build the relationship matrix
    Multimap<Integer, Integer> rColMap = null;
    // Map {col-id, multiple row-id}: used to quickly build the rating matrix
    Multimap<Integer, Integer> colMap = HashMultimap.create();

    minTimestamp = Long.MAX_VALUE;
    maxTimestamp = Long.MIN_VALUE;

    //read user ids
    for (User user : dataSet.getUsers()) {
        // inner id starting from 0
        int row = userIds.containsKey(InnerIds.getId(user.getId())) ? userIds.get(InnerIds.getId(user.getId()))
                : userIds.size();
        userIds.put(InnerIds.getId(user.getId()), row);
    }

    //read item ids
    for (Item item : dataSet.getItems()) {
        // inner id starting from 0
        int col = itemIds.containsKey(InnerIds.getId(item.getId())) ? itemIds.get(InnerIds.getId(item.getId()))
                : itemIds.size();
        itemIds.put(InnerIds.getId(item.getId()), col);
    }

    if (dataSet.getRelationCount() > 0) {

        relationshipTable = HashBasedTable.create();

        rColMap = HashMultimap.create();
        //read social information
        for (Relation relation : dataSet.getRelations()) {
            if (relationshipType.length == 0 || relation.getType().toLowerCase().equals(relationshipType[0])) {

                // inner id starting from 0
                if (!userIds.containsKey(InnerIds.getId(relation.getFromId()))) {
                    userIds.put(InnerIds.getId(relation.getFromId()), userIds.size());
                }

                // inner id starting from 0
                if (!userIds.containsKey(InnerIds.getId(relation.getToId()))) {
                    userIds.put(InnerIds.getId(relation.getToId()), userIds.size());
                }

                int row = userIds.get(InnerIds.getId(relation.getFromId()));
                int col = userIds.get(InnerIds.getId(relation.getToId()));

                relationshipTable.put(row, col, 1.0);
                rColMap.put(col, row);
            }
        }
    }

    //read interactions
    for (Interaction interaction : dataSet.getInteractions()) {

        String itemId = interaction.getItemId();
        String userId = interaction.getUserId();
        String rating = interaction.getValue();

        int row = userIds.containsKey(InnerIds.getId(userId)) ? userIds.get(InnerIds.getId(userId))
                : userIds.size();
        userIds.put(InnerIds.getId(userId), row);

        int col = itemIds.containsKey(InnerIds.getId(itemId)) ? itemIds.get(InnerIds.getId(itemId))
                : itemIds.size();
        itemIds.put(InnerIds.getId(itemId), col);

        Double r = Double.valueOf(rating);
        if (r.isNaN())
            r = 1.0;

        /*
        // binarize the rating for item recommendation task
        if (binThold >= 0)
        r = r > binThold ? 1.0 : 0.0;
        */
        dataTable.put(row, col, r);
        colMap.put(col, row);

        scaleDist.add(r);

        // record rating's issuing time
        if (interaction.getTimeStamp() != null) {
            if (timeTable == null)
                timeTable = HashBasedTable.create();

            long timestamp = interaction.getTimeStamp().getTime();

            if (minTimestamp > timestamp)
                minTimestamp = timestamp;

            if (maxTimestamp < timestamp)
                maxTimestamp = timestamp;

            timeTable.put(row, col, timestamp);
        }
    }

    numRatings = scaleDist.size();
    ratingScale = new ArrayList<>(scaleDist.elementSet());
    Collections.sort(ratingScale);

    int numRows = numUsers(), numCols = numItems();

    // if min-rate = 0.0, shift the whole scale up by one step
    double minRate = ratingScale.get(0).doubleValue();
    double epsilon = minRate == 0.0 ? ratingScale.get(1).doubleValue() - minRate : 0;
    if (epsilon > 0) {
        // shift the scale up by one step
        for (int i = 0, im = ratingScale.size(); i < im; i++) {
            double val = ratingScale.get(i);
            ratingScale.set(i, val + epsilon);
        }
        // update data table
        for (int row = 0; row < numRows; row++) {
            for (int col = 0; col < numCols; col++) {
                if (dataTable.contains(row, col))
                    dataTable.put(row, col, dataTable.get(row, col) + epsilon);
            }
        }
    }

    // build rating matrix
    rateMatrix = new SparseMatrix(numRows, numCols, dataTable, colMap);

    if (timeTable != null)
        timeMatrix = new SparseMatrix(numRows, numCols, timeTable, colMap);

    if (relationshipTable != null)
        relationshipMatrix = new SparseMatrix(numRows, numRows, relationshipTable, rColMap);

    // release memory of data table
    dataTable = null;
    timeTable = null;
    relationshipTable = null;

    return new SparseMatrix[] { rateMatrix, timeMatrix, relationshipMatrix };
}