Example usage for org.apache.commons.lang3 StringEscapeUtils unescapeJava

List of usage examples for org.apache.commons.lang3 StringEscapeUtils unescapeJava

Introduction

On this page you can find example usage for org.apache.commons.lang3 StringEscapeUtils.unescapeJava.

Prototype

public static final String unescapeJava(final String input) 

Document

Unescapes any Java literals found in the String.
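
A minimal sketch of the behavior (the input strings are illustrative, not taken from the projects below; note that this class was deprecated in Commons Lang 3.6 in favour of org.apache.commons.text.StringEscapeUtils):

import org.apache.commons.lang3.StringEscapeUtils;

public class UnescapeJavaDemo {
    public static void main(String[] args) {
        // The two characters '\' and 'n' become a real newline
        System.out.println(StringEscapeUtils.unescapeJava("line1\\nline2"));

        // Unicode escapes are resolved: \u00e9 becomes 'é'
        System.out.println(StringEscapeUtils.unescapeJava("caf\\u00e9")); // café

        // An escaped backslash collapses to a single literal backslash
        System.out.println(StringEscapeUtils.unescapeJava("C:\\\\temp")); // C:\temp

        // A null input returns null rather than throwing
        System.out.println(StringEscapeUtils.unescapeJava(null));
    }
}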

Usage

From source file:com.magestore.app.pos.api.m1.config.POSConfigDataAccessM1.java

private ConfigPriceFormat getPriceFormat(LinkedTreeMap priceFormat) {
    String currencySymbol = (String) mConfig.getValue("currentCurrencySymbol");
    String currency_symbol = "";
    if (currencySymbol.length() > 0) {
        // Magento may report a bare escape body such as "u20ac"; prepend the
        // missing backslash so unescapeJava can resolve it. Already-escaped
        // values and plain symbols are unescaped as-is (a no-op for the latter).
        if (currencySymbol.startsWith("u")) {
            currency_symbol = StringEscapeUtils.unescapeJava("\\" + currencySymbol);
        } else {
            currency_symbol = StringEscapeUtils.unescapeJava(currencySymbol);
        }
    }
    String pattern = priceFormat.get("pattern").toString();
    int precision = ((Double) priceFormat.get("precision")).intValue();
    int requiredPrecision = ((Double) priceFormat.get("requiredPrecision")).intValue();
    String decimalSymbol = priceFormat.get("decimalSymbol").toString();
    String groupSymbol = priceFormat.get("groupSymbol").toString();
    int groupLength = ((Double) priceFormat.get("groupLength")).intValue();
    int integerRequired = ((Double) priceFormat.get("integerRequired")).intValue();

    ConfigPriceFormat configPriceFormat = new PosConfigPriceFormat();
    configPriceFormat.setPattern(pattern);
    configPriceFormat.setPrecision(precision);
    configPriceFormat.setRequirePrecision(requiredPrecision);
    configPriceFormat.setDecimalSymbol(decimalSymbol);
    configPriceFormat.setGroupSymbol(groupSymbol);
    configPriceFormat.setGroupLength(groupLength);
    configPriceFormat.setIntegerRequied(integerRequired);
    configPriceFormat.setCurrencySymbol(currency_symbol);

    return configPriceFormat;
}
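
The branching above boils down to one normalization: Magento may report the currency symbol as a bare escape body such as u20ac, which needs a leading backslash before unescapeJava can resolve it, while already-escaped values and plain symbols pass through unchanged. A standalone sketch of that idea (the helper name and symbol values are hypothetical, not from the source):

import org.apache.commons.lang3.StringEscapeUtils;

public class CurrencySymbolDemo {
    // Hypothetical helper mirroring the branches above: prepend the missing
    // backslash when the value is a bare escape body like "u20ac", then unescape.
    static String normalizeCurrencySymbol(String raw) {
        if (raw == null || raw.isEmpty()) {
            return "";
        }
        String escaped = raw.startsWith("u") ? "\\" + raw : raw;
        return StringEscapeUtils.unescapeJava(escaped);
    }

    public static void main(String[] args) {
        System.out.println(normalizeCurrencySymbol("u20ac"));   // €
        System.out.println(normalizeCurrencySymbol("\\u00a5")); // ¥ (already escaped)
        System.out.println(normalizeCurrencySymbol("$"));       // $ (no-op)
    }
}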

From source file:com.taobao.android.tools.TPatchTool.java

protected File getLastPatchFile(String baseApkVersion, String productName, File outPatchDir)
        throws IOException {
    try {
        String httpUrl = ((TpatchInput) input).LAST_PATCH_URL + "baseVersion=" + baseApkVersion
                + "&productIdentifier=" + productName;
        String response = HttpClientUtils.getUrl(httpUrl);
        if (StringUtils.isBlank(response) || response.equals("\"\"")) {
            return null;
        }
        File downLoadFolder = new File(outPatchDir, "LastPatch");
        downLoadFolder.mkdirs();
        File downLoadFile = new File(downLoadFolder, "lastpatch.tpatch");
        // The response body is a JSON string literal; unescape it and strip
        // the surrounding double quotes to recover the download URL.
        String downLoadUrl = StringEscapeUtils.unescapeJava(response);
        downloadTPath(downLoadUrl.substring(1, downLoadUrl.length() - 1), downLoadFile);

        return downLoadFile;
    } catch (Exception e) {
        // Any failure (network error, malformed response) means no usable last patch
        return null;
    }
}
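
The HTTP response here is a JSON string literal: a URL wrapped in double quotes, with its slashes possibly escaped as \/. unescapeJava drops the lone backslashes, and the substring call then strips the surrounding quotes. A standalone sketch with a made-up URL:

import org.apache.commons.lang3.StringEscapeUtils;

public class ResponseUnescapeDemo {
    public static void main(String[] args) {
        // Hypothetical raw response body: "http:\/\/host\/last.tpatch" (quotes included)
        String response = "\"http:\\/\\/host\\/last.tpatch\"";

        String unescaped = StringEscapeUtils.unescapeJava(response);
        // -> "http://host/last.tpatch", still wrapped in literal double quotes

        String url = unescaped.substring(1, unescaped.length() - 1);
        System.out.println(url); // http://host/last.tpatch
    }
}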

From source file:Creator.WidgetPanel.java

public String generateWidgetImport(String panelName, int panelID, List<String> l) {

    String importCode = "";
    String template = "(%s, %s, %s, %s, %s)";
    String htmlLinks = getHtmlLinks(panelID);

    Pattern p = Pattern.compile("\\d+");
    Matcher m;

    for (String s : l) {

        String w_id = "", w_x = "", w_y = "", content = "", p_id = String.valueOf(panelID);
        // Pull each widget field out of the raw JSON-like string by splitting
        // on its key marker, e.g. `widget_subclass": "`
        w_id = s.split("widget_subclass\": \"")[1].split("\",")[0];
        w_x = s.split("w_x\": \"")[1].split("\",")[0];
        w_y = s.split("w_y\": \"")[1].split("\",")[0];
        content = s.split("code\": \"")[1];
        if (content.contains("holder_")) {
            String oldHolderID = content.substring(content.indexOf("holder_"), content.indexOf("holder_") + 30);
            m = p.matcher(oldHolderID);
            if (m.find()) {
                oldHolderID = "holder_" + m.group();
            }

            String newHolderID = "holder_" + holderNum;
            holderNum++;
            content = content.replace(oldHolderID, newHolderID);
        }

        // Unescape any Java literals in the widget code
        content = StringEscapeUtils.unescapeJava(content);

        content = "$$" + content.substring(0, content.length() - 2) + "$$";

        if (!importCode.equals("")) {
            importCode += ",";
        }

        importCode += String.format(template, p_id, w_id, w_x, w_y, content);

    }

    importCode += "," + htmlLinks;

    return importCode;

}

From source file:Creator.WidgetPanel.java

public String getHtmlLinks(int pid) {

    String returnString = "";

    String template = "(%s, %s, %s, %s, %s)";

    Pattern p = Pattern.compile("\\d+");
    Matcher m;

    WidgetCode wc = mf.wgPanel.getWidgetCode(widgetCodeName);

    for (Map.Entry<String, LinkInfo> entry : ws.wpl.getLinks().entrySet()) {

        // For each entry, format a code string based on the default positions
        // and the given panel name and ID's
        String panelID = String.valueOf(entry.getValue().getPanelID());
        String panelName = entry.getValue().getPanelName();

        // String contents must be compared with equals(), not ==
        if (panelID.equals("-1") && !panelName.equals("Map")) {
            System.out.println("Continuing on " + panelName);
            continue;
        }

        String xPos = String.valueOf(entry.getValue().getXPos());
        String yPos = String.valueOf(entry.getValue().getYPos());
        String newCode = wc.getFullWidgetText().replace("`%XPOS%`", xPos).replace("`%YPOS%`", yPos)
                .replace("`%PANELID%`", panelID).replace("`%PANELNAME%`", panelName);

        String w_id = "", w_x = "", w_y = "", content = "", p_id = String.valueOf(pid);
        // Pull each widget field out of the raw JSON-like string by splitting
        // on its key marker, e.g. `widget_subclass": "`
        w_id = newCode.split("widget_subclass\": \"")[1].split("\",")[0];
        w_x = newCode.split("w_x\": \"")[1].split("\",")[0];
        w_y = newCode.split("w_y\": \"")[1].split("\",")[0];
        content = newCode.split("code\": \"")[1];
        if (content.contains("holder_")) {
            String oldHolderID = content.substring(content.indexOf("holder_"), content.indexOf("holder_") + 30);
            m = p.matcher(oldHolderID);
            if (m.find()) {
                oldHolderID = "holder_" + m.group();
            }

            String newHolderID = "holder_" + holderNum;
            holderNum++;
            content = content.replace(oldHolderID, newHolderID);
        }

        // Unescape any Java literals in the widget code
        content = StringEscapeUtils.unescapeJava(content);
        content = "$$" + content.substring(0, content.length() - 2) + "$$";

        if (!returnString.equals("")) {
            returnString += ",";
        }

        returnString += String.format(template, p_id, w_id, w_x, w_y, content);
    }

    return returnString;

}

From source file:net.team2xh.crt.language.compiler.Compiler.java

@Override
public String visitStringLiteral(StringLiteralContext ctx) {
    String str = ctx.getText();
    // Unescape characters
    // http://stackoverflow.com/a/7847310
    str = StringEscapeUtils.unescapeJava(str);
    return str.substring(1, str.length() - 1);
}
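
ANTLR's getText() returns the literal exactly as written in the parsed source, surrounding quotes included, which is why the method unescapes first and then drops the first and last characters. A standalone sketch with illustrative token text:

import org.apache.commons.lang3.StringEscapeUtils;

public class StringLiteralDemo {
    public static void main(String[] args) {
        // Token text exactly as it appears in the parsed source: "caf\u00e9!"
        String token = "\"caf\\u00e9!\"";
        String unescaped = StringEscapeUtils.unescapeJava(token); // quotes still attached
        String value = unescaped.substring(1, unescaped.length() - 1);
        System.out.println(value); // café!
    }
}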

From source file:org.apache.drill.common.util.DrillStringUtils.java

/**
 * Unescapes any Java literals found in the {@code String}.
 * For example, it will turn a sequence of {@code '\'} and
 * {@code 'n'} into a newline character, unless the {@code '\'}
 * is preceded by another {@code '\'}.
 *
 * @param input  the {@code String} to unescape, may be null
 * @return a new unescaped {@code String}, {@code null} if null string input
 */
public static final String unescapeJava(String input) {
    return StringEscapeUtils.unescapeJava(input);
}

From source file:org.apache.drill.exec.store.dfs.FormatPluginOptionsDescriptor.java

/**
 * Creates an instance of the FormatPluginConfig based on the passed parameters.
 * @param t the signature and the parameters passed to the table function
 * @return the corresponding config
 */
FormatPluginConfig createConfigForTable(TableInstance t) {
    // Per the constructor, the first param is always "type"
    TableParamDef typeParamDef = t.sig.params.get(0);
    Object typeParam = t.params.get(0);
    if (!typeParamDef.name.equals("type") || typeParamDef.type != String.class || !(typeParam instanceof String)
            || !typeName.equalsIgnoreCase((String) typeParam)) {
        // if we reach here, there's a bug as all signatures generated start with a type parameter
        throw UserException.parseError()
                .message("This function signature is not supported: %s\n" + "expecting %s", t.presentParams(),
                        this.presentParams())
                .addContext("table", t.sig.name).build(logger);
    }
    FormatPluginConfig config;
    try {
        config = pluginConfigClass.newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
        throw UserException
                .parseError(e).message("configuration for format of type %s can not be created (class: %s)",
                        this.typeName, pluginConfigClass.getName())
                .addContext("table", t.sig.name).build(logger);
    }
    for (int i = 1; i < t.params.size(); i++) {
        Object param = t.params.get(i);
        if (param == null) {
            // when null is passed, we leave the default defined in the config class
            continue;
        }
        if (param instanceof String) {
            // normalize Java literals, ex: \t, \n, \r
            param = StringEscapeUtils.unescapeJava((String) param);
        }
        TableParamDef paramDef = t.sig.params.get(i);
        TableParamDef expectedParamDef = this.functionParamsByName.get(paramDef.name);
        if (expectedParamDef == null || expectedParamDef.type != paramDef.type) {
            throw UserException.parseError()
                    .message("The parameters provided are not applicable to the type specified:\n"
                            + "provided: %s\nexpected: %s", t.presentParams(), this.presentParams())
                    .addContext("table", t.sig.name).build(logger);
        }
        try {
            Field field = pluginConfigClass.getField(paramDef.name);
            field.setAccessible(true);
            if (field.getType() == char.class && param instanceof String) {
                String stringParam = (String) param;
                if (stringParam.length() != 1) {
                    throw UserException.parseError()
                            .message("Expected single character but was String: %s", stringParam)
                            .addContext("table", t.sig.name).addContext("parameter", paramDef.name)
                            .build(logger);
                }
                param = stringParam.charAt(0);
            }
            field.set(config, param);
        } catch (IllegalAccessException | NoSuchFieldException | SecurityException e) {
            throw UserException.parseError(e)
                    .message("can not set value %s to parameter %s: %s", param, paramDef.name, paramDef.type)
                    .addContext("table", t.sig.name).addContext("parameter", paramDef.name).build(logger);
        }
    }
    return config;
}
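
The unescape step above is what lets a user type a delimiter such as \t or \n as two plain characters in a table-function parameter and have it arrive in the config as a real control character, including the single-character case handled by the char-field branch. A standalone sketch (the parameter value is illustrative):

import org.apache.commons.lang3.StringEscapeUtils;

public class ParamNormalizeDemo {
    public static void main(String[] args) {
        // The parameter as typed by the user: the two characters '\' and 't'
        String param = "\\t";

        String normalized = StringEscapeUtils.unescapeJava(param);
        System.out.println((int) normalized.charAt(0)); // 9, i.e. a real tab

        // Mirrors the char-typed field handling above: a single-character
        // String is converted to a char before being assigned via reflection.
        char delimiter = normalized.charAt(0);
        System.out.println(delimiter == '\t'); // true
    }
}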

From source file:org.apache.drill.exec.store.hive.HiveStoragePlugin.java

@Override
public HiveScan getPhysicalScan(String userName, JSONOptions selection, List<SchemaPath> columns,
        SessionOptionManager options) throws IOException {
    HiveReadEntry hiveReadEntry = selection.getListWith(new ObjectMapper(), new TypeReference<HiveReadEntry>() {
    });
    try {
        if (hiveReadEntry.getJdbcTableType() == TableType.VIEW) {
            throw new UnsupportedOperationException(
                    "Querying views created in Hive from Drill is not supported in current version.");
        }

        Map<String, String> confProperties = new HashMap<>();
        if (options != null) {
            String value = StringEscapeUtils
                    .unescapeJava(options.getString(ExecConstants.HIVE_CONF_PROPERTIES));
            logger.trace("[{}] is set to {}.", ExecConstants.HIVE_CONF_PROPERTIES, value);
            try {
                Properties properties = new Properties();
                properties.load(new StringReader(value));
                confProperties = properties.stringPropertyNames().stream()
                        .collect(Collectors.toMap(Function.identity(), properties::getProperty, (o, n) -> n));
            } catch (IOException e) {
                logger.warn("Unable to parse Hive conf properties {}, ignoring them.", value);
            }
        }

        return new HiveScan(userName, hiveReadEntry, this, columns, null, confProperties);
    } catch (ExecutionSetupException e) {
        throw new IOException(e);
    }
}
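
The session option here packs a whole java.util.Properties payload into a single string, with the two characters \n separating the entries; unescaping restores real line breaks before Properties.load parses them. A standalone sketch with made-up property names:

import java.io.IOException;
import java.io.StringReader;
import java.util.Properties;

import org.apache.commons.lang3.StringEscapeUtils;

public class HiveConfPropertiesDemo {
    public static void main(String[] args) throws IOException {
        // Hypothetical option value: entries joined by '\' + 'n', not real newlines
        String optionValue = "hive.key.one=a\\nhive.key.two=b";

        String unescaped = StringEscapeUtils.unescapeJava(optionValue);
        Properties properties = new Properties();
        properties.load(new StringReader(unescaped)); // now two separate lines

        System.out.println(properties.getProperty("hive.key.two")); // b
    }
}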

From source file:org.apache.flink.graph.driver.GraphMetrics.java

public static void main(String[] args) throws Exception {
    // Set up the execution environment
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().enableObjectReuse();

    ParameterTool parameters = ParameterTool.fromArgs(args);
    if (!parameters.has("directed")) {
        printUsage();
        return;
    }
    boolean directedAlgorithm = parameters.getBoolean("directed");

    GraphAnalytic vm;
    GraphAnalytic em;

    switch (parameters.get("input", "")) {
    case "csv": {
        String lineDelimiter = StringEscapeUtils
                .unescapeJava(parameters.get("input_line_delimiter", CsvOutputFormat.DEFAULT_LINE_DELIMITER));

        String fieldDelimiter = StringEscapeUtils
                .unescapeJava(parameters.get("input_field_delimiter", CsvOutputFormat.DEFAULT_FIELD_DELIMITER));

        GraphCsvReader reader = Graph.fromCsvReader(parameters.get("input_filename"), env)
                .ignoreCommentsEdges("#").lineDelimiterEdges(lineDelimiter).fieldDelimiterEdges(fieldDelimiter);

        switch (parameters.get("type", "")) {
        case "integer": {
            Graph<LongValue, NullValue, NullValue> graph = reader.keyType(LongValue.class);

            if (directedAlgorithm) {
                if (parameters.getBoolean("simplify", false)) {
                    graph = graph.run(
                            new org.apache.flink.graph.asm.simple.directed.Simplify<LongValue, NullValue, NullValue>());
                }

                vm = graph.run(
                        new org.apache.flink.graph.library.metric.directed.VertexMetrics<LongValue, NullValue, NullValue>());
                em = graph.run(
                        new org.apache.flink.graph.library.metric.directed.EdgeMetrics<LongValue, NullValue, NullValue>());
            } else {
                if (parameters.getBoolean("simplify", false)) {
                    graph = graph.run(
                            new org.apache.flink.graph.asm.simple.undirected.Simplify<LongValue, NullValue, NullValue>(
                                    false));
                }

                vm = graph.run(
                        new org.apache.flink.graph.library.metric.undirected.VertexMetrics<LongValue, NullValue, NullValue>());
                em = graph.run(
                        new org.apache.flink.graph.library.metric.undirected.EdgeMetrics<LongValue, NullValue, NullValue>());
            }
        }
            break;

        case "string": {
            Graph<StringValue, NullValue, NullValue> graph = reader.keyType(StringValue.class);

            if (directedAlgorithm) {
                if (parameters.getBoolean("simplify", false)) {
                    graph = graph.run(
                            new org.apache.flink.graph.asm.simple.directed.Simplify<StringValue, NullValue, NullValue>());
                }

                vm = graph.run(
                        new org.apache.flink.graph.library.metric.directed.VertexMetrics<StringValue, NullValue, NullValue>());
                em = graph.run(
                        new org.apache.flink.graph.library.metric.directed.EdgeMetrics<StringValue, NullValue, NullValue>());
            } else {
                if (parameters.getBoolean("simplify", false)) {
                    graph = graph.run(
                            new org.apache.flink.graph.asm.simple.undirected.Simplify<StringValue, NullValue, NullValue>(
                                    false));
                }

                vm = graph.run(
                        new org.apache.flink.graph.library.metric.undirected.VertexMetrics<StringValue, NullValue, NullValue>());
                em = graph.run(
                        new org.apache.flink.graph.library.metric.undirected.EdgeMetrics<StringValue, NullValue, NullValue>());
            }
        }
            break;

        default:
            printUsage();
            return;
        }
    }
        break;

    case "rmat": {
        int scale = parameters.getInt("scale", DEFAULT_SCALE);
        int edgeFactor = parameters.getInt("edge_factor", DEFAULT_EDGE_FACTOR);

        RandomGenerableFactory<JDKRandomGenerator> rnd = new JDKRandomGeneratorFactory();

        long vertexCount = 1L << scale;
        long edgeCount = vertexCount * edgeFactor;

        Graph<LongValue, NullValue, NullValue> graph = new RMatGraph<>(env, rnd, vertexCount, edgeCount)
                .generate();

        if (directedAlgorithm) {
            if (scale > 32) {
                Graph<LongValue, NullValue, NullValue> newGraph = graph.run(
                        new org.apache.flink.graph.asm.simple.directed.Simplify<LongValue, NullValue, NullValue>());

                vm = newGraph.run(
                        new org.apache.flink.graph.library.metric.directed.VertexMetrics<LongValue, NullValue, NullValue>());
                em = newGraph.run(
                        new org.apache.flink.graph.library.metric.directed.EdgeMetrics<LongValue, NullValue, NullValue>());
            } else {
                Graph<IntValue, NullValue, NullValue> newGraph = graph
                        .run(new TranslateGraphIds<LongValue, IntValue, NullValue, NullValue>(
                                new LongValueToIntValue()))
                        .run(new org.apache.flink.graph.asm.simple.directed.Simplify<IntValue, NullValue, NullValue>());

                vm = newGraph.run(
                        new org.apache.flink.graph.library.metric.directed.VertexMetrics<IntValue, NullValue, NullValue>());
                em = newGraph.run(
                        new org.apache.flink.graph.library.metric.directed.EdgeMetrics<IntValue, NullValue, NullValue>());
            }
        } else {
            boolean clipAndFlip = parameters.getBoolean("clip_and_flip", DEFAULT_CLIP_AND_FLIP);

            if (scale > 32) {
                Graph<LongValue, NullValue, NullValue> newGraph = graph.run(
                        new org.apache.flink.graph.asm.simple.undirected.Simplify<LongValue, NullValue, NullValue>(
                                clipAndFlip));

                vm = newGraph.run(
                        new org.apache.flink.graph.library.metric.undirected.VertexMetrics<LongValue, NullValue, NullValue>());
                em = newGraph.run(
                        new org.apache.flink.graph.library.metric.undirected.EdgeMetrics<LongValue, NullValue, NullValue>());
            } else {
                Graph<IntValue, NullValue, NullValue> newGraph = graph
                        .run(new TranslateGraphIds<LongValue, IntValue, NullValue, NullValue>(
                                new LongValueToIntValue()))
                        .run(new org.apache.flink.graph.asm.simple.undirected.Simplify<IntValue, NullValue, NullValue>(
                                clipAndFlip));

                vm = newGraph.run(
                        new org.apache.flink.graph.library.metric.undirected.VertexMetrics<IntValue, NullValue, NullValue>());
                em = newGraph.run(
                        new org.apache.flink.graph.library.metric.undirected.EdgeMetrics<IntValue, NullValue, NullValue>());
            }
        }
    }
        break;

    default:
        printUsage();
        return;
    }

    env.execute("Graph Metrics");

    System.out.print("Vertex metrics:\n  ");
    System.out.println(vm.getResult().toString().replace(";", "\n "));
    System.out.print("\nEdge metrics:\n  ");
    System.out.println(em.getResult().toString().replace(";", "\n "));

    JobExecutionResult result = env.getLastJobExecutionResult();

    NumberFormat nf = NumberFormat.getInstance();
    System.out.println("\nExecution runtime: " + nf.format(result.getNetRuntime()) + " ms");
}

From source file:org.apache.flink.graph.drivers.ClusteringCoefficient.java

public static void main(String[] args) throws Exception {
    // Set up the execution environment
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().enableObjectReuse();

    ParameterTool parameters = ParameterTool.fromArgs(args);
    env.getConfig().setGlobalJobParameters(parameters);

    if (!parameters.has("directed")) {
        throw new ProgramParametrizationException(
                getUsage("must declare execution mode as '--directed true' or '--directed false'"));
    }
    boolean directedAlgorithm = parameters.getBoolean("directed");

    int little_parallelism = parameters.getInt("little_parallelism", PARALLELISM_DEFAULT);

    // global and local clustering coefficient results
    GraphAnalytic gcc;
    GraphAnalytic acc;
    DataSet lcc;

    switch (parameters.get("input", "")) {
    case "csv": {
        String lineDelimiter = StringEscapeUtils
                .unescapeJava(parameters.get("input_line_delimiter", CsvOutputFormat.DEFAULT_LINE_DELIMITER));

        String fieldDelimiter = StringEscapeUtils
                .unescapeJava(parameters.get("input_field_delimiter", CsvOutputFormat.DEFAULT_FIELD_DELIMITER));

        GraphCsvReader reader = Graph.fromCsvReader(parameters.get("input_filename"), env)
                .ignoreCommentsEdges("#").lineDelimiterEdges(lineDelimiter).fieldDelimiterEdges(fieldDelimiter);

        switch (parameters.get("type", "")) {
        case "integer": {
            Graph<LongValue, NullValue, NullValue> graph = reader.keyType(LongValue.class);

            if (directedAlgorithm) {
                if (parameters.getBoolean("simplify", false)) {
                    graph = graph.run(
                            new org.apache.flink.graph.asm.simple.directed.Simplify<LongValue, NullValue, NullValue>()
                                    .setParallelism(little_parallelism));
                }

                gcc = graph.run(
                        new org.apache.flink.graph.library.clustering.directed.GlobalClusteringCoefficient<LongValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                acc = graph.run(
                        new org.apache.flink.graph.library.clustering.directed.AverageClusteringCoefficient<LongValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                lcc = graph.run(
                        new org.apache.flink.graph.library.clustering.directed.LocalClusteringCoefficient<LongValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
            } else {
                if (parameters.getBoolean("simplify", false)) {
                    graph = graph.run(
                            new org.apache.flink.graph.asm.simple.undirected.Simplify<LongValue, NullValue, NullValue>(
                                    false).setParallelism(little_parallelism));
                }

                gcc = graph.run(
                        new org.apache.flink.graph.library.clustering.undirected.GlobalClusteringCoefficient<LongValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                acc = graph.run(
                        new org.apache.flink.graph.library.clustering.undirected.AverageClusteringCoefficient<LongValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                lcc = graph.run(
                        new org.apache.flink.graph.library.clustering.undirected.LocalClusteringCoefficient<LongValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
            }
        }
            break;

        case "string": {
            Graph<StringValue, NullValue, NullValue> graph = reader.keyType(StringValue.class);

            if (directedAlgorithm) {
                if (parameters.getBoolean("simplify", false)) {
                    graph = graph.run(
                            new org.apache.flink.graph.asm.simple.directed.Simplify<StringValue, NullValue, NullValue>()
                                    .setParallelism(little_parallelism));
                }

                gcc = graph.run(
                        new org.apache.flink.graph.library.clustering.directed.GlobalClusteringCoefficient<StringValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                acc = graph.run(
                        new org.apache.flink.graph.library.clustering.directed.AverageClusteringCoefficient<StringValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                lcc = graph.run(
                        new org.apache.flink.graph.library.clustering.directed.LocalClusteringCoefficient<StringValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
            } else {
                if (parameters.getBoolean("simplify", false)) {
                    graph = graph.run(
                            new org.apache.flink.graph.asm.simple.undirected.Simplify<StringValue, NullValue, NullValue>(
                                    false).setParallelism(little_parallelism));
                }

                gcc = graph.run(
                        new org.apache.flink.graph.library.clustering.undirected.GlobalClusteringCoefficient<StringValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                acc = graph.run(
                        new org.apache.flink.graph.library.clustering.undirected.AverageClusteringCoefficient<StringValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                lcc = graph.run(
                        new org.apache.flink.graph.library.clustering.undirected.LocalClusteringCoefficient<StringValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
            }
        }
            break;

        default:
            throw new ProgramParametrizationException(getUsage("invalid CSV type"));
        }
    }
        break;

    case "rmat": {
        int scale = parameters.getInt("scale", DEFAULT_SCALE);
        int edgeFactor = parameters.getInt("edge_factor", DEFAULT_EDGE_FACTOR);

        RandomGenerableFactory<JDKRandomGenerator> rnd = new JDKRandomGeneratorFactory();

        long vertexCount = 1L << scale;
        long edgeCount = vertexCount * edgeFactor;

        Graph<LongValue, NullValue, NullValue> graph = new RMatGraph<>(env, rnd, vertexCount, edgeCount)
                .setParallelism(little_parallelism).generate();

        if (directedAlgorithm) {
            if (scale > 32) {
                Graph<LongValue, NullValue, NullValue> newGraph = graph.run(
                        new org.apache.flink.graph.asm.simple.directed.Simplify<LongValue, NullValue, NullValue>()
                                .setParallelism(little_parallelism));

                gcc = newGraph.run(
                        new org.apache.flink.graph.library.clustering.directed.GlobalClusteringCoefficient<LongValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                acc = newGraph.run(
                        new org.apache.flink.graph.library.clustering.directed.AverageClusteringCoefficient<LongValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                lcc = newGraph.run(
                        new org.apache.flink.graph.library.clustering.directed.LocalClusteringCoefficient<LongValue, NullValue, NullValue>()
                                .setIncludeZeroDegreeVertices(false).setLittleParallelism(little_parallelism));
            } else {
                Graph<IntValue, NullValue, NullValue> newGraph = graph
                        .run(new TranslateGraphIds<LongValue, IntValue, NullValue, NullValue>(
                                new LongValueToUnsignedIntValue()).setParallelism(little_parallelism))
                        .run(new org.apache.flink.graph.asm.simple.directed.Simplify<IntValue, NullValue, NullValue>()
                                .setParallelism(little_parallelism));

                gcc = newGraph.run(
                        new org.apache.flink.graph.library.clustering.directed.GlobalClusteringCoefficient<IntValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                acc = newGraph.run(
                        new org.apache.flink.graph.library.clustering.directed.AverageClusteringCoefficient<IntValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                lcc = newGraph.run(
                        new org.apache.flink.graph.library.clustering.directed.LocalClusteringCoefficient<IntValue, NullValue, NullValue>()
                                .setIncludeZeroDegreeVertices(false).setLittleParallelism(little_parallelism));
            }
        } else {
            boolean clipAndFlip = parameters.getBoolean("clip_and_flip", DEFAULT_CLIP_AND_FLIP);

            if (scale > 32) {
                Graph<LongValue, NullValue, NullValue> newGraph = graph.run(
                        new org.apache.flink.graph.asm.simple.undirected.Simplify<LongValue, NullValue, NullValue>(
                                clipAndFlip).setParallelism(little_parallelism));

                gcc = newGraph.run(
                        new org.apache.flink.graph.library.clustering.undirected.GlobalClusteringCoefficient<LongValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                acc = newGraph.run(
                        new org.apache.flink.graph.library.clustering.undirected.AverageClusteringCoefficient<LongValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                lcc = newGraph.run(
                        new org.apache.flink.graph.library.clustering.undirected.LocalClusteringCoefficient<LongValue, NullValue, NullValue>()
                                .setIncludeZeroDegreeVertices(false).setLittleParallelism(little_parallelism));
            } else {
                Graph<IntValue, NullValue, NullValue> newGraph = graph
                        .run(new TranslateGraphIds<LongValue, IntValue, NullValue, NullValue>(
                                new LongValueToUnsignedIntValue()).setParallelism(little_parallelism))
                        .run(new org.apache.flink.graph.asm.simple.undirected.Simplify<IntValue, NullValue, NullValue>(
                                clipAndFlip).setParallelism(little_parallelism));

                gcc = newGraph.run(
                        new org.apache.flink.graph.library.clustering.undirected.GlobalClusteringCoefficient<IntValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                acc = newGraph.run(
                        new org.apache.flink.graph.library.clustering.undirected.AverageClusteringCoefficient<IntValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                lcc = newGraph.run(
                        new org.apache.flink.graph.library.clustering.undirected.LocalClusteringCoefficient<IntValue, NullValue, NullValue>()
                                .setIncludeZeroDegreeVertices(false).setLittleParallelism(little_parallelism));
            }
        }
    }
        break;

    default:
        throw new ProgramParametrizationException(getUsage("invalid input type"));
    }

    switch (parameters.get("output", "")) {
    case "print":
        if (directedAlgorithm) {
            for (Object e : lcc.collect()) {
                org.apache.flink.graph.library.clustering.directed.LocalClusteringCoefficient.Result result = (org.apache.flink.graph.library.clustering.directed.LocalClusteringCoefficient.Result) e;
                System.out.println(result.toPrintableString());
            }
        } else {
            for (Object e : lcc.collect()) {
                org.apache.flink.graph.library.clustering.undirected.LocalClusteringCoefficient.Result result = (org.apache.flink.graph.library.clustering.undirected.LocalClusteringCoefficient.Result) e;
                System.out.println(result.toPrintableString());
            }
        }
        break;

    case "hash":
        System.out.println(DataSetUtils.checksumHashCode(lcc));
        break;

    case "csv":
        String filename = parameters.get("output_filename");

        String lineDelimiter = StringEscapeUtils
                .unescapeJava(parameters.get("output_line_delimiter", CsvOutputFormat.DEFAULT_LINE_DELIMITER));

        String fieldDelimiter = StringEscapeUtils.unescapeJava(
                parameters.get("output_field_delimiter", CsvOutputFormat.DEFAULT_FIELD_DELIMITER));

        lcc.writeAsCsv(filename, lineDelimiter, fieldDelimiter);

        env.execute("Clustering Coefficient");
        break;

    default:
        throw new ProgramParametrizationException(getUsage("invalid output type"));
    }

    System.out.println(gcc.getResult());
    System.out.println(acc.getResult());

    JobExecutionResult result = env.getLastJobExecutionResult();

    NumberFormat nf = NumberFormat.getInstance();
    System.out.println("Execution runtime: " + nf.format(result.getNetRuntime()) + " ms");
}