Example usage for org.apache.hadoop.fs FileUtil fullyDelete

List of usage examples for org.apache.hadoop.fs FileUtil fullyDelete

Introduction

In this page you can find the example usage for org.apache.hadoop.fs FileUtil fullyDelete.

Prototype

@Deprecated
public static void fullyDelete(FileSystem fs, Path dir) throws IOException 

Source Link

Document

Recursively delete a directory.

Usage

From source file:edu.purdue.cybercenter.dm.storage.AbstractStorageFileManager.java

@Override
public void deleteFile(StorageFile file) throws IOException {
    // Absolute location = storage root + the file's relative path.
    String sourcePath = file.getStorageId().getLocation() + file.getLocation();
    // FileUtil.fullyDelete(FileSystem, Path) is deprecated; it delegates to
    // FileSystem.delete(path, recursive=true), so call that directly.
    localFileSystem.delete(new Path(sourcePath), true);
    // Drop the bookkeeping entry only after the physical delete was attempted.
    removeStorageFileEntry(file.getId());
}

From source file:edu.purdue.cybercenter.dm.storage.HdfsStorageFileManager.java

@Override
public void deleteFile(StorageFile file) throws IOException {
    // Absolute location = storage root + the file's relative path.
    String sourcePath = file.getStorageId().getLocation() + file.getLocation();
    // Resolve the file system that backs this file's storage type.
    FileSystem sourceFs = getFileSystemType(file.getStorageId().getType());
    // FileUtil.fullyDelete(FileSystem, Path) is deprecated; it delegates to
    // FileSystem.delete(path, recursive=true), so call that directly.
    sourceFs.delete(new Path(sourcePath), true);
    // Drop the bookkeeping entry only after the physical delete was attempted.
    removeStorageFileEntry(file.getId());
}

From source file:edu.purdue.cybercenter.dm.storage.HdfsStorageFileManager.java

/**
 * Moves a managed file to a different storage backend and re-points the
 * persisted {@code StorageFile} record at its new storage.
 *
 * @param file    the file to move; its current storage is read from
 *                {@code file.getStorageId()}
 * @param storage the destination storage
 * @return the same {@code file} instance, updated and persisted
 * @throws Exception if the copy fails or any file-system operation fails
 */
private StorageFile moveFile(StorageFile file, Storage storage) throws Exception {
    FileSystem destFS = getFileSystemType(storage.getType());
    String destPath = storage.getLocation() + file.getLocation();
    String sourcePath = file.getStorageId().getLocation() + file.getLocation();
    FileSystem sourceFs = getFileSystemType(file.getStorageId().getType());

    // Make sure the destination's parent directory exists before copying.
    destFS.mkdirs(new Path(destPath.substring(0, destPath.lastIndexOf("/"))));

    // Reuse sourceFs (the original re-resolved the same file system a second
    // time). copy(...) returns false on failure; abort BEFORE deleting the
    // source so a failed copy cannot lose the only copy of the data.
    if (!FileUtil.copy(sourceFs, new Path(sourcePath), destFS, new Path(destPath), false, configuration)) {
        throw new IOException("failed to copy " + sourcePath + " to " + destPath);
    }

    // FileUtil.fullyDelete(FileSystem, Path) is deprecated in favor of
    // FileSystem.delete(path, recursive=true).
    sourceFs.delete(new Path(sourcePath), true);

    file.setStorageId(storage);
    file.persist();
    System.out.println("move from ");
    System.out.println(sourcePath);
    System.out.println("To ");
    System.out.println(destPath);
    return file;
}

From source file:edu.purdue.cybercenter.dm.storage.HdfsStorageFileManager.java

/**
 * Replaces the contents of {@code target} with the file at {@code source}:
 * deletes the target's current data, then copies the source into its place.
 *
 * @param source path of the new content; its storage type is derived from the
 *               path prefix and it is normalized to an absolute path
 * @param target the storage-file record whose backing data is replaced
 * @return the unchanged {@code target} record
 * @throws IOException if the delete or copy fails
 */
private StorageFile updateFile(String source, StorageFile target) throws IOException {
    String fileSysType = getStorageTypeFromFilePath(source);
    source = getAbsoultuePath(fileSysType, source);

    FileSystem sourceFs = getFileSystemType(fileSysType);
    FileSystem targetFs = getFileSystemType(target.getStorageId().getType());
    Path targetPath = new Path(target.getStorageId().getLocation() + target.getLocation());

    // BUGFIX: the original deleted the target path through the *source* file
    // system and copied the source path through the *target* file system. Each
    // path must go through the file system that owns it.
    // (Also: FileUtil.fullyDelete(FileSystem, Path) is deprecated; its
    // documented replacement is FileSystem.delete(path, recursive=true).)
    targetFs.delete(targetPath, true);
    if (!FileUtil.copy(sourceFs, new Path(source), targetFs, targetPath, false, configuration)) {
        throw new IOException("failed to copy " + source + " to " + targetPath);
    }
    return target;
}

From source file:pegasus.ResultInfo.java

License:Apache License

/**
 * Driver for the iterative PEGASUS connected-component computation.
 * Runs three map/reduce stages per iteration until no component id changes
 * (or MAX_ITERATIONS is hit), then runs one summary stage.
 *
 * @param args 9 arguments: edge_path, curbm_path, tempbm_path, nextbm_path,
 *             output_path, number_nodes, nreducers, "new"|"cont&lt;N&gt;",
 *             "makesym"|anything-else
 * @return 0 on success; the result of printUsage() on a wrong argument count
 * @throws Exception if any Hadoop job or file-system operation fails
 */
public int run(final String[] args) throws Exception {
    if (args.length != 9) {
        return printUsage();
    }

    // Wire up HDFS paths and numeric parameters from the command line.
    edge_path = new Path(args[0]);
    curbm_path = new Path(args[1]);
    tempbm_path = new Path(args[2]);
    nextbm_path = new Path(args[3]);
    output_path = new Path(args[4]);
    summaryout_path = new Path("concmpt_summaryout");
    number_nodes = Integer.parseInt(args[5]);
    nreducers = Integer.parseInt(args[6]);

    if (args[7].compareTo("new") == 0)
        start_from_newbm = 1;
    else { // args[7] == "cont<N>" (e.g. "cont10"): resume from iteration N
        start_from_newbm = 0;
        cur_iter = Integer.parseInt(args[7].substring(4));
        System.out.println("Starting from cur_iter = " + cur_iter);
    }

    if (args[8].compareTo("makesym") == 0)
        make_symmetric = 1;
    else
        make_symmetric = 0;

    System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");
    System.out.println("[PEGASUS] Computing connected component. Edge path = " + args[0] + ", Newbm = "
            + args[7] + ", Reducers = " + nreducers);

    // Scratch directory on the LOCAL disk for reading back iteration output.
    local_output_path = args[4] + "_temp";

    if (start_from_newbm == 1) {
        System.out.print("Generating initial component vector for " + number_nodes + " nodes ");
        // create bitmask generate command file, and copy to curbm_path
        gen_component_vector_file(number_nodes, curbm_path);
        System.out.println(" done");
    } else {
        System.out.println("Resuming from current component vector at radius(" + cur_iter + ")");
    }

    // Iteratively calculate neighborhood function.
    for (int i = cur_iter; i < MAX_ITERATIONS; i++) {
        cur_iter++;

        JobClient.runJob(configStage1());
        JobClient.runJob(configStage2());
        JobClient.runJob(configStage3());

        // NOTE(review): FileUtil.fullyDelete(FileSystem, Path) is deprecated;
        // FileSystem.delete(path, true) is the documented replacement.
        FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));

        final FileSystem fs = FileSystem.get(getConf());

        // copy neighborhood information from HDFS to local disk, and read it!
        String new_path = local_output_path + "/" + i;
        fs.copyToLocalFile(output_path, new Path(new_path));
        ResultInfo ri = readIterationOutput(new_path);

        changed_nodes[iter_counter] = ri.changed;
        // NOTE(review): the next line overwrites the value stored just above —
        // it was presumably meant to target a separate "unchanged_nodes"
        // array; confirm against the field declarations before fixing.
        changed_nodes[iter_counter] = ri.unchanged;

        iter_counter++;

        System.out.println("Hop " + i + " : changed = " + ri.changed + ", unchanged = " + ri.unchanged);

        // Stop when the minimum neighborhood doesn't change
        if (ri.changed == 0) {
            System.out.println("All the component ids converged. Finishing...");
            // NOTE(review): single-argument FileSystem.delete(Path) is
            // deprecated; prefer delete(path, recursive).
            fs.delete(curbm_path);
            fs.delete(tempbm_path);
            fs.delete(output_path);
            fs.rename(nextbm_path, curbm_path);

            break;
        }

        // rotate directory: nextbm becomes the current component vector
        fs.delete(curbm_path);
        fs.delete(tempbm_path);
        fs.delete(output_path);
        fs.rename(nextbm_path, curbm_path);
    }

    // Clean up the local scratch directory left by the last iteration.
    FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));

    // calculate summary information using an additional stage
    System.out.println("Summarizing connected components information...");
    JobClient.runJob(configStage4());

    // finishing.
    System.out.println("\n[PEGASUS] Connected component computed.");
    System.out.println("[PEGASUS] Total Iteration = " + iter_counter);
    System.out.println(
            "[PEGASUS] Connected component information is saved in the HDFS concmpt_curbm as\n\"node_id   'msf'component_id\" format");
    System.out.println(
            "[PEGASUS] Connected component distribution is saved in the HDFS concmpt_summaryout as\n\"component_id   number_of_nodes\" format.\n");

    return 0;
}

From source file:pegasus.ConCmptBlock.java

License:Apache License

/**
 * Driver for the block-method PEGASUS connected-component computation.
 * Runs three map/reduce stages per iteration until no component id changes
 * (or MAX_ITERATIONS is hit), unfolds the block structure, then runs a
 * summary stage.
 *
 * @param args 9 arguments: edge_path, curbm_path, tempbm_path, nextbm_path,
 *             output_path, number_nodes, nreducers, "fast"|other, block_width
 * @return 0 on success; the result of printUsage() on a wrong argument count
 * @throws Exception if any Hadoop job or file-system operation fails
 */
public int run(final String[] args) throws Exception {
    if (args.length != 9) {
        return printUsage();
    }
    int i;

    // Wire up HDFS paths and numeric parameters from the command line.
    edge_path = new Path(args[0]);
    curbm_path = new Path(args[1]);
    tempbm_path = new Path(args[2]);
    nextbm_path = new Path(args[3]);
    output_path = new Path(args[4]);
    curbm_unfold_path = new Path("concmpt_curbm");
    summaryout_path = new Path("concmpt_summaryout");
    number_nodes = Integer.parseInt(args[5]);
    nreducers = Integer.parseInt(args[6]);

    // "fast" selects recursive diagonal-block multiplication.
    if (args[7].compareTo("fast") == 0)
        recursive_diagmult = 1;
    else
        recursive_diagmult = 0;

    block_width = Integer.parseInt(args[8]);

    System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");
    System.out.println("[PEGASUS] Computing connected component using block method. Reducers = " + nreducers
            + ", block_width = " + block_width);

    // Scratch directory on the LOCAL disk for reading back iteration output.
    local_output_path = args[4] + "_temp";

    // Iteratively calculate neighborhood function.
    for (i = cur_radius; i < MAX_ITERATIONS; i++) {
        cur_radius++;
        iter_counter++;

        JobClient.runJob(configStage1());
        JobClient.runJob(configStage2());
        JobClient.runJob(configStage3());

        // NOTE(review): FileUtil.fullyDelete(FileSystem, Path) is deprecated;
        // FileSystem.delete(path, true) is the documented replacement.
        FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));

        final FileSystem fs = FileSystem.get(getConf());

        // copy neighborhood information from HDFS to local disk, and read it!
        String new_path = local_output_path + "/" + i;
        fs.copyToLocalFile(output_path, new Path(new_path));
        ResultInfo ri = ConCmpt.readIterationOutput(new_path);

        changed_nodes[iter_counter] = ri.changed;
        // NOTE(review): the next line overwrites the value stored just above —
        // it was presumably meant to target a separate "unchanged_nodes"
        // array; confirm against the field declarations before fixing.
        changed_nodes[iter_counter] = ri.unchanged;

        System.out.println("Hop " + i + " : changed = " + ri.changed + ", unchanged = " + ri.unchanged);

        // Stop when the minimum neighborhood doesn't change
        if (ri.changed == 0) {
            System.out.println("All the component ids converged. Finishing...");
            // NOTE(review): single-argument FileSystem.delete(Path) is
            // deprecated; prefer delete(path, recursive).
            fs.delete(curbm_path);
            fs.delete(tempbm_path);
            fs.delete(output_path);
            fs.rename(nextbm_path, curbm_path);

            System.out.println("Unfolding the block structure for easy lookup...");
            JobClient.runJob(configStage4());

            break;
        }

        // rotate directory: nextbm becomes the current component vector
        fs.delete(curbm_path);
        fs.delete(tempbm_path);
        fs.delete(output_path);
        fs.rename(nextbm_path, curbm_path);
    }

    // Clean up the local scratch directory left by the last iteration.
    FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));

    // calculate summary information using an additional pass
    System.out.println("Summarizing connected components information...");
    JobClient.runJob(configStage5());

    // finishing.
    System.out.println("\n[PEGASUS] Connected component computed.");
    System.out.println("[PEGASUS] Total Iteration = " + iter_counter);
    System.out.println(
            "[PEGASUS] Connected component information is saved in the HDFS concmpt_curbm as\n\"node_id   'msf'component_id\" format");
    System.out.println(
            "[PEGASUS] Connected component distribution is saved in the HDFS concmpt_summaryout as\n\"component_id   number_of_nodes\" format.\n");

    return 0;
}

From source file:pegasus.hadi.Hadi.java

License:Apache License

/**
 * Driver for the PEGASUS HADI radii/diameter computation.
 * Iterates three map/reduce stages until all node bitstrings converge (or
 * max_iteration is hit), then computes the effective diameter and a radius
 * summary.
 *
 * @param args 12 arguments: edge_path, curbm_path, tempbm_path, nextbm_path,
 *             output_path, number_nodes, nreplication, nreducer,
 *             "enc"|other, "newbm"|"cont&lt;N&gt;", "makesym"|other,
 *             "max"|max_iteration
 * @return 0 on success; the result of printUsage() on a wrong argument count
 * @throws Exception if any Hadoop job or file-system operation fails
 */
public int run(final String[] args) throws Exception {
    int i;
    int max_iteration = MAX_ITERATIONS;

    if (args.length != 12) {
        return printUsage();
    }

    // Wire up HDFS paths and numeric parameters from the command line.
    edge_path = new Path(args[0]);
    curbm_path = new Path(args[1]);
    tempbm_path = new Path(args[2]);
    nextbm_path = new Path(args[3]);
    output_path = new Path(args[4]);
    number_nodes = Integer.parseInt(args[5]);
    radius_path = new Path("hadi_radius");
    radius_summary_path = new Path("hadi_radius_summary");
    nreplication = Integer.parseInt(args[6]);
    nreducer = Integer.parseInt(args[7]);

    // "enc" turns on bitmask encoding.
    if (args[8].compareTo("enc") == 0)
        encode_bitmask = 1;

    if (args[9].compareTo("newbm") == 0) {
        start_from_newbm = 1;
    } else if (args[9].startsWith("cont")) { // "cont<N>": resume from radius N
        start_from_newbm = 0;
        cur_radius = Integer.parseInt(args[9].substring(4));
    }

    if (args[10].compareTo("makesym") == 0)
        make_symmetric = 1;
    else
        make_symmetric = 0;

    // "max" keeps the default iteration cap; otherwise args[11] overrides it.
    if (args[11].compareTo("max") != 0)
        max_iteration = Integer.parseInt(args[11]);

    System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");
    System.out.println("[PEGASUS] Computing Radii/Diameter. Current hop: " + cur_radius + ", " + "edge_path: "
            + args[0] + ", encode: " + encode_bitmask + ", # reducers: " + nreducer + ", makesym: "
            + make_symmetric + ", max_iteration: " + max_iteration + "\n");

    // Scratch directory on the LOCAL disk for reading back iteration output.
    local_output_path = args[4] + number_nodes + "_temp";

    if (start_from_newbm == 1) {
        System.out.print("Generating initial bitstrings for " + number_nodes + " nodes ");

        // create bitmask generate command file, and copy to curbm_path
        gen_bitmask_cmd_file(number_nodes, nreplication, curbm_path);
        System.out.println(" done");
    } else {
        System.out.println("Resuming from current hadi_curbm which contains up to N(" + (cur_radius - 1) + ")");
    }

    // N[h] holds the neighborhood-function value at hop h; N[0] is trivially
    // the node count.
    N[0] = number_nodes;

    boolean eff_diameter_computed = false;

    // Iteratively run Stage1 to Stage3.
    for (i = cur_radius; i <= max_iteration; i++) {
        JobClient.runJob(configStage1(edge_type));
        JobClient.runJob(configStage2());
        JobClient.runJob(configStage3());

        // NOTE(review): FileUtil.fullyDelete(FileSystem, Path) is deprecated;
        // FileSystem.delete(path, true) is the documented replacement.
        FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));

        final FileSystem fs = FileSystem.get(getConf());

        // copy neighborhood information from HDFS to local disk, and read it!
        String new_path = local_output_path + "/" + i;
        fs.copyToLocalFile(output_path, new Path(new_path));
        HadiResultInfo ri = HadiUtils.readNhoodOutput(new_path);
        N[i] = ri.nh;
        iter_counter++;

        System.out.println(
                "Nh(" + i + "):\t" + N[i] + "\tGuessed Radius(" + (i - 1) + "):\t" + ri.converged_nodes);

        // Stop when all radii converged.
        if (ri.changed_nodes == 0) {
            System.out.println("All the bitstrings converged. Finishing...");
            // NOTE(review): single-argument FileSystem.delete(Path) is
            // deprecated; prefer delete(path, recursive).
            fs.delete(curbm_path);
            fs.delete(tempbm_path);
            fs.rename(nextbm_path, curbm_path);
            System.out.println("Calculating the effective diameter...");
            JobClient.runJob(configStage4());
            eff_diameter_computed = true;
            break;
        }

        // rotate directory: nextbm becomes the current bitmask vector.
        fs.delete(curbm_path);
        fs.delete(tempbm_path);
        // NOTE(review): this guard compares against the compile-time
        // MAX_ITERATIONS, not the runtime max_iteration cap chosen above —
        // looks inconsistent; confirm which bound was intended.
        if (i < MAX_ITERATIONS - 1)
            fs.delete(output_path);
        fs.rename(nextbm_path, curbm_path);

        cur_radius++;
    }

    // If the loop exhausted max_iteration without converging, the effective
    // diameter stage has not run yet.
    if (eff_diameter_computed == false) {
        System.out.println("Calculating the effective diameter...");
        JobClient.runJob(configStage4());
    }

    // Summarize Radius Information
    System.out.println("Summarizing radius information...");
    JobClient.runJob(configStage5());

    // Clean up the local scratch directory.
    FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));

    // print summary information
    if (i > max_iteration)
        System.out.println("Reached Max Iteartion " + max_iteration);
    System.out.println("Total Iteration = " + iter_counter + ".");

    System.out.println("Neighborhood Summary:");
    for (int j = 0; j <= (i); j++)
        System.out.println("\tNh(" + (j) + "):\t" + N[j]);

    System.out.println("\n[PEGASUS] Radii and diameter computed.");
    System.out.println("[PEGASUS] Maximum diameter: " + (cur_radius - 1));
    System.out.println("[PEGASUS] Average diameter: " + HadiUtils.average_diameter(N, cur_radius - 1));
    System.out.println("[PEGASUS] 90% Effective diameter: " + HadiUtils.effective_diameter(N, cur_radius - 1));
    System.out.println("[PEGASUS] Radii are saved in the HDFS " + radius_path.getName());
    System.out.println("[PEGASUS] Radii summary is saved in the HDFS " + radius_summary_path.getName() + "\n");

    return 0;
}

From source file:pegasus.hadi.HadiBlock.java

License:Apache License

/**
 * Driver for the block-method PEGASUS HADI radii/diameter computation.
 * Iterates three map/reduce stages until all node bitstrings converge (or
 * max_iteration is hit), then computes the effective diameter and a radius
 * summary.
 *
 * @param args 12 arguments: edge_path, curbm_path, tempbm_path, nextbm_path,
 *             output_path, number_nodes, nreplication, nreducer,
 *             "enc"|other, "newbm"|"cont&lt;N&gt;", block_width,
 *             "max"|max_iteration
 * @return 0 on success; the result of printUsage() on a wrong argument count
 * @throws Exception if any Hadoop job or file-system operation fails
 */
public int run(final String[] args) throws Exception {
    int i;
    int max_iteration = MAX_ITERATIONS;

    if (args.length != 12) {
        return printUsage();
    }

    // Wire up HDFS paths and numeric parameters from the command line.
    edge_path = new Path(args[0]);
    curbm_path = new Path(args[1]);
    tempbm_path = new Path(args[2]);
    nextbm_path = new Path(args[3]);
    output_path = new Path(args[4]);
    number_nodes = Integer.parseInt(args[5]);
    radius_path = new Path("hadi_radius_block");
    radius_summary_path = new Path("hadi_radius_block_summary");
    nreplication = Integer.parseInt(args[6]);
    nreducer = Integer.parseInt(args[7]);

    // "enc" turns on bitmask encoding.
    if (args[8].compareTo("enc") == 0)
        encode_bitmask = 1;

    if (args[9].compareTo("newbm") == 0)
        start_from_newbm = 1;
    else {
        // NOTE(review): assumes args[9] is "cont<N>"; unlike Hadi.run there is
        // no startsWith("cont") guard, so any other value throws
        // NumberFormatException — confirm whether that is intended.
        start_from_newbm = 0;
        cur_radius = Integer.parseInt(args[9].substring(4));
    }

    block_width = Integer.parseInt(args[10]);

    // "max" keeps the default iteration cap; otherwise args[11] overrides it.
    if (args[11].compareTo("max") != 0)
        max_iteration = Integer.parseInt(args[11]);

    System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");
    System.out.println("[PEGASUS] Computing Radii/Diameter using block method. Current hop: " + cur_radius
            + ", edge_path: " + args[0] + ", encode: " + encode_bitmask + ", # reducers: " + nreducer
            + ", block width: " + block_width + ", max_iteration: " + max_iteration + "\n");

    // Scratch directory on the LOCAL disk for reading back iteration output.
    local_output_path = args[4] + number_nodes + "_tempblk";

    // N[h] holds the neighborhood-function value at hop h.
    N[0] = number_nodes;

    // Iteratively run Stage1 to Stage3.
    for (i = cur_radius; i <= max_iteration; i++) {
        JobClient.runJob(configStage1());
        JobClient.runJob(configStage2());
        JobClient.runJob(configStage3());

        // NOTE(review): FileUtil.fullyDelete(FileSystem, Path) is deprecated;
        // FileSystem.delete(path, true) is the documented replacement.
        FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));

        final FileSystem fs = FileSystem.get(getConf());

        // copy neighborhood information from HDFS to local disk, and read it!
        String new_path = local_output_path + "/" + i;
        fs.copyToLocalFile(output_path, new Path(new_path));
        HadiResultInfo ri = HadiUtils.readNhoodOutput(new_path);
        N[i] = ri.nh;
        iter_counter++;

        System.out.println(
                "Nh(" + i + "):\t" + N[i] + "\tGuessed Radius(" + (i - 1) + "):\t" + ri.converged_nodes);

        // Stop when all radii converged.
        if (ri.changed_nodes == 0) {
            System.out.println("All the bitstrings converged. Finishing...");
            // NOTE(review): single-argument FileSystem.delete(Path) is
            // deprecated; prefer delete(path, recursive).
            fs.delete(curbm_path);
            fs.delete(tempbm_path);
            fs.rename(nextbm_path, curbm_path);
            break;
        }

        // rotate directory: nextbm becomes the current bitmask vector.
        fs.delete(curbm_path);
        fs.delete(tempbm_path);
        // NOTE(review): this guard compares against the compile-time
        // MAX_ITERATIONS, not the runtime max_iteration cap chosen above —
        // looks inconsistent; confirm which bound was intended.
        if (i < MAX_ITERATIONS - 1)
            fs.delete(output_path);
        fs.rename(nextbm_path, curbm_path);

        cur_radius++;
    }

    // Summarize Radius Information
    System.out.println("Calculating the effective diameter...");
    JobClient.runJob(configStage4());

    // Summarize Radius Information
    System.out.println("Summarizing radius information...");
    JobClient.runJob(configStage5());

    // Clean up the local scratch directory.
    FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));

    // print summary information
    if (i > max_iteration)
        System.out.println("Reached Max Iteartion " + max_iteration);
    System.out.println("Total Iteration = " + iter_counter + ".");

    System.out.println("Neighborhood Summary:");
    for (int j = 0; j <= (i); j++)
        System.out.println("\tNh(" + (j) + "):\t" + N[j]);

    System.out.println("\n[PEGASUS] Radii and diameter computed.");
    System.out.println("[PEGASUS] Maximum diameter: " + (cur_radius - 1));
    System.out.println("[PEGASUS] Average diameter: " + HadiUtils.average_diameter(N, cur_radius - 1));
    System.out.println("[PEGASUS] 90% Effective diameter: " + HadiUtils.effective_diameter(N, cur_radius - 1));
    System.out.println("[PEGASUS] Radii are saved in the HDFS " + radius_path.getName());
    System.out.println("[PEGASUS] Radii summary is saved in the HDFS " + radius_summary_path.getName() + "\n");

    return 0;
}

From source file:pegasus.HadiResultInfo.java

License:Apache License

/**
 * Driver for the PEGASUS HADI radii/diameter computation (same structure as
 * Hadi.run: three iterated map/reduce stages until bitstring convergence,
 * then effective-diameter and radius-summary stages).
 *
 * @param args 12 arguments: edge_path, curbm_path, tempbm_path, nextbm_path,
 *             output_path, number_nodes, nreplication, nreducer,
 *             "enc"|other, "newbm"|"cont&lt;N&gt;", "makesym"|other,
 *             "max"|max_iteration
 * @return 0 on success; the result of printUsage() on a wrong argument count
 * @throws Exception if any Hadoop job or file-system operation fails
 */
public int run(final String[] args) throws Exception {
    int i;
    int max_iteration = MAX_ITERATIONS;

    if (args.length != 12) {
        return printUsage();
    }

    // Wire up HDFS paths and numeric parameters from the command line.
    edge_path = new Path(args[0]);
    curbm_path = new Path(args[1]);
    tempbm_path = new Path(args[2]);
    nextbm_path = new Path(args[3]);
    output_path = new Path(args[4]);
    number_nodes = Integer.parseInt(args[5]);
    radius_path = new Path("hadi_radius");
    radius_summary_path = new Path("hadi_radius_summary");
    nreplication = Integer.parseInt(args[6]);
    nreducer = Integer.parseInt(args[7]);

    // "enc" turns on bitmask encoding.
    if (args[8].compareTo("enc") == 0)
        encode_bitmask = 1;

    if (args[9].compareTo("newbm") == 0) {
        start_from_newbm = 1;
    } else if (args[9].startsWith("cont")) { // "cont<N>": resume from radius N
        start_from_newbm = 0;
        cur_radius = Integer.parseInt(args[9].substring(4));
    }

    if (args[10].compareTo("makesym") == 0)
        make_symmetric = 1;
    else
        make_symmetric = 0;

    // "max" keeps the default iteration cap; otherwise args[11] overrides it.
    if (args[11].compareTo("max") != 0)
        max_iteration = Integer.parseInt(args[11]);

    System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");
    System.out.println("[PEGASUS] Computing Radii/Diameter. Current hop: " + cur_radius + ", edge_path: "
            + args[0] + ", encode: " + encode_bitmask + ", # reducers: " + nreducer + ", makesym: "
            + make_symmetric + ", max_iteration: " + max_iteration + "\n");

    // Scratch directory on the LOCAL disk for reading back iteration output.
    local_output_path = args[4] + number_nodes + "_temp";

    if (start_from_newbm == 1) {
        System.out.print("Generating initial bitstrings for " + number_nodes + " nodes ");

        // create bitmask generate command file, and copy to curbm_path
        gen_bitmask_cmd_file(number_nodes, nreplication, curbm_path);
        System.out.println(" done");
    } else {
        System.out.println("Resuming from current hadi_curbm which contains up to N(" + (cur_radius - 1) + ")");
    }

    // N[h] holds the neighborhood-function value at hop h.
    N[0] = number_nodes;

    boolean eff_diameter_computed = false;

    // Iteratively run Stage1 to Stage3.
    for (i = cur_radius; i <= max_iteration; i++) {
        JobClient.runJob(configStage1(edge_type));
        JobClient.runJob(configStage2());
        JobClient.runJob(configStage3());

        // NOTE(review): FileUtil.fullyDelete(FileSystem, Path) is deprecated;
        // FileSystem.delete(path, true) is the documented replacement.
        FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));

        final FileSystem fs = FileSystem.get(getConf());

        // copy neighborhood information from HDFS to local disk, and read it!
        String new_path = local_output_path + "/" + i;
        fs.copyToLocalFile(output_path, new Path(new_path));
        HadiResultInfo ri = HadiUtils.readNhoodOutput(new_path);
        N[i] = ri.nh;
        iter_counter++;

        System.out.println(
                "Nh(" + i + "):\t" + N[i] + "\tGuessed Radius(" + (i - 1) + "):\t" + ri.converged_nodes);

        // Stop when all radii converged.
        if (ri.changed_nodes == 0) {
            System.out.println("All the bitstrings converged. Finishing...");
            // NOTE(review): single-argument FileSystem.delete(Path) is
            // deprecated; prefer delete(path, recursive).
            fs.delete(curbm_path);
            fs.delete(tempbm_path);
            fs.rename(nextbm_path, curbm_path);
            System.out.println("Calculating the effective diameter...");
            JobClient.runJob(configStage4());
            eff_diameter_computed = true;
            break;
        }

        // rotate directory: nextbm becomes the current bitmask vector.
        fs.delete(curbm_path);
        fs.delete(tempbm_path);
        // NOTE(review): this guard compares against the compile-time
        // MAX_ITERATIONS, not the runtime max_iteration cap chosen above —
        // looks inconsistent; confirm which bound was intended.
        if (i < MAX_ITERATIONS - 1)
            fs.delete(output_path);
        fs.rename(nextbm_path, curbm_path);

        cur_radius++;
    }

    // If the loop exhausted max_iteration without converging, the effective
    // diameter stage has not run yet.
    if (eff_diameter_computed == false) {
        System.out.println("Calculating the effective diameter...");
        JobClient.runJob(configStage4());
    }

    // Summarize Radius Information
    System.out.println("Summarizing radius information...");
    JobClient.runJob(configStage5());

    // Clean up the local scratch directory.
    FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));

    // print summary information
    if (i > max_iteration)
        System.out.println("Reached Max Iteartion " + max_iteration);
    System.out.println("Total Iteration = " + iter_counter + ".");

    System.out.println("Neighborhood Summary:");
    for (int j = 0; j <= (i); j++)
        System.out.println("\tNh(" + (j) + "):\t" + N[j]);

    System.out.println("\n[PEGASUS] Radii and diameter computed.");
    System.out.println("[PEGASUS] Maximum diameter: " + (cur_radius - 1));
    System.out.println("[PEGASUS] Average diameter: " + HadiUtils.average_diameter(N, cur_radius - 1));
    System.out.println("[PEGASUS] 90% Effective diameter: " + HadiUtils.effective_diameter(N, cur_radius - 1));
    System.out.println("[PEGASUS] Radii are saved in the HDFS " + radius_path.getName());
    System.out.println("[PEGASUS] Radii summary is saved in the HDFS " + radius_summary_path.getName() + "\n");

    return 0;
}

From source file:pegasus.rwr.RWRBlock.java

License:Apache License

/**
 * Driver for the block-method PEGASUS Random Walk with Restart (RWR).
 * Normalizes and block-encodes the query vector, then iterates
 * v2 = c*W*v + q until the L1 difference between successive vectors falls
 * below converge_threshold, and finally computes min/max and a score
 * histogram.
 *
 * @param args 8 arguments: edge_path, vector_path, query_raw_path,
 *             number_nodes, nreducers, niteration, block_width, mixing_c
 * @return 0 on success; the result of printUsage() on a wrong argument count
 * @throws Exception if any Hadoop job or file-system operation fails
 */
public int run(final String[] args) throws Exception {
    if (args.length != 8) {
        return printUsage();
    }
    int i;

    // Wire up HDFS paths and numeric parameters from the command line.
    edge_path = new Path(args[0]);
    vector_path = new Path(args[1]);
    tempmv_path = new Path("rwr_tempmv_block");
    mv_output_path = new Path("rwr_output_block");
    new_vector_path = new Path("rwr_vector_new");
    query_raw_path = new Path(args[2]);
    number_nodes = Long.parseLong(args[3]);
    nreducers = Integer.parseInt(args[4]);
    niteration = Integer.parseInt(args[5]);
    block_width = Integer.parseInt(args[6]);
    mixing_c = Double.parseDouble(args[7]);

    // Scratch directory on the LOCAL disk for reading back min/max output.
    local_output_path = "rwr_output_temp";

    System.out.println("\n-----===[PEGASUS: A Peta-Scale Graph Mining System]===-----\n");
    System.out.println("[PEGASUS] Computing RWR using block method. Max iteration = " + niteration
            + ", threshold = " + converge_threshold + "\n");

    fs = FileSystem.get(getConf());

    // normalize query so it sums to (1 - c)
    String[] new_args = new String[4];
    new_args[0] = args[2];
    new_args[1] = "rwr_query_norm";
    new_args[2] = "" + nreducers;
    new_args[3] = "" + (1.0 - mixing_c);
    ToolRunner.run(getConf(), new NormalizeVector(), new_args);

    // block-encode the normalized query
    new_args = new String[7];
    new_args[0] = "rwr_query_norm";
    new_args[1] = "rwr_query_norm_block";
    new_args[2] = "" + number_nodes;
    new_args[3] = "" + block_width;
    new_args[4] = "" + nreducers;
    new_args[5] = "null";
    new_args[6] = "nosym";
    ToolRunner.run(getConf(), new MatvecPrep(), new_args);

    // Iteratively calculate neighborhood function.
    for (i = 0; i < niteration; i++) {
        System.out.println("\n\nITERATION " + (i + 1));

        // v1 <- c*W*v
        JobClient.runJob(configStage1());
        RunningJob job = JobClient.runJob(configStage2());

        // v2 <- v1 + q
        // NOTE(review): query_block_path is a field set elsewhere; presumably
        // it points at the "rwr_query_norm_block" output produced above —
        // confirm against the field initialization.
        SaxpyBlock(getConf(), nreducers, mv_output_path, query_block_path, new_vector_path, 1.0, block_width);

        // diff = || v2 - vector ||
        SaxpyBlock(getConf(), nreducers, new_vector_path, vector_path, diff_path, -1.0, block_width);

        // compute l1 norm of the difference vector
        new_args = new String[2];
        new_args[0] = diff_path.getName();
        new_args[1] = "" + block_width;

        ToolRunner.run(getConf(), new L1normBlock(), new_args);
        double difference = PegasusUtils.read_l1norm_result(getConf());
        FileSystem lfs = FileSystem.getLocal(getConf());
        lfs.delete(new Path("l1norm"), true);

        System.out.println("difference = " + difference);

        if (difference < converge_threshold) {
            System.out.println("RWR vector converged. Now preparing to finish...");
            // NOTE(review): single-argument FileSystem.delete(Path) is
            // deprecated; prefer delete(path, recursive).
            fs.delete(vector_path);
            fs.delete(tempmv_path);
            fs.rename(new_vector_path, vector_path);
            break;
        }

        // rotate directory: the new vector becomes the current vector
        fs.delete(vector_path);
        fs.delete(tempmv_path);
        fs.rename(new_vector_path, vector_path);
    }

    if (i == niteration) {
        System.out.println("Reached the max iteration. Now preparing to finish...");
    }

    // unfold the block RWR to plain format
    System.out.println("Unfolding the block RWR to plain format...");
    JobClient.runJob(configStage25());

    // find min/max of RWR
    System.out.println("Finding minimum and maximum RWR scores...");
    JobClient.runJob(configStage3());

    // NOTE(review): FileUtil.fullyDelete(FileSystem, Path) is deprecated;
    // FileSystem.delete(path, true) is the documented replacement.
    FileUtil.fullyDelete(FileSystem.getLocal(getConf()), new Path(local_output_path));
    String new_path = local_output_path + "/";
    fs.copyToLocalFile(minmax_path, new Path(new_path));

    MinMaxInfo mmi = PagerankNaive.readMinMax(new_path);
    System.out.println("min = " + mmi.min + ", max = " + mmi.max);

    // find distribution of RWR scores (1000 bins between min and max)
    JobClient.runJob(configStage4(mmi.min, mmi.max));

    System.out.println("\n[PEGASUS] RWR computed.");
    System.out.println("[PEGASUS] The final RWR scores are in the HDFS rwr_vector.");
    System.out.println("[PEGASUS] The minium and maximum RWRs are in the HDFS rwr_minmax.");
    System.out.println("[PEGASUS] The histogram of RWRs in 1000 bins between min_RWR and "
            + "max_RWR are in the HDFS rwr_distr.\n");

    return 0;
}