Example usage for org.apache.commons.lang RandomStringUtils randomAlphanumeric

List of usage examples for org.apache.commons.lang RandomStringUtils randomAlphanumeric

Introduction

On this page you can find an example usage of org.apache.commons.lang RandomStringUtils.randomAlphanumeric.

Prototype

public static String randomAlphanumeric(int count) 

Source Link

Document

Creates a random string whose length is the number of characters specified.

Characters will be chosen from the set of alpha-numeric characters.

Usage

From source file:eu.vital.vitalcep.restApp.alert.Alerts.java

/**
 * Creates an alert CEP instance from the supplied specification.
 *
 * @param cepico JSON payload; must contain a "source" array and a
 *               "dolceSpecification" object
 * @param req    request whose "name"/"password" headers carry the caller's
 *               credentials
 * @return 200 with the new sensor id on success, 401 on bad credentials,
 *         400 on a malformed request, 500 on CEP or MongoDB failure
 * @throws java.io.IOException
 */
@PUT
@Path("createalert")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response createAlert(String cepico, @Context HttpServletRequest req) throws IOException {

    StringBuilder ck = new StringBuilder();
    Security slogin = new Security();

    JSONObject credentials = new JSONObject();

    Boolean token = slogin.login(req.getHeader("name"), req.getHeader("password"), false, ck);
    credentials.put("username", req.getHeader("name"));
    credentials.put("password", req.getHeader("password"));
    if (!token) {
        return Response.status(Response.Status.UNAUTHORIZED).build();
    }
    this.cookie = ck.toString();

    // Validate the payload before touching the database.
    JSONObject jo = new JSONObject(cepico);
    if (!jo.has("source") || !jo.has("dolceSpecification")) {
        return Response.status(Response.Status.BAD_REQUEST).build();
    }

    // BUG FIX: the original probed the connection and then closed the
    // MongoClient (and nulled `db`) in a finally block, yet dereferenced the
    // nulled `db` later when inserting documents -- an NPE on every
    // successful path.  Keep the client open for the whole operation and
    // close it on every exit path instead.
    MongoClient mongo = new MongoClient(new MongoClientURI(mongoURL));
    try {
        MongoDatabase db = mongo.getDatabase(mongoDB);

        JSONObject dsjo = jo.getJSONObject("dolceSpecification");

        try {
            DolceSpecification ds = new DolceSpecification(dsjo.toString());

            String randomUUIDString = UUID.randomUUID().toString();

            // Random queue names wiring the CEP engine to MQTT.
            String mqin = RandomStringUtils.randomAlphanumeric(8);
            String mqout = RandomStringUtils.randomAlphanumeric(8);

            JSONArray requestArray;
            try {
                requestArray = createAlertRequests(jo.getJSONArray("source"), ds.getEvents(),
                        getXSDDateTime(new Date()));
            } catch (Exception e) {
                return Response.status(Response.Status.BAD_REQUEST)
                        .entity("not available getObservation Service for this sensor ").build();
            }

            CEP cepProcess = new CEP();
            if (!(cepProcess.CEPStart(CEP.CEPType.ALERT, ds, mqin, mqout, confFile,
                    requestArray.toString(), credentials))) {
                return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
            }
            // A PID below 1 means the engine process did not start.
            if (cepProcess.PID < 1) {
                return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
            }

            DBObject dbObject = createAlertSensor(cepico, randomUUIDString, dsjo, cepProcess.id);

            try {
                db.getCollection("alerts").insertOne(new Document(dbObject.toMap()));

                JSONObject opState = createOperationalStateObservation(randomUUIDString);
                String sensorId = host + "/sensor/" + randomUUIDString;

                // Subscribe a publisher on the engine's output queue so
                // observations flow back into the DMS.
                MessageProcessor_publisher Publisher_MsgProcc = new MessageProcessor_publisher(this.dmsURL,
                        cookie, sensorId, "alertsobservations", mongoURL, mongoDB);
                MQTT_connector_subscriper publisher = new MQTT_connector_subscriper(mqout,
                        Publisher_MsgProcc);
                MqttConnectorContainer.addConnector(publisher.getClientName(), publisher);

                DBObject oPut = (DBObject) JSON.parse(opState.toString());
                try {
                    db.getCollection("alertsobservations").insertOne(new Document(oPut.toMap()));
                } catch (MongoException ex) {
                    return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
                }

                JSONObject aOutput = new JSONObject();
                aOutput.put("id", sensorId);
                return Response.status(Response.Status.OK).entity(aOutput.toString()).build();

            } catch (MongoException ex) {
                return Response.status(Response.Status.BAD_REQUEST).build();
            }
        } catch (JSONException | IOException e) {
            return Response.status(Response.Status.BAD_REQUEST).build();
        }
    } finally {
        mongo.close();
    }
}

From source file:eu.vital.vitalcep.collector.Collector.java

/**
 * Private singleton constructor: reads the Mongo configuration, loads the
 * current collector list and schedules a task that, every 10 seconds, pulls
 * new observations for each registered sensor and forwards them to the
 * matching CEP instance, then persists each sensor's "lastRequest" timestamp.
 *
 * @throws IOException if the configuration cannot be read
 */
private Collector() throws IOException {

    ConfigReader configReader = ConfigReader.getInstance();

    mongoURL = configReader.get(ConfigReader.MONGO_URL);
    mongoDB = configReader.get(ConfigReader.MONGO_DB);

    getCollectorList();

    ScheduledExecutorService exec = Executors.newScheduledThreadPool(2);

    Runnable collectoRunnable;
    collectoRunnable = new Runnable() {
        @Override
        public void run() {

            // NOTE(review): `mongo` and `db` look like instance fields shared
            // across iterations and runs; they are opened here, closed in the
            // per-sensor finally below, and lazily reopened -- confirm nothing
            // else uses them concurrently.
            if (sensors.length() > 0) {
                mongo = new MongoClient(new MongoClientURI(mongoURL));
                db = mongo.getDatabase(mongoDB);
            }

            Date NOW = new Date();
            String nowString = getXSDDateTime(NOW);
            for (int i = 0; i < sensors.length(); i++) {
                try {
                    String cookie = getListenerCredentials(i);

                    JSONArray aData = new JSONArray();
                    String type = sensors.getJSONObject(i).getString("cepType");
                    if (type.equals("CONTINUOUS")) {
                        // Continuous sensors: fetch observations through the DMS listener.
                        try {
                            DMSListener oDMS = new DMSListener(cookie);

                            aData = oDMS.getObservations(sensors.getJSONObject(i).getJSONArray("sources"),
                                    sensors.getJSONObject(i).getJSONArray("properties"),
                                    sensors.getJSONObject(i).getString("lastRequest"));

                        } catch (IOException | KeyManagementException | NoSuchAlgorithmException
                                | KeyStoreException ex) {
                            java.util.logging.Logger.getLogger(Collector.class.getName()).log(Level.SEVERE,
                                    null, ex);
                        }

                        if (aData.length() > 0) {
                            sendData2CEP(aData, i);
                        }

                    } else {
                        // All other sensor types: fetch through the PPI listener.
                        try {

                            JSONObject sensor = new JSONObject();
                            sensor = sensors.getJSONObject(i);
                            JSONArray requests = new JSONArray();
                            requests = sensor.getJSONArray("requests");
                            PPIListener oPPI = new PPIListener(cookie);

                            aData = oPPI.getObservations(requests, sensor.getString("lastRequest"));

                            if (aData.length() > 0) {
                                sendData2CEP(aData, i);
                            }

                        } catch (IOException | KeyManagementException | NoSuchAlgorithmException
                                | KeyStoreException ex) {
                            java.util.logging.Logger.getLogger(Collector.class.getName()).log(Level.SEVERE,
                                    null, ex);
                        }

                    }

                    // Record the poll time both in memory and in Mongo so the
                    // next cycle only requests newer observations.
                    sensors.getJSONObject(i).put("lastRequest", nowString);
                    // The finally block below closes the client each iteration,
                    // so it may need to be reopened here.
                    if (mongo == null)
                        mongo = new MongoClient(new MongoClientURI(mongoURL));
                    if (db == null)
                        db = mongo.getDatabase(mongoDB);

                    Bson filter = Filters.eq("_id", new ObjectId(sensors.getJSONObject(i).getString("id")));

                    Bson update = new Document("$set", new Document("lastRequest", nowString));

                    UpdateOptions options = new UpdateOptions().upsert(false);

                    UpdateResult updateDoc = db.getCollection("cepinstances").updateOne(filter, update,
                            options);

                } catch (GeneralSecurityException | IOException | ParseException ex) {
                    java.util.logging.Logger.getLogger(Collector.class.getName()).log(Level.SEVERE, null, ex);
                } finally {
                    if (db != null)
                        db = null;
                    if (mongo != null) {
                        mongo.close();
                        mongo = null;
                    }
                }

            }
            if (db != null)
                db = null;
            if (mongo != null) {
                mongo.close();
                mongo = null;
            }
        }

        /** Decodes the observations to Dolce input events and pushes them to the sensor's MQTT input queue. */
        private void sendData2CEP(JSONArray aData, int i) throws JSONException, ParseException {
            Decoder decoder = new Decoder();
            ArrayList<String> simpleEventAL = decoder.JsonldArray2DolceInput(aData);
            MqttAllInOne oMqtt = new MqttAllInOne();
            TMessageProc MsgProcc = new TMessageProc();

            //TODO: check the client name. see from cep instances and what happen when if the topic exist 
            String clientName = "collector_" + RandomStringUtils.randomAlphanumeric(4);

            oMqtt.sendMsg(MsgProcc, clientName, simpleEventAL, sensors.getJSONObject(i).getString("mqin"),
                    sensors.getJSONObject(i).getString("mqout"), true);

        }

        /**
         * Logs in and returns the session cookie used by the listeners.
         * NOTE(review): credentials are hardcoded here (the per-sensor lookup
         * is commented out) and a failed login is silently ignored, returning
         * an empty cookie -- both should be fixed before production use.
         */
        private String getListenerCredentials(int i)
                throws IOException, GeneralSecurityException, JSONException {
            StringBuilder ck = new StringBuilder();
            Security slogin = new Security();
            JSONObject credentials = new JSONObject();
            //                Boolean token = slogin.login(sensors.getJSONArray(i)
            //                        .getJSONObject(0)
            //                        .getString("username")
            //                        ,decrypt(sensors.getJSONArray(i)
            //                                .getJSONObject(0)
            //                                .getString("password")),false,ck);
            Boolean token = slogin.login("elisa", "elisotas1", false, ck);
            if (!token) {
                //throw new

            }
            String cookie = ck.toString();
            return cookie;
        }
    };

    exec.scheduleAtFixedRate(collectoRunnable, 0, 10, TimeUnit.SECONDS);

}

From source file:com.blackberry.logtools.logcat.java

/**
 * Entry point for the logcat tool: parses command-line options, validates the
 * six required arguments, runs a MapReduce job to gather matching log lines
 * into a temporary HDFS directory, then sorts/delivers the results via a Pig
 * job.  Exits the JVM directly (System.exit) on argument errors.
 *
 * @param argv raw command-line arguments
 * @return 0 on success
 * @throws Exception propagated from the Hadoop/Pig machinery
 */
public int run(String[] argv) throws Exception {
    //Configuring configuration and filesystem to work on HDFS
    final Configuration conf = getConf(); //Configuration processed by ToolRunner
    FileSystem fs = FileSystem.get(conf);
    //Initiate tools used for running search
    LogTools tools = new LogTools();

    //Other options
    String date_format = "RFC5424";
    String field_separator = "";
    ArrayList<String> D_options = new ArrayList<String>();
    boolean quiet = true;
    boolean silent = false;
    boolean log = false;
    boolean forcelocal = false;
    boolean forceremote = false;

    //The arguments are 
    // - dc number
    // - service
    // - component
    // - startTime (Something 'date' can parse, or just a time in ms from epoch)
    // - endTime (Same as start)
    // - outputDir

    //Indexes into args[] for the values passed on to MapReduce
    int dcNum = 0;
    int svcNum = 1;
    int compNum = 2;
    int startNum = 3;
    int endNum = 4;
    int outNum = 5;

    //Parsing through user arguments
    String[] args = new String[6];
    int count = 0; //Count created to track the parse of all arguments
    int argcount = 0; //Count created to track number of arguments to be passed on
    while (count < argv.length) {
        String arg = argv[count];
        count++;
        if (arg.equals("--")) {
            break;
        } else if (arg.startsWith("-")) {
            if (arg.equals("--v")) {
                quiet = tools.parseV(silent);
            } else if (arg.startsWith("--dateFormat=")) {
                arg = arg.replace("--dateFormat=", "");
                date_format = arg;
            } else if (arg.startsWith("--fieldSeparator=")) {
                arg = arg.replace("--fieldSeparator=", "");
                field_separator = arg;
            } else if (arg.startsWith("-dc=")) {
                arg = arg.replace("-dc=", "");
                args[dcNum] = arg;
                argcount++;
            } else if (arg.startsWith("-svc=")) {
                arg = arg.replace("-svc=", "");
                args[svcNum] = arg;
                argcount++;
            } else if (arg.startsWith("-comp=")) {
                arg = arg.replace("-comp=", "");
                args[compNum] = arg;
                argcount++;
            } else if (arg.startsWith("-start=")) {
                arg = arg.replace("-start=", "");
                args[startNum] = arg;
                argcount++;
            } else if (arg.startsWith("-end=")) {
                arg = arg.replace("-end=", "");
                args[endNum] = arg;
                argcount++;
            } else if (arg.startsWith("--out=")) {
                args[outNum] = tools.parseOut(arg, fs);
                argcount++;
            } else if (arg.startsWith("-D")) {
                //Generic -D options are forwarded to Hadoop
                D_options.add(arg);
            } else if (arg.equals("--silent")) {
                silent = tools.parseSilent(quiet);
            } else if (arg.equals("--log")) {
                log = true;
            } else if (arg.equals("--l")) {
                forcelocal = tools.parsePigMode(forceremote);
            } else if (arg.equals("--r")) {
                forceremote = tools.parsePigMode(forcelocal);
            } else {
                LogTools.logConsole(quiet, silent, error, "Unrecognized option: " + arg);
                System.exit(1);
            }
        } else {
            LogTools.logConsole(quiet, silent, error, "Unrecognized option: " + arg);
            System.exit(1);
        }
    }

    //Default output should be stdout represented by "-"
    if (args[outNum] == null) {
        args[outNum] = "-";
        argcount++;
        LogTools.logConsole(quiet, silent, info, "Output set to default stdout.");
    }

    //All six positional values must have been supplied (or defaulted)
    if (argcount < 6) {
        System.err.println(";****************************************" + "\n\t\t\t NOT ENOUGH ARGUMENTS\n"
                + "\n\tUSAGE: logcat [REQUIRED ARGUMENTS] [OPTIONS] (Order does not matter)"
                + "\n\tREQUIRED ARGUMENTS:" + "\n\t\t-dc=[DATACENTER]   Data Center."
                + "\n\t\t-svc=[SERVICE]      Service." + "\n\t\t-comp=[COMPONENT]   Component."
                + "\n\t\t-start=[START]      Start time." + "\n\t\t-end=[END]      End time." + "\n\tOptions:"
                + "\n\t\t--out=[DIRECTORY]         Desired output directory. If not defined, output to stdout."
                + "\n\t\t--v                     Verbose output."
                + "\n\t\t--r                     Force remote sort."
                + "\n\t\t--l                     Force local sort."
                + "\n\t\t--dateFormat=[FORMAT]     Valid formats are RFC822, RFC3164 (zero padded day),"
                + "\n\t                          RFC5424 (default), or any valid format string for FastDateFormat."
                + "\n\t\t--fieldSeparator=X      The separator to use to separate fields in intermediate"
                + "\n\t                             files.  Defaults to 'INFORMATION SEPARATOR ONE' (U+001F)."
                + "\n\t\t--silent      Output only the data." + "\n\t\t--log              Save all the logs.\n"
                + ";****************************************");
        System.exit(1);
    }

    //Parse time inputs for start and end of search
    args[startNum] = tools.parseDate(args[startNum]);
    args[endNum] = tools.parseDate(args[endNum]);
    tools.checkTime(args[startNum], args[endNum]);

    //Retrieve 'out' argument to determine where output of results should be sent
    String out = args[outNum];

    //Generate files to temporarily store output of mapreduce jobs and pig logs locally                 
    File local_output = File.createTempFile("tmp.", RandomStringUtils.randomAlphanumeric(10));
    if (log != true) {
        local_output.deleteOnExit();
    }
    File pig_tmp = File.createTempFile("tmp.", RandomStringUtils.randomAlphanumeric(10));
    if (log != true) {
        pig_tmp.deleteOnExit();
    }

    //Name the temp directory for storing results in HDFS
    String tmp = "tmp/logcat-" + RandomStringUtils.randomAlphanumeric(10);

    //Set args[outNum] to be temp output directory to be passed onto CatByTime instead of UserInput argument
    args[outNum] = (StringEscapeUtils.escapeJava(tmp) + "/rawlines");

    //Managing console output - deal with --v/--silent
    //NOTE(review): LOG is never used below -- confirm whether setConsoleOutput
    //is expected to take it, or drop the declaration.
    Logger LOG = LoggerFactory.getLogger(logcat.class);
    tools.setConsoleOutput(local_output, quiet, silent);

    //Create temp directory in HDFS to store logsearch logs before sorting
    tools.tmpDirHDFS(quiet, silent, fs, conf, tmp, log);

    LogTools.logConsole(quiet, silent, warn, "Gathering logs...");
    LogTools.logConsole(quiet, silent, warn,
            "Passing Arguments: DC=" + args[dcNum] + " Service=" + args[svcNum] + " Component=" + args[compNum]
                    + " StartTime=" + args[startNum] + " EndTime=" + args[endNum] + " Output=" + out);

    //Set standard configuration for running Mapreduce and PIG
    String queue_name = "logsearch";

    //Start Mapreduce job
    tools.runMRJob(quiet, silent, conf, D_options, out, LOG, field_separator, queue_name, args, "CatByTime",
            new CatByTime());

    //Before sorting, determine the number of records and size of the results found
    long foundresults = tools.getResults(local_output);
    long size = tools.getSize(foundresults, tmp, fs);

    //Run PIG job if results found
    tools.runPig(silent, quiet, foundresults, size, tmp, out, D_options, queue_name, date_format,
            field_separator, pig_tmp, fs, conf, forcelocal, forceremote);

    //Display location of tmp files if log enabled
    tools.logs(log, local_output, pig_tmp, tmp);

    return 0;
}

From source file:com.xtructure.xutil.valid.UTestValidateUtils.java

/**
 * Verifies validateArg with a single predicate: a non-null value passes, and
 * a null value raises an IllegalArgumentException with the expected message.
 */
public void validateArgWithOnePredicateBehavesAsExpected() {
    String argName = RandomStringUtils.randomAlphanumeric(10);
    boolean valid = validateArg(argName, new Object(), isNotNull());
    if (!valid) {
        throw new AssertionError();
    }
    try {
        validateArg(argName, null, isNotNull());
        // BUG FIX: the original fell through silently here, so a validateArg
        // that failed to throw would make the test pass vacuously.
        throw new AssertionError("expected IllegalArgumentException for null arg");
    } catch (IllegalArgumentException e) {
        // Message format is "<name> (<value>): <predicate>".
        if (!String.format("%s (%s): %s", argName, null, isNotNull()).equals(e.getMessage())) {
            throw new AssertionError();
        }
    }
}

From source file:massbank.BatchSearchWorker.java

/**
 * Executes a batch search job: marks the job as running, parses the uploaded
 * query file into (name, peak-list) pairs, searches each entry, writes the
 * results to a temporary file, optionally mails zipped summary/result files
 * to the user, and finally marks the job completed and cleans up.
 * (Original comments were mojibake-garbled Japanese; translated to English.)
 */
public void run() {
    File attacheDir = null;
    try {
        // Mark the job state as "Running".
        JobManager jobMgr = new JobManager();
        jobMgr.setRunning(this.jobId);

        GetConfig conf = new GetConfig(MassBankEnv.get(MassBankEnv.KEY_BASE_URL));
        this.serverUrl = conf.getServerUrl();

        String tempDir = MassBankEnv.get(MassBankEnv.KEY_TOMCAT_TEMP_PATH);
        File temp = File.createTempFile("batchRes", ".txt");
        String queryFilePath = (!this.fileName.equals("")) ? tempDir + this.fileName : "";
        String resultFilePath = (!temp.getName().equals("")) ? tempDir + temp.getName() : "";

        // ** open temporary file
        File f1 = new File(queryFilePath);
        File f2 = new File(resultFilePath);
        BufferedReader in = new BufferedReader(new FileReader(f1));
        this.writer = new PrintWriter(new BufferedWriter(new FileWriter(f2)));
        String line = "";
        String name = "";
        String peak = "";
        int peakLineCnt = 0;
        ArrayList<String> names = new ArrayList<String>();
        ArrayList<String> peaks = new ArrayList<String>();
        while ((line = in.readLine()) != null) {
            line = line.trim();

            // Skip comment lines.
            if (line.startsWith("//")) {
                continue;
            }
            // "Name:" tag starts a new record.
            else if (line.matches("^Name:.*")) {
                name = line.replaceFirst("^Name: *", "").trim();
            } else if (line.matches(".*:.*")) {
            } else if (line.equals("")) {
                // Blank line terminates the current record's peak block.
                if (peakLineCnt > 0) {
                    names.add(name);
                    peaks.add(peak);
                    name = "";
                    peak = "";
                    peakLineCnt = 0;
                }
            } else {
                // Accumulate peak data, ensuring each line ends with ';'.
                peak += line;
                if (!line.substring(line.length() - 1).equals(";")) {
                    peak += ";";
                }
                peakLineCnt++;
            }
        }
        in.close();
        if (peakLineCnt > 0) {
            names.add(name);
            peaks.add(peak);
        }

        // Run the search for every parsed record.
        for (int i = 0; i < names.size(); i++) {
            boolean ret = doSearch(names.get(i), peaks.get(i), i);
            // Stop early if the worker thread was terminated.
            if (isTerminated) {
                break;
            }
        }
        writer.flush();
        writer.close();

        if (isTerminated) {
            f2.delete();
            return;
        }

        if (!this.mailAddress.equals("")) {
            // Prepare the result e-mail.
            SendMailInfo info = new SendMailInfo(MassBankEnv.get(MassBankEnv.KEY_BATCH_SMTP),
                    MassBankEnv.get(MassBankEnv.KEY_BATCH_FROM), this.mailAddress);
            info.setFromName(MassBankEnv.get(MassBankEnv.KEY_BATCH_NAME));
            info.setSubject("MassBank Batch Service Results");
            info.setContents("Dear Users,\n\nThank you for using MassBank Batch Service.\n" + "\n"
                    + "The results for your request dated '" + this.time + "' are attached to this e-mail.\n"
                    + "\n" + "----------------------------------------------\n"
                    + "MassBank - High Quality Mass Spectral Database\n" + "  URL: " + serverUrl + "\n"
                    + "  E-mail: " + MassBankEnv.get(MassBankEnv.KEY_BATCH_FROM));

            // Create a unique directory for the attachment files.
            attacheDir = new File(tempDir + "batch_" + RandomStringUtils.randomAlphanumeric(9));
            while (attacheDir.exists()) {
                attacheDir = new File(tempDir + "batch_" + RandomStringUtils.randomAlphanumeric(9));
            }
            attacheDir.mkdir();

            // Create and zip the text-format result attachment.
            String dirPath = attacheDir.getPath();
            String textFilePath = dirPath + "/results.txt";
            String textZipPath = dirPath + "/results.zip";
            File textFile = new File(textFilePath);
            textFile.createNewFile();
            createTextFile(f2, textFile);
            FileUtil.makeZip(textZipPath, textFilePath);

            // Create and zip the HTML summary attachment.
            String summaryFilePath = dirPath + "/summary.html";
            String summaryZipPath = dirPath + "/summary.zip";
            File summaryFile = new File(summaryFilePath);
            summaryFile.createNewFile();
            createSummary(f2, summaryFile);
            FileUtil.makeZip(summaryZipPath, summaryFilePath);
            info.setFiles(new File[] { new File(summaryZipPath), new File(textZipPath) });

            // Send the e-mail.
            SendMail.send(info);
        }

        // Register the result file for the job.
        jobMgr.setResult(this.jobId, resultFilePath);

        // Mark the job state as "Completed".
        jobMgr.setCompleted(this.jobId);

        // Delete the query and working result files.
        f1.delete();
        f2.delete();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (attacheDir != null && attacheDir.isDirectory()) {
            try {
                FileUtils.forceDelete(attacheDir);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

From source file:com.flexive.tests.disttools.DistPackageTest.java

/**
 * Creates a uniquely named temporary directory under java.io.tmpdir for the
 * dist-package tests.
 *
 * @return the newly created directory
 */
private File createTempDir() {
    final String tempDir = System.getProperty("java.io.tmpdir");
    Assert.assertTrue(StringUtils.isNotBlank(tempDir));
    final File dir = new File(
            tempDir + File.separator + "flexive-dist-tests-temp-" + RandomStringUtils.randomAlphanumeric(32));
    dir.deleteOnExit();
    // BUG FIX: the original ignored the result of mkdirs(); a failed creation
    // would only surface later as an obscure I/O error. Fail fast instead.
    Assert.assertTrue(dir.mkdirs(), "Could not create temp directory " + dir);
    // NOTE(review): deleteOnExit() only removes the directory if it is empty
    // at JVM exit -- callers must clean up its contents themselves.
    return dir;
}

From source file:apim.restful.importexport.APIService.java

/**
 * This is the service which is used to import an API. All relevant API data will be included upon the creation of
 * the API. Depending on the choice of the user, provider of the imported API will be preserved or modified.
 *
 * @param uploadedInputStream   input stream from the REST request carrying the API archive
 * @param defaultProviderStatus user choice to keep ("true", the default) or replace the API provider
 * @param httpHeaders           HTTP headers for the authentication mechanism
 * @return 201 on success, 400 if the working folder cannot be created,
 *         401 if the caller is not authorized, 500 on provider-initialization
 *         or import errors
 */
@POST
@Path("/import-api")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_JSON)
public Response importAPI(@Multipart("file") InputStream uploadedInputStream,
        @QueryParam("preserveProvider") String defaultProviderStatus, @Context HttpHeaders httpHeaders) {

    boolean isProviderPreserved = true;

    //Check if the URL parameter value is specified, otherwise the default value "true" is used
    if (APIImportExportConstants.STATUS_FALSE.equalsIgnoreCase(defaultProviderStatus)) {
        isProviderPreserved = false;
    }

    try {
        Response authorizationResponse = AuthenticatorUtil.authorizeUser(httpHeaders);

        //Process continues only if the user is authorized
        if (Response.Status.OK.getStatusCode() == authorizationResponse.getStatus()) {

            String currentUser = AuthenticatorUtil.getAuthenticatedUserName();
            APIImportUtil.initializeProvider(currentUser);

            //Temporary directory is used to create the required folders
            String currentDirectory = System.getProperty(APIImportExportConstants.TEMP_DIR);
            String createdFolders = File.separator
                    + RandomStringUtils.randomAlphanumeric(APIImportExportConstants.TEMP_FILENAME_LENGTH)
                    + File.separator;
            File importFolder = new File(currentDirectory + createdFolders);
            boolean folderCreateStatus = importFolder.mkdirs();

            //API import process starts only if the required folder is created successfully
            if (folderCreateStatus) {

                //Save the uploaded archive into the working folder, extract it
                //and import the extracted API definition.
                String uploadFileName = APIImportExportConstants.UPLOAD_FILE_NAME;
                String absolutePath = currentDirectory + createdFolders;
                APIImportUtil.transferFile(uploadedInputStream, uploadFileName, absolutePath);

                String extractedFolderName = APIImportUtil
                        .extractArchive(new File(absolutePath + uploadFileName), absolutePath);

                APIImportUtil.importAPI(absolutePath + extractedFolderName, currentUser, isProviderPreserved);

                //NOTE(review): deleteOnExit() only removes an *empty* directory
                //at JVM exit, so the extracted contents linger -- confirm
                //whether a recursive cleanup is intended here.
                importFolder.deleteOnExit();
                return Response.status(Status.CREATED).entity("API imported successfully.\n").build();
            } else {
                return Response.status(Status.BAD_REQUEST).build();
            }
        } else {
            return Response.status(Status.UNAUTHORIZED).entity("Not authorized to import API.\n").build();
        }
    } catch (APIExportException e) {
        //NOTE(review): the exception detail is discarded here (not logged),
        //which makes provider-initialization failures hard to diagnose.
        return Response.status(Status.INTERNAL_SERVER_ERROR).entity("Error in initializing API provider.\n")
                .build();
    } catch (APIImportException e) {
        String errorDetail = new Gson().toJson(e.getErrorDescription());
        return Response.serverError().entity(errorDetail).build();
    }
}

From source file:com.google.cloud.bigtable.hbase.TestPut.java

@Test(expected = RetriesExhaustedWithDetailsException.class)
@Category(KnownGap.class)
public void testIOExceptionOnFailedPut() throws Exception {
    // A put into a column family that does not exist must fail; the expected
    // RetriesExhaustedWithDetailsException is declared on the @Test annotation.
    Table table = getConnection().getTable(TABLE_NAME);

    byte[] row = Bytes.toBytes("testrow-" + RandomStringUtils.randomAlphanumeric(8));
    byte[] missingFamily = Bytes.toBytes("badcolumnfamily-" + RandomStringUtils.randomAlphanumeric(8));
    byte[] column = Bytes.toBytes("testQualifier-" + RandomStringUtils.randomAlphanumeric(8));
    byte[] cellValue = Bytes.toBytes("testValue-" + RandomStringUtils.randomAlphanumeric(8));

    Put mutation = new Put(row);
    mutation.addColumn(missingFamily, column, cellValue);
    table.put(mutation);
}

From source file:com.ning.metrics.collector.hadoop.processing.LocalSpoolManager.java

/**
 * Get the full file path in HDFS/*from   www  .ja  va2s.  c  o m*/
 * <p/>
 * The Hadoop fileName includes a number of things to avoid collisions:
 * ip  to avoid conflicts between machines
 * fileExtension  (serialization-type-specific) so multiple file extensions can be written to same directory
 * flushCount  so the same queue can write multiple times
 * queueCreationTimestamp  so if, for instance, we shut down and restart the collector within an hour, their writes won't conflict
 *
 * @param flushCount number of flushes for this queue
 * @return output path for the spool in HDFS
 */
public String toHadoopPath(final int flushCount) {
    return String.format("%s/%s-%d-%s-%s-f%d.%s", hdfsDir, config.getLocalIp(), config.getLocalPort(),
            RandomStringUtils.randomAlphanumeric(4), dateFormatter.print(timeStamp), flushCount,
            serializationType.getFileSuffix());
}

From source file:fr.mby.portal.coreimpl.session.MemorySessionManager.java

/** Cryptographically strong RNG backing portal session-id generation. */
private static final java.security.SecureRandom SESSION_ID_RNG = new java.security.SecureRandom();

/**
 * Generate a Portal session Id.
 *
 * @param request
 * @return the generated portal session Id (16 alphanumeric characters,
 *         unique among ids generated by this manager)
 */
protected String genSessionId(final HttpServletRequest request) {
    String portalSessionId = null;

    do {
        // SECURITY FIX: session ids are bearer tokens; the no-arg
        // randomAlphanumeric() draws from a shared, predictable Random.
        // Use the SecureRandom-backed overload instead (same alphabet/length).
        portalSessionId = RandomStringUtils.random(16, 0, 0, true, true, null, SESSION_ID_RNG);
        // Loop until the id is not one we have handed out before.
        // NOTE(review): the contains/add pair below is not atomic -- confirm
        // generatedSessionIds is a concurrent set or this path is serialized.
    } while (this.generatedSessionIds.contains(portalSessionId));

    this.generatedSessionIds.add(portalSessionId);

    return portalSessionId;
}