Example usage for java.lang.ThreadLocal ThreadLocal()

Introduction

On this page you can find example usages of the java.lang.ThreadLocal constructor ThreadLocal().

Prototype

public ThreadLocal() 

Document

Creates a thread local variable.
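
A minimal sketch of the constructor on its own, before the project examples below: each thread that calls get() or set() on the variable sees its own independent value. The class and field names here are illustrative, not taken from any of the listed projects.

public class ThreadLocalDemo {
    // each thread has its own copy; get() returns null until that thread calls set()
    private static final ThreadLocal<String> CURRENT_USER = new ThreadLocal<String>();

    public static void main(String[] args) throws InterruptedException {
        CURRENT_USER.set("main-thread-user");
        Thread worker = new Thread(new Runnable() {
            public void run() {
                // prints null: this thread never called set()
                System.out.println("worker sees: " + CURRENT_USER.get());
            }
        });
        worker.start();
        worker.join();
        // prints main-thread-user: the worker did not affect the main thread's value
        System.out.println("main sees: " + CURRENT_USER.get());
    }
}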

Usage

From source file: org.jcf.GraphicObjectHandlerImpl.java

/**
 * Constructor to use for a given room and nickname.
 * @param nikName room-unique nickname
 * @param room room name
 */
GraphicObjectHandlerImpl(String nikName, String room) {
    Assert.notNull(nikName);
    Assert.notNull(room);

    this.room = room;
    this.nikName = nikName;
    objects = Collections.synchronizedMap(new HashMap<Id, GraphicObject>());
    listener = Collections.synchronizedList(new ArrayList<GraphicObjectEventListener>());
    threadLocalGraphicalMessage = new ThreadLocal<GraphicMessage>();
}
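
The constructor above only creates the empty ThreadLocal; a plausible accessor in the same class (hypothetical, and assuming GraphicMessage has a no-arg constructor) would lazily build one GraphicMessage per calling thread:

// hypothetical get-or-create accessor for the per-thread message
GraphicMessage getCurrentGraphicMessage() {
    GraphicMessage msg = threadLocalGraphicalMessage.get();
    if (msg == null) {
        msg = new GraphicMessage();
        threadLocalGraphicalMessage.set(msg);
    }
    return msg;
}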

From source file: com.icloud.framework.http.heritrix.ArchiveUtils.java

private static ThreadLocal<SimpleDateFormat> threadLocalDateFormat(final String pattern) {
    ThreadLocal<SimpleDateFormat> tl = new ThreadLocal<SimpleDateFormat>() {
        @Override
        protected SimpleDateFormat initialValue() {
            SimpleDateFormat df = new SimpleDateFormat(pattern);
            df.setTimeZone(TimeZone.getTimeZone("GMT"));
            return df;
        }
    };
    return tl;
}
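
SimpleDateFormat is not thread-safe, which is why each thread receives its own instance here rather than sharing one static formatter. A caller would typically hold the returned ThreadLocal in a static field and format through it; a sketch (field and method names illustrative):

private static final ThreadLocal<SimpleDateFormat> TIMESTAMP12 = threadLocalDateFormat("yyyyMMddHHmm");

public static String format12DigitDate(Date d) {
    // get() runs initialValue() the first time each thread arrives, then reuses that instance
    return TIMESTAMP12.get().format(d);
}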

From source file: org.apache.lens.cube.metadata.UpdatePeriod.java

private static DateFormat getHourlyFormat() {
    if (hourlyFormat == null) {
        hourlyFormat = new ThreadLocal<DateFormat>() {
            @Override
            protected SimpleDateFormat initialValue() {
                return new SimpleDateFormat(HOURLY.formatStr());
            }
        };
    }
    return hourlyFormat.get();
}
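
Since Java 8, the same per-thread initialization can be written with ThreadLocal.withInitial, avoiding the anonymous subclass. A sketch of an equivalent, assuming the same hourlyFormat field and HOURLY constant (the unsynchronized null check is a benign race: two threads may both assign, but the resulting instances behave identically):

private static DateFormat getHourlyFormat() {
    if (hourlyFormat == null) {
        hourlyFormat = ThreadLocal.withInitial(() -> new SimpleDateFormat(HOURLY.formatStr()));
    }
    return hourlyFormat.get();
}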

From source file: org.jumpmind.metl.core.runtime.resource.SftpDirectory.java

public SftpDirectory(Resource resource, String server, Integer port, String user, String password,
        String basePath, Integer connectionTimeout, boolean mustExist) {

    this.server = server;
    this.port = port;
    this.user = user;
    this.password = password;
    this.basePath = basePath;
    this.connectionTimeout = connectionTimeout;
    this.mustExist = mustExist;
    this.threadSession = new ThreadLocal<Session>();
    this.threadChannels = new ThreadLocal<Map<Integer, ChannelSftp>>();
}
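
Keeping the JSch Session and SFTP channels in ThreadLocals gives each worker thread its own connection state. A hypothetical connect helper in the same class (the method name and details are assumptions, not from the Metl source):

// hypothetical helper: reuse this thread's session or open a fresh one
protected Session openOrReuseSession() throws JSchException {
    Session session = threadSession.get();
    if (session == null || !session.isConnected()) {
        JSch jsch = new JSch();
        session = jsch.getSession(user, server, port);
        session.setPassword(password);
        session.connect(connectionTimeout);
        threadSession.set(session);
    }
    return session;
}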

From source file: org.archive.util.ArchiveUtils.java

private static ThreadLocal<SimpleDateFormat> threadLocalDateFormat(final String pattern) {
    ThreadLocal<SimpleDateFormat> tl = new ThreadLocal<SimpleDateFormat>() {
        @Override
        protected SimpleDateFormat initialValue() {
            SimpleDateFormat df = new SimpleDateFormat(pattern, Locale.ENGLISH);
            df.setTimeZone(TimeZone.getTimeZone("GMT"));
            return df;
        }
    };
    return tl;
}
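
This is the same helper as the com.icloud.framework.http.heritrix copy above (that project appears to have vendored the class from Heritrix), with one refinement: the formatter is pinned to Locale.ENGLISH, so patterns containing month or day names do not vary with the JVM's default locale.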

From source file: org.jboss.dashboard.commons.filter.AbstractFilter.java

public AbstractFilter() {
    filterProperties = new ArrayList();
    filterCondition = null;
    wildcard = "\u002a"; // "*"
    gt = "\u003e"; // ">"
    lt = "\u003c"; // "<"
    gtOrEq = "\u003e\u003d"; // ">="
    ltOrEq = "\u003c\u003d"; // "<="
    locale = Locale.getDefault();
    _filterVarValues = new HashMap();
    _bshIntepreterThread = new ThreadLocal();
}
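
The field here is created with the raw type. A generically typed equivalent, assuming from the field name that each thread holds a BeanShell interpreter, would be:

// typed equivalent (element type inferred from the field name; an assumption)
private ThreadLocal<bsh.Interpreter> _bshIntepreterThread = new ThreadLocal<bsh.Interpreter>();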

From source file: com.strategicgains.docussandra.controller.perf.remote.mongo.MongoLoader.java

public static void loadMongoData(MongoClientURI uri, final int NUM_WORKERS, Database database,
        final int numDocs, final PerfTestParent clazz) {
    logger.info("------------Loading Data into: " + database.name() + " with MONGO!------------");
    try {
        try {
            MongoClient mongoClient = new MongoClient(uri);
            mongoClient.setWriteConcern(WriteConcern.MAJORITY);
            DB db = mongoClient.getDB(database.name());
            final DBCollection coll = db.getCollection(database.name());
            ArrayList<Thread> workers = new ArrayList<>(NUM_WORKERS + 1);
            int docsPerWorker = numDocs / NUM_WORKERS;
            try {
                List<Document> docs = clazz.getDocumentsFromFS();
                ArrayList<List<Document>> documentQueues = new ArrayList<>(NUM_WORKERS + 1);
                int numDocsAssigned = 0;
                while ((numDocsAssigned + 1) < numDocs) {
                    int start = numDocsAssigned;
                    int end = numDocsAssigned + docsPerWorker;
                    if (end > numDocs) {
                        end = numDocs - 1;
                    }
                    documentQueues.add(new ArrayList<>(docs.subList(start, end)));
                    numDocsAssigned = end;
                }
                for (final List<Document> queue : documentQueues) {
                    workers.add(new Thread() {
                        @Override
                        public void run() {
                            for (Document d : queue) {
                                DBObject o = (DBObject) JSON.parse(d.object());
                                coll.save(o);
                            }
                            logger.info("Thread " + Thread.currentThread().getName() + " is done. It processed "
                                    + queue.size() + " documents.");
                        }
                    });
                }
            } catch (UnsupportedOperationException e) { // we can't read everything in at once
                //all we need to do in this block is find a way to set "workers"
                for (int i = 0; i < NUM_WORKERS; i++) {
                    workers.add(new Thread() {
                        private final int chunk = (int) (Math.random() * 100) + 150; // pick a random chunk size so the threads do not all hit the FS at the same time and cause a bottleneck

                        @Override
                        public void run() {
                            ThreadLocal<Integer> counter = new ThreadLocal<>();
                            counter.set(0); // autoboxed; new Integer(0) is deprecated
                            try {
                                List<Document> docs = clazz.getDocumentsFromFS(chunk);//grab a handful of documents
                                while (docs.size() > 0) {
                                    for (Document d : docs)//process the documents we grabbed
                                    {
                                        DBObject o = (DBObject) JSON.parse(d.object());
                                        coll.save(o);
                                        counter.set(counter.get() + 1);
                                    }
                                    docs = clazz.getDocumentsFromFS(chunk);//grab another handful of documents
                                }
                                logger.info("Thread " + Thread.currentThread().getName()
                                        + " is done. It processed " + counter.get() + " documents.");
                            } catch (IOException | ParseException e) {
                                logger.error("Couldn't read from document", e);
                            }
                        }
                    });
                }
            }

            long start = new Date().getTime();
            //start your threads!
            for (Thread t : workers) {
                t.start();
            }
            logger.info("All threads started, waiting for completion.");
            boolean allDone = false;
            boolean first = true;
            while (!allDone || first) {
                first = false;
                boolean done = true;
                for (Thread t : workers) {
                    if (t.isAlive()) {
                        done = false;
                        logger.info("Thread " + t.getName() + " is still running.");
                        break;
                    }
                }
                if (done) {
                    allDone = true;
                } else {
                    logger.info("We still have workers running...");
                    try {
                        Thread.sleep(10000);
                    } catch (InterruptedException e) {
                        // ignored: the outer loop simply polls the workers again
                    }
                }
            }
            long end = new Date().getTime();
            long milliseconds = end - start;
            double seconds = (double) milliseconds / 1000d;
            output.info("Done loading data using: " + NUM_WORKERS + " workers. Took: " + seconds + " seconds");
            double tpms = (double) numDocs / (double) milliseconds;
            double tps = tpms * 1000;
            double transactionTime = (double) milliseconds / (double) numDocs;
            output.info(database.name() + " Mongo Average Transactions Per Second: " + tps);
            output.info(
                    database.name() + " Mongo Average Transaction Time (in milliseconds): " + transactionTime);

        } catch (UnknownHostException e) {
            logger.error("Couldn't connect to Mongo Server", e);
        }
    } catch (IOException | ParseException e) {
        logger.error("Couldn't read data.", e);
    }
}
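
One detail worth noting: the ThreadLocal<Integer> counter in the fallback workers is created, written, and read entirely within one thread's run() method, so a plain local variable would express the same per-thread count with less machinery. A sketch of the equivalent loop body:

// equivalent counting without ThreadLocal: a local variable is already thread-confined
int processed = 0;
List<Document> docs = clazz.getDocumentsFromFS(chunk);
while (docs.size() > 0) {
    for (Document d : docs) {
        coll.save((DBObject) JSON.parse(d.object()));
        processed++;
    }
    docs = clazz.getDocumentsFromFS(chunk);
}
logger.info("Thread " + Thread.currentThread().getName() + " processed " + processed + " documents.");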

From source file: org.apache.lens.cube.metadata.UpdatePeriod.java

private static DateFormat getDailyFormat() {
    if (dailyFormat == null) {
        dailyFormat = new ThreadLocal<DateFormat>() {
            @Override
            protected SimpleDateFormat initialValue() {
                return new SimpleDateFormat(DAILY.formatStr());
            }
        };
    }
    return dailyFormat.get();
}
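
This mirrors getHourlyFormat() above: the field is lazily assigned on first use (an unsynchronized but benign race, since any thread would install an equivalent formatter), and each thread then formats dates with its own SimpleDateFormat instance.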

From source file: org.xenei.bloomgraph.bloom.sql.DBIO.java

/**
 * Constructor
 * 
 * @param dataSource
 *            The datasource for the connections.
 * @param sqlCommands
 *            The SQL commands for the database implementation.
 * @throws SQLException
 *             on error.
 */
public DBIO(final DataSource dataSource, final SQLCommands sqlCommands) throws SQLException {
    this.dataSource = dataSource;
    this.statistics = new BloomGraphStatistics();
    this.sqlCommands = sqlCommands;
    this.threadConn = new ThreadLocal<Connection>();
    createSchema();
}
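
A typical companion to this constructor hands each thread its own pooled Connection; the accessor below is a hypothetical sketch, not taken from the DBIO source:

// hypothetical accessor: one pooled connection per thread
private Connection getThreadConnection() throws SQLException {
    Connection conn = threadConn.get();
    if (conn == null || conn.isClosed()) {
        conn = dataSource.getConnection();
        threadConn.set(conn);
    }
    return conn;
}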

From source file: com.netflix.hollow.jsonadapter.HollowJsonAdapter.java

public HollowJsonAdapter(HollowWriteStateEngine stateEngine, String typeName) {
    super(typeName, "populate");
    this.stateEngine = stateEngine;
    this.hollowSchemas = new HashMap<String, HollowSchema>();
    this.canonicalObjectFieldMappings = new HashMap<String, ObjectFieldMapping>();
    this.passthroughDecoratedTypes = new HashSet<String>();

    for (HollowSchema schema : stateEngine.getSchemas()) {
        hollowSchemas.put(schema.getName(), schema);
        if (schema instanceof HollowObjectSchema)
            canonicalObjectFieldMappings.put(schema.getName(), new ObjectFieldMapping(schema.getName(), this));
    }

    // TODO: Special 'passthrough' processing.
    this.passthroughRecords = new ThreadLocal<PassthroughWriteRecords>();
}
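
Because the get-or-create pattern recurs across these examples, Java 8 lets the field carry its own per-thread default instead; a sketch, assuming PassthroughWriteRecords has an accessible no-arg constructor:

// Java 8 alternative: the ThreadLocal builds each thread's default value itself
this.passthroughRecords = ThreadLocal.withInitial(PassthroughWriteRecords::new);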