Example usage for java.lang Thread getContextClassLoader

Introduction

On this page you can find example usages of java.lang.Thread.getContextClassLoader().

Prototype

@CallerSensitive
public ClassLoader getContextClassLoader() 

Document

Returns the context ClassLoader for this thread.
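
Most of the examples below share a single pattern: read the current context ClassLoader with getContextClassLoader(), temporarily install a different loader via setContextClassLoader(), run some code under it, and restore the original loader in a finally block. Here is a minimal, self-contained sketch of that pattern; the empty URLClassLoader stands in for a real plugin or coprocessor loader and is purely illustrative:

import java.net.URL;
import java.net.URLClassLoader;

public class ContextClassLoaderDemo {
    public static void main(String[] args) {
        Thread currentThread = Thread.currentThread();
        // Save the current context ClassLoader so it can be restored later.
        ClassLoader original = currentThread.getContextClassLoader();
        // Illustrative replacement; real code would point this at plugin jars.
        ClassLoader replacement = new URLClassLoader(new URL[0], original);
        try {
            currentThread.setContextClassLoader(replacement);
            // Code run here (for example a plugin callback) resolves classes
            // through the replacement loader whenever it consults the TCCL.
            System.out.println(currentThread.getContextClassLoader());
        } finally {
            // Always restore the original loader, even if the guarded code throws.
            currentThread.setContextClassLoader(original);
        }
    }
}

Restoring the saved loader in a finally block matters: the context ClassLoader is thread state, and on pooled threads a leaked loader would bleed into unrelated work.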

Usage

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * @param info the HRegionInfo of the region whose WAL is being restored
 * @param logKey the HLogKey of the WAL entry
 * @param logEdit the WALEdit to be replayed
 * @return true if default behavior should be bypassed, false otherwise
 * @throws IOException
 */
public boolean preWALRestore(final HRegionInfo info, final HLogKey logKey, final WALEdit logEdit)
        throws IOException {
    boolean bypass = false;
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                ((RegionObserver) env.getInstance()).preWALRestore(ctx, info, logKey, logEdit);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return bypass;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * @param fs filesystem to read from
 * @param p path to the file
 * @param in {@link FSDataInputStreamWrapper}
 * @param size Full size of the file
 * @param cacheConf the cache configuration
 * @param r original reference file. This will be not null only when reading a split file.
 * @param reader the base reader instance
 * @return The reader to use
 * @throws IOException
 */
public StoreFile.Reader postStoreFileReaderOpen(final FileSystem fs, final Path p,
        final FSDataInputStreamWrapper in, final long size, final CacheConfig cacheConf, final Reference r,
        StoreFile.Reader reader) throws IOException {
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                reader = ((RegionObserver) env.getInstance()).postStoreFileReaderOpen(ctx, fs, p, in, size,
                        cacheConf, r, reader);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return reader;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * Called prior to rewriting the store files selected for compaction
 * @param store the store being compacted
 * @param scanner the scanner used to read store data during compaction
 * @param scanType type of Scan
 * @param request the compaction that will be executed
 * @return the scanner to use during compaction, or null if the compaction was bypassed
 * @throws IOException
 */
public InternalScanner preCompact(final Store store, final InternalScanner scanner, final ScanType scanType,
        final CompactionRequest request) throws IOException {
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    boolean bypass = false;
    InternalScanner s = scanner;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                s = ((RegionObserver) env.getInstance()).preCompact(ctx, store, s, scanType, request);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return bypass ? null : s;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * @param s the scanner
 * @param results the result set returned by the region server
 * @param limit the maximum number of results to return
 * @return 'has next' indication to client if bypassing default behavior, or
 * null otherwise
 * @throws IOException
 */
public Boolean preScannerNext(final InternalScanner s, final List<Result> results, final int limit)
        throws IOException {
    boolean bypass = false;
    boolean hasNext = false;
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                hasNext = ((RegionObserver) env.getInstance()).preScannerNext(ctx, s, results, limit, hasNext);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return bypass ? hasNext : null;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * @param fs filesystem to read from
 * @param p path to the file
 * @param in {@link FSDataInputStreamWrapper}
 * @param size Full size of the file
 * @param cacheConf the cache configuration
 * @param r original reference file. This will be not null only when reading a split file.
 * @return a Reader instance to use instead of the base reader if overriding
 * default behavior, null otherwise
 * @throws IOException
 */
public StoreFile.Reader preStoreFileReaderOpen(final FileSystem fs, final Path p,
        final FSDataInputStreamWrapper in, final long size, final CacheConfig cacheConf, final Reference r)
        throws IOException {
    StoreFile.Reader reader = null;
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                reader = ((RegionObserver) env.getInstance()).preStoreFileReaderOpen(ctx, fs, p, in, size,
                        cacheConf, r, reader);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return reader;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * @param row row to check
 * @param family column family
 * @param qualifier column qualifier
 * @param compareOp the comparison operation
 * @param comparator the comparator
 * @param put data to put if check succeeds
 * @return true or false to return to client if default processing should
 * be bypassed, or null otherwise
 * @throws IOException e
 */
public Boolean preCheckAndPut(final byte[] row, final byte[] family, final byte[] qualifier,
        final CompareOp compareOp, final ByteArrayComparable comparator, final Put put) throws IOException {
    boolean bypass = false;
    boolean result = false;
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                result = ((RegionObserver) env.getInstance()).preCheckAndPut(ctx, row, family, qualifier,
                        compareOp, comparator, put, result);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return bypass ? result : null;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * @param row row to check
 * @param family column family
 * @param qualifier column qualifier
 * @param compareOp the comparison operation
 * @param comparator the comparator
 * @param delete delete to commit if check succeeds
 * @return true or false to return to client if default processing should
 * be bypassed, or null otherwise
 * @throws IOException e
 */
public Boolean preCheckAndDelete(final byte[] row, final byte[] family, final byte[] qualifier,
        final CompareOp compareOp, final ByteArrayComparable comparator, final Delete delete)
        throws IOException {
    boolean bypass = false;
    boolean result = false;
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                result = ((RegionObserver) env.getInstance()).preCheckAndDelete(ctx, row, family, qualifier,
                        compareOp, comparator, delete, result);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return bypass ? result : null;
}

From source file:architecture.ee.plugin.impl.PluginManagerImpl.java

public List loadPlugin(File pluginDir, PluginEntityObject pluginDbBean) throws PluginException {

    if (!ApplicationHelper.isSetupComplete()) {
        return Collections.emptyList();
    }

    log.debug((new StringBuilder()).append("Loading action from: ").append(pluginDir.getName()).toString());
    Document pluginXML;
    try {
        pluginXML = PluginUtils.getPluginConfiguration(pluginDir);
    } catch (DocumentException e) {
        pluginXML = null;
    }

    if (pluginXML == null) {
        String msg = (new StringBuilder()).append("Plugin ").append(pluginDir.getName())
                .append(" could not be loaded: no plugin.xml file found").toString();
        log.error(msg);
        brokenPlugins.put(pluginDir.getName(), "No plugin.xml found.");
        throw new PluginException(msg);
    }

    ArrayList results = Lists.newArrayList();

    String pluginName;
    PluginClassLoader pluginLoader;

    Node pluginNameNode = pluginXML.selectSingleNode("/plugin/name");

    pluginName = pluginNameNode.getText();

    isValidVersion(pluginName, pluginXML, pluginDir);

    pluginLoader = getPluginClassloader(pluginName, pluginDir);
    if (pluginLoader == null) {
        return Collections.emptyList();
    }
    pluginLoader.initialize();
    log.debug("Plugin classloader urls:" + pluginLoader.getURLS());

    Plugin plugin;
    PluginMetaDataImpl metaData;
    ConfigurationContext context;
    Thread currentThread;
    ClassLoader oldLoader;

    Node classNode = pluginXML.selectSingleNode("/plugin/class");
    if (classNode != null) {
        String className = classNode.getText();
        try {
            log.debug("Plugin class:" + className);
            plugin = (Plugin) pluginLoader.loadClass(className).newInstance();
            log.debug("Plugin object:" + plugin);
            log.debug("******************************** ");
        } catch (Throwable e) {
            brokenPlugins.put(pluginDir.getName(), "Failed to configure class loader.");
            log.debug(e);
            throw new PluginException(e);
        }
    } else {
        plugin = new DummyPlugin(pluginName);
    }
    log.debug("===============================1============");
    metaData = new PluginMetaDataImpl(plugin, pluginLoader, this, pluginXML, pluginDir);
    log.debug("=========================2==================");
    metaData.setPluginDbBean(pluginDbBean);
    log.debug("=======================3====================");
    registerPlugin(plugin, pluginDir);
    log.debug("=======================4====================");
    pluginMeta.put(pluginName, metaData);
    log.debug("======================5=====================");
    pluginMeta.put(plugin, metaData);
    log.debug("====================6=======================");
    context = new ConfigurationContext(metaData);
    log.debug("=======================7====================");
    currentThread = Thread.currentThread();
    oldLoader = currentThread.getContextClassLoader();

    log.debug("===========================================");
    try {
        currentThread.setContextClassLoader(pluginLoader.getClassLoader());

        log.debug("Plugin configures:" + configurators.size());

        for (PluginConfigurator configurator : configurators) {
            log.debug("Plugin configure:" + configurator.getClass().getName());
            configurator.configure(context);
        }
    } catch (Exception e) {
        brokenPlugins.put(pluginDir.getName(), "Failed to configure class loader.");
        throw new PluginException(e);
    } finally {
        if (oldLoader != null)
            currentThread.setContextClassLoader(oldLoader);
    }

    log.debug("===========================================");

    int pluginDbVersion = getPluginDatabaseVersion(metaData);

    boolean init = true;
    if (pluginDbVersion > 0 && metaData.getDatabaseVersion() != pluginDbVersion) {
        brokenPlugins.put(pluginDir.getName(),
                (new StringBuilder()).append("Database version mismatches plugin version. Current: ")
                        .append(pluginDbVersion).append(", Required: ").append(metaData.getDatabaseVersion())
                        .toString());
        init = false;
    }
    if (init) {
        try {
            plugin.init();
            firePluginCreatedEvent(pluginDir.getName(), plugin);
        } catch (IncompatibleClassChangeError e) {
            log.error((new StringBuilder()).append("Unable to initialize plugin, plugin ").append(pluginName)
                    .append(" binds to an old class version and needs to be rebuilt.").toString());
            results.add(PluginRequiresRebuildResult.getPluginRequiresRebuildResult());
            brokenPlugins.put(pluginDir.getName(), "Failed to initialize.");
        }
        results.addAll(context.getResults());
        ChainingClassLoader.clearCache();
    }
    return results;
}

From source file:org.apache.hadoop.hive.ql.exec.Utilities.java

/**
 * Remove elements from the classpath.
 *
 * @param pathsToRemove
 *          Array of classpath elements
 */
public static void removeFromClassPath(String[] pathsToRemove) throws IOException {
    Thread curThread = Thread.currentThread();
    URLClassLoader loader = (URLClassLoader) curThread.getContextClassLoader();
    Set<URL> newPath = new HashSet<URL>(Arrays.asList(loader.getURLs()));

    for (String onestr : pathsToRemove) {
        URL oneurl = urlFromPathString(onestr);
        if (oneurl != null) {
            newPath.remove(oneurl);
        }
    }
    JavaUtils.closeClassLoader(loader);
    // This loader is closed, remove it from cached registry loaders to avoid removing it again.
    Registry reg = SessionState.getRegistry();
    if (reg != null) {
        reg.removeFromUDFLoaders(loader);
    }

    loader = new UDFClassLoader(newPath.toArray(new URL[0]));
    curThread.setContextClassLoader(loader);
    SessionState.get().getConf().setClassLoader(loader);
}

From source file:com.jaspersoft.jasperserver.api.engine.jasperreports.service.impl.EngineServiceImpl.java

protected OrigContextClassLoader setContextClassLoader(ExecutionContext context, Map unitResources,
        boolean inMemoryUnit, RepositoryContextHandle repositoryContextHandle) {
    Thread thread = Thread.currentThread();
    ClassLoader origClassLoader = thread.getContextClassLoader();
    ClassLoader jarsClassLoader;
    ClassLoader newClassLoader = null;

    List jarFiles = getJarFiles(context, unitResources, !inMemoryUnit);
    if (jarFiles.isEmpty()) {
        jarsClassLoader = origClassLoader;
    } else {
        newClassLoader = jarsClassLoader = getJarsClassLoader(origClassLoader, jarFiles);
    }

    RepositoryContext repositoryContext = repositoryContextHandle == null ? null
            : repositoryContextHandle.getRepositoryContext();
    if (repositoryContext != null || RepositoryUtil.hasThreadRepositoryContext()) {
        // Use the repository context for the keys? Not required for now, as the context is always freshly set on the thread.
        Map resourceBundleKeys = getResourceBundleKeys(context, unitResources);
        if (!resourceBundleKeys.isEmpty()) {
            newClassLoader = getResourcesClassLoader(jarsClassLoader, resourceBundleKeys, inMemoryUnit,
                    repositoryContext);
        }
    }

    OrigContextClassLoader origContext;
    if (newClassLoader == null) {
        origContext = OrigContextClassLoader.NOT_SET;
    } else {
        origContext = new OrigContextClassLoader(origClassLoader, jarFiles);
        thread.setContextClassLoader(newClassLoader);
    }

    return origContext;
}