Example usage for java.lang Thread setContextClassLoader

Introduction

On this page you can find example usages of java.lang.Thread.setContextClassLoader(ClassLoader).

Prototype

public void setContextClassLoader(ClassLoader cl) 

Document

Sets the context ClassLoader for this Thread.
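
The common pattern, which all of the HBase examples below follow, is to save the current context ClassLoader, install the new one, and restore the original in a finally block so the swap cannot leak past the call. Below is a minimal sketch of that pattern; the ContextClassLoaderUtil class and callWith method are illustrative names, not part of any library.

import java.util.concurrent.Callable;

public final class ContextClassLoaderUtil {

    /**
     * Runs the given task with the supplied ClassLoader installed as the
     * context ClassLoader of the current thread, restoring the previous
     * loader afterwards even if the task throws.
     */
    public static <T> T callWith(ClassLoader loader, Callable<T> task) throws Exception {
        Thread currentThread = Thread.currentThread();
        ClassLoader previous = currentThread.getContextClassLoader();
        try {
            // Make classes visible to libraries that resolve types via the
            // context ClassLoader (for example java.util.ServiceLoader).
            currentThread.setContextClassLoader(loader);
            return task.call();
        } finally {
            // Always restore the previous loader, even when task.call() throws.
            currentThread.setContextClassLoader(previous);
        }
    }
}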

Usage

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * @param delete The Delete object
 * @param edit The WALEdit object.
 * @param durability The durability used
 * @return true if default processing should be bypassed
 * @exception IOException Exception
 */
public boolean preDelete(final Delete delete, final WALEdit edit, final Durability durability)
        throws IOException {
    boolean bypass = false;
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                ((RegionObserver) env.getInstance()).preDelete(ctx, delete, edit, durability);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return bypass;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * @param row row to check
 * @param family column family
 * @param qualifier column qualifier
 * @param compareOp the comparison operation
 * @param comparator the comparator
 * @param put data to put if check succeeds
 * @throws IOException e
 */
public boolean postCheckAndPut(final byte[] row, final byte[] family, final byte[] qualifier,
        final CompareOp compareOp, final ByteArrayComparable comparator, final Put put, boolean result)
        throws IOException {
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                result = ((RegionObserver) env.getInstance()).postCheckAndPut(ctx, row, family, qualifier,
                        compareOp, comparator, put, result);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return result;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * @param row row to check
 * @param family column family
 * @param qualifier column qualifier
 * @param compareOp the comparison operation
 * @param comparator the comparator
 * @param delete delete to commit if check succeeds
 * @throws IOException e
 */
public boolean postCheckAndDelete(final byte[] row, final byte[] family, final byte[] qualifier,
        final CompareOp compareOp, final ByteArrayComparable comparator, final Delete delete, boolean result)
        throws IOException {
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                result = ((RegionObserver) env.getInstance()).postCheckAndDelete(ctx, row, family, qualifier,
                        compareOp, comparator, delete, result);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return result;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * @param append append object
 * @return result to return to client if default operation should be
 * bypassed, null otherwise
 * @throws IOException if an error occurred on the coprocessor
 */
public Result preAppend(final Append append) throws IOException {
    boolean bypass = false;
    Result result = null;
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                result = ((RegionObserver) env.getInstance()).preAppend(ctx, append);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return bypass ? result : null;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * @param increment increment object
 * @return result to return to client if default operation should be
 * bypassed, null otherwise
 * @throws IOException if an error occurred on the coprocessor
 */
public Result preIncrement(final Increment increment) throws IOException {
    boolean bypass = false;
    Result result = null;
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                result = ((RegionObserver) env.getInstance()).preIncrement(ctx, increment);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return bypass ? result : null;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * @param scan the Scan specification
 * @return scanner to return to the client if the default operation should be
 * bypassed, null otherwise
 * @exception IOException Exception
 */
public RegionScanner preScannerOpen(final Scan scan) throws IOException {
    boolean bypass = false;
    RegionScanner s = null;
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                s = ((RegionObserver) env.getInstance()).preScannerOpen(ctx, scan, s);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return bypass ? s : null;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * This will be called by the scan flow when the current scanned row is being filtered out by the
 * filter.
 * @param s the scanner
 * @param currentRow The current rowkey which got filtered out
 * @param offset offset to rowkey
 * @param length length of rowkey
 * @return whether more rows are available for the scanner or not
 * @throws IOException
 */
public boolean postScannerFilterRow(final InternalScanner s, final byte[] currentRow, final int offset,
        final short length) throws IOException {
    boolean hasMore = true; // By default, assume more rows are available.
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                hasMore = ((RegionObserver) env.getInstance()).postScannerFilterRow(ctx, s, currentRow, offset,
                        length, hasMore);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return hasMore;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * @param info
 * @param logKey
 * @param logEdit
 * @return true if default behavior should be bypassed, false otherwise
 * @throws IOException
 */
public boolean preWALRestore(final HRegionInfo info, final HLogKey logKey, final WALEdit logEdit)
        throws IOException {
    boolean bypass = false;
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                ((RegionObserver) env.getInstance()).preWALRestore(ctx, info, logKey, logEdit);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return bypass;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * @param fs filesystem to read from
 * @param p path to the file
 * @param in {@link FSDataInputStreamWrapper}
 * @param size Full size of the file
 * @param cacheConf
 * @param r original reference file. This will be non-null only when reading a split file.
 * @param reader the base reader instance
 * @return The reader to use
 * @throws IOException
 */
public StoreFile.Reader postStoreFileReaderOpen(final FileSystem fs, final Path p,
        final FSDataInputStreamWrapper in, final long size, final CacheConfig cacheConf, final Reference r,
        StoreFile.Reader reader) throws IOException {
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                reader = ((RegionObserver) env.getInstance()).postStoreFileReaderOpen(ctx, fs, p, in, size,
                        cacheConf, r, reader);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return reader;
}

From source file:org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java

/**
 * Called prior to rewriting the store files selected for compaction
 * @param store the store being compacted
 * @param scanner the scanner used to read store data during compaction
 * @param scanType type of Scan
 * @param request the compaction that will be executed
 * @throws IOException
 */
public InternalScanner preCompact(final Store store, final InternalScanner scanner, final ScanType scanType,
        final CompactionRequest request) throws IOException {
    ObserverContext<RegionCoprocessorEnvironment> ctx = null;
    boolean bypass = false;
    InternalScanner s = scanner;
    for (RegionEnvironment env : coprocessors) {
        if (env.getInstance() instanceof RegionObserver) {
            ctx = ObserverContext.createAndPrepare(env, ctx);
            Thread currentThread = Thread.currentThread();
            ClassLoader cl = currentThread.getContextClassLoader();
            try {
                currentThread.setContextClassLoader(env.getClassLoader());
                s = ((RegionObserver) env.getInstance()).preCompact(ctx, store, s, scanType, request);
            } catch (Throwable e) {
                handleCoprocessorThrowable(env, e);
            } finally {
                currentThread.setContextClassLoader(cl);
            }
            bypass |= ctx.shouldBypass();
            if (ctx.shouldComplete()) {
                break;
            }
        }
    }
    return bypass ? null : s;
}