Example usage for java.util.concurrent.locks ReentrantLock ReentrantLock

Introduction

This page collects example usages of the ReentrantLock() constructor from java.util.concurrent.locks.ReentrantLock.

Prototype

public ReentrantLock() 

Document

Creates an instance of ReentrantLock.
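
The no-argument constructor creates a non-fair lock (it is equivalent to ReentrantLock(false)). As a quick orientation before the real-world snippets below, here is a minimal sketch of the usual lock/unlock pattern; the Counter class is hypothetical and not taken from any of the projects listed on this page.

import java.util.concurrent.locks.ReentrantLock;

public class Counter {

    private final ReentrantLock lock = new ReentrantLock(); // non-fair lock
    private int count;

    public void increment() {
        lock.lock();           // blocks until the lock is available
        try {
            count++;
        } finally {
            lock.unlock();     // always release in a finally block
        }
    }

    public int get() {
        lock.lock();
        try {
            return count;
        } finally {
            lock.unlock();
        }
    }
}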

Usage

From source file:org.j2free.cache.impl.memory.MemoryFragment.java

/**
 * @param content An initial value for the content of this MemoryFragment
 * @param condition An optional condition upon creation of the MemoryFragment;
 *        if the condition supplied to tryAcquireLock does not match this
 *        condition, then the cache considers itself in need of an update.
 *
 * @param timeout The timeout for this cached MemoryFragment
 */
public MemoryFragment(String content, String condition, long timeout) {

    this.content = content;
    this.timeout = timeout;

    // Use null instead of blank string for condition
    this.condition = StringUtils.isEmpty(condition) ? null : condition;

    this.updateLock = new ReentrantLock();
    this.initialized = new CountDownLatch(1);

    this.updateTime = System.currentTimeMillis();
    this.lockedTime = -1;
}

From source file:com.streamsets.datacollector.io.DataStore.java

@VisibleForTesting
void acquireLock() {
    LOG.trace("Acquiring lock for '{}'", file);
    ReentrantLock lock = null;
    synchronized (DataStore.class) {
        lock = FILE_LOCKS.get(file);
        if (lock == null) {
            lock = new ReentrantLock();
            FILE_LOCKS.put(file, lock);
        } else {
            Utils.checkState(!lock.isHeldByCurrentThread(),
                    Utils.format("The current thread already has a lock on '{}'", file));
        }
    }
    lock.lock();
    LOG.trace("Acquired lock for '{}'", file);
}
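
The DataStore example above keeps a static registry of per-file locks and guards it with a synchronized block. As a design note, the same per-key registry could also be written with ConcurrentHashMap.computeIfAbsent; the FileLockRegistry class below is a hypothetical sketch, not part of DataStore.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReentrantLock;

public final class FileLockRegistry {

    private static final ConcurrentMap<String, ReentrantLock> LOCKS = new ConcurrentHashMap<>();

    // Returns the lock for the given file path, creating it atomically if absent.
    public static ReentrantLock lockFor(String file) {
        return LOCKS.computeIfAbsent(file, key -> new ReentrantLock());
    }
}

A caller would then invoke lockFor(path).lock() and release the lock in a finally block, much like acquireLock above.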

From source file:com.facebook.infrastructure.net.TcpConnection.java

TcpConnection(EndPoint from, EndPoint to) throws IOException {

    localEp_ = from;
    remoteEp_ = to;

    setupChannel();
    bStream_ = true;
    lock_ = new ReentrantLock();
    condition_ = lock_.newCondition();
}
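
The TcpConnection constructor above pairs the lock with a Condition obtained from newCondition(). A minimal, self-contained sketch of how such a lock/condition pair is typically used follows; the Slot class and its names are hypothetical, not taken from TcpConnection.

import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;

public class Slot<T> {

    private final ReentrantLock lock = new ReentrantLock();
    private final Condition notEmpty = lock.newCondition();
    private T value;

    public void put(T v) {
        lock.lock();
        try {
            value = v;
            notEmpty.signal();        // wake up one waiting taker
        } finally {
            lock.unlock();
        }
    }

    public T take() throws InterruptedException {
        lock.lock();
        try {
            while (value == null) {   // loop guards against spurious wakeups
                notEmpty.await();     // releases the lock while waiting
            }
            T result = value;
            value = null;
            return result;
        } finally {
            lock.unlock();
        }
    }
}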

From source file:at.ac.univie.isc.asio.platform.FileSystemConfigStore.java

/**
 * @param root base working directory
 * @param timeout maximum time allowed to acquire internal lock
 */
public FileSystemConfigStore(final Path root, final Timeout timeout) {
    this.timeout = timeout;
    log.info(Scope.SYSTEM.marker(), "initializing in <{}>", root);
    lock = new ReentrantLock();
    try {
        this.root = Files.createDirectories(root.resolve(STORE_FOLDER)).toAbsolutePath();
        touch();
    } catch (IOException cause) {
        throw new FileSystemAccessFailure("cannot create configuration store folder", cause);
    }
}

From source file:org.wso2.carbon.apimgt.gateway.internal.TenantServiceCreator.java

public void createdConfigurationContext(ConfigurationContext configurationContext) {
    String tenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain();
    log.info("Initializing APIM TenantServiceCreator for the tenant domain : " + tenantDomain);
    try {

        // first check which configuration should be active
        org.wso2.carbon.registry.core.Registry registry = (org.wso2.carbon.registry.core.Registry) PrivilegedCarbonContext
                .getThreadLocalCarbonContext().getRegistry(RegistryType.SYSTEM_CONFIGURATION);

        AxisConfiguration axisConfig = configurationContext.getAxisConfiguration();

        // initialize the lock
        Lock lock = new ReentrantLock();
        axisConfig.addParameter("synapse.config.lock", lock);

        // creates the synapse configuration directory hierarchy if not exists
        // useful at the initial tenant creation
        File tenantAxis2Repo = new File(configurationContext.getAxisConfiguration().getRepository().getFile());
        File synapseConfigsDir = new File(tenantAxis2Repo, "synapse-configs");
        if (!synapseConfigsDir.exists()) {
            if (!synapseConfigsDir.mkdir()) {
                log.fatal("Couldn't create the synapse-config root on the file system "
                        + "for the tenant domain : " + tenantDomain);
                return;
            }
        }

        String synapseConfigsDirLocation = synapseConfigsDir.getAbsolutePath();
        // set the required configuration parameters to initialize the ESB
        axisConfig.addParameter(SynapseConstants.Axis2Param.SYNAPSE_CONFIG_LOCATION, synapseConfigsDirLocation);

        // init the multiple configuration tracker
        ConfigurationManager manger = new ConfigurationManager((UserRegistry) registry, configurationContext);
        manger.init();

        File synapseConfigDir = new File(synapseConfigsDir, manger.getTracker().getCurrentConfigurationName());
        StringBuilder filepath = new StringBuilder();
        filepath.append(synapseConfigsDir).append('/').append(manger.getTracker().getCurrentConfigurationName())
                .append('/').append(MultiXMLConfigurationBuilder.SEQUENCES_DIR).append('/')
                .append(authFailureHandlerSequenceName).append(".xml");
        File authFailureHandlerSequenceNameFile = new File(filepath.toString());
        // Check whether the auth failure handler sequence exists in the synapse artifacts.
        // If it is not available, create the sequence synapse configurations from the resource artifacts.
        if (!authFailureHandlerSequenceNameFile.exists()) {
            createTenantSynapseConfigHierarchy(synapseConfigDir, tenantDomain);
        }
    } catch (RemoteException e) {
        log.error("Failed to create Tenant's synapse sequences.", e);
    } catch (Exception e) {
        log.error("Failed to create Tenant's synapse sequences.", e);
    }
}

From source file:org.rifidi.edge.core.sensors.management.impl.SensorManagementServiceImpl.java

/**
 * Constructor.
 */
public SensorManagementServiceImpl() {
    this.sensorLock = new ReentrantLock();
    this.sensors = new ConcurrentHashMap<String, SensorUpdate>();
    this.physicalSensors = new ConcurrentHashMap<String, AbstractSensor<?>>();
}

From source file:org.nema.medical.mint.server.processor.StudyUpdateProcessor.java

@Override
public void run() {
    LOG.debug("Execution started.");

    String jobID = jobFolder.getName();
    String studyUUID = studyFolder.getName();

    JobInfo jobInfo = new JobInfo();
    jobInfo.setId(jobID);
    jobInfo.setStudyID(studyUUID);

    Lock lock = new ReentrantLock(), oldLock;

    oldLock = studyIdLocks.putIfAbsent(studyUUID, lock);
    if (oldLock != null) {
        LOG.debug("Lock was an existing lock.");
        lock = oldLock;
    }

    if (lock.tryLock()) {
        try {
            LOG.debug("Got lock, and starting process");

            //Not calling mkdir here because the changelog directory should already exist
            File changelogRoot = new File(studyFolder, "changelog");

            if (!changelogRoot.exists()) {
                throw new FileNotFoundException("The changelog for study uuid " + studyUUID
                        + " does not exist, may need to do a create first.");
            }

            /*
             * Need to load new study information
             */
            final StudyMetadata newStudy = StudyIO.loadStudy(jobFolder);
            final String typeName = newStudy.getType();
            final MetadataType dataDictionary = availableTypes.get(typeName);
            if (dataDictionary == null) {
                throw new RuntimeException("Invalid study type " + typeName);
            }

            if (newStudy.getVersion() >= 0) {
                throw new RuntimeException("Study update data specifies a version [" + newStudy.getVersion()
                        + "]; versions are controlled by server, not client");
            }

            try {
                StorageUtil.validateStudy(newStudy, dataDictionary, jobFolder);
            } catch (final StudyTraversals.TraversalException e) {
                throw new RuntimeException("Validation of the jobs study failed", e);
            }

            final File typeFolder = new File(studyFolder, typeName);
            final File existingBinaryFolder = new File(typeFolder, "binaryitems");
            existingBinaryFolder.mkdirs();

            StudyMetadata existingStudy;
            try {
                /*
                 * Need to load current study information
                 */
                existingStudy = StudyIO.loadStudy(typeFolder);
            } catch (final RuntimeException e) {
                /*
                 * Do nothing, just means there is no existing study
                 * which is fine.
                 */
                existingStudy = null;
            }

            /*
             * If the study versions are not the same, then this
             * update is for a version that is not the most recent and
             * should not be applied.
             */
            if (existingStudy != null
                    && (existingStudy.getVersion() < 0 || existingStudy.getVersion() != oldVersion)) {
                throw new RuntimeException(
                        "Study update data is of a different version than the current study, "
                                + "cannot update if versions do not match. (" + existingStudy.getVersion()
                                + " : " + oldVersion + ")");
            }

            /*
             * Need to rename the new binary files so there are no collisions
             * with existing data files when merging. This also means updating
             * the new study document.
             */
            final int maxExistingItemNumber = StorageUtil.getHighestNumberedBinaryItem(existingBinaryFolder);
            StorageUtil.shiftItemIds(newStudy, jobFolder, maxExistingItemNumber + 1);

            /*
             * Write metadata update message to change log folder.
             */
            File changelogFolder = StorageUtil.getNextChangelogDir(changelogRoot);

            StudyUtils.writeStudy(newStudy, changelogFolder);

            Collection<Integer> excludedBids = new HashSet<Integer>();
            if (existingStudy != null) {
                /*
                 * Need to move through the new study and look for things to exclude
                 * and exclude them from the existing study.
                 */
                StudyUtils.applyExcludes(existingStudy, newStudy, excludedBids);
            }

            /*
             * Clean out excludes because excludes should not be left in
             * the newStudy.
             */
            StudyUtils.removeStudyExcludes(newStudy);

            /*
             * Need to merge the study documents and renormalize the result.
             * This means first denormalize, then merge, then normalize the
             * result
             */
            StudyUtils.denormalizeStudy(newStudy);

            if (existingStudy != null) {
                StudyUtils.denormalizeStudy(existingStudy);
                StudyUtils.mergeStudy(existingStudy, newStudy, excludedBids);

                // Get next version number
                existingStudy.setVersion(existingStudy.getVersion() + 1);
            } else {
                /*
                 * If no existing study, new study becomes the existing
                 * study. This happens when an update is done on a type that
                 * has no data yet.
                 */
                existingStudy = newStudy;

                // Set to base level version
                existingStudy.setVersion(StudyUtils.getBaseVersion());
                existingStudy.setType(typeName);
            }

            //Rename all excluded binary files to have .exclude
            StorageUtil.renameExcludedFiles(existingBinaryFolder, excludedBids);

            StudyUtils.normalizeStudy(existingStudy);

            /*
             * Need to copy into the Study folder the new study document and
             * binary data files.
             */
            StudyUtils.writeStudy(existingStudy, typeFolder);

            StorageUtil.moveBinaryItems(jobFolder, existingBinaryFolder);

            FileUtils.deleteDirectory(jobFolder);

            //Update study DAO only if this is DICOM data; don't update study DAO for other types (DICOM is primary)
            if (typeName.equals("DICOM")) {
                MINTStudy studyData = new MINTStudy();
                studyData.setID(studyUUID);
                studyData.setStudyInstanceUID(existingStudy.getStudyInstanceUID());
                studyData.setPatientID(existingStudy.getValueForAttribute(0x00100020));
                studyData.setAccessionNumber(existingStudy.getValueForAttribute(0x00080050));
                // studyData.setDateTime(study.getValueForAttribute(0x00080020));
                studyData.setDateTime(MINTStudy.now());
                studyData.setStudyVersion(existingStudy.getVersion());
                studyDAO.updateStudy(studyData);
            }

            //Update change DAO for any type
            Change updateInfo = new Change();
            updateInfo.setId(UUID.randomUUID().toString());
            updateInfo.setStudyID(studyUUID);
            updateInfo.setType(typeName);
            updateInfo.setRemoteUser(remoteUser);
            updateInfo.setRemoteHost(remoteHost);
            updateInfo.setIndex(Integer.parseInt(changelogFolder.getName()));
            updateInfo.setOperation(ChangeOperation.UPDATE);
            updateDAO.saveChange(updateInfo);

            jobInfo.setStatus(JobStatus.SUCCESS);
            jobInfo.setStatusDescription("complete");
        } catch (Exception e) {
            jobInfo.setStatus(JobStatus.FAILED);
            jobInfo.setStatusDescription(e.getMessage());
            LOG.error("unable to process job " + jobID, e);
        } finally {
            lock.unlock();
            LOG.debug("Released lock and stopping.");
        }
    } else {
        jobInfo.setStatus(JobStatus.FAILED);
        jobInfo.setStatusDescription("unable to process job " + jobID
                + ", another update is currently being processed on the same study.");
    }

    jobInfoDAO.saveOrUpdateJobInfo(jobInfo);
}
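
The StudyUpdateProcessor above combines a concurrent map's putIfAbsent with tryLock so that only one update per study runs at a time, while other jobs for the same study fail fast instead of queueing. A condensed sketch of just that pattern follows; PerKeyWorker and its names are illustrative, not part of the project.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

public class PerKeyWorker {

    private final ConcurrentMap<String, Lock> locks = new ConcurrentHashMap<>();

    // Runs the work only if no other thread currently holds the lock for this key.
    public boolean runExclusively(String key, Runnable work) {
        Lock lock = new ReentrantLock();
        Lock existing = locks.putIfAbsent(key, lock);
        if (existing != null) {
            lock = existing;          // reuse the lock already registered for this key
        }
        if (!lock.tryLock()) {
            return false;             // another thread is already processing this key
        }
        try {
            work.run();
            return true;
        } finally {
            lock.unlock();
        }
    }
}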

From source file:org.apache.synapse.transport.nhttp.NhttpSharedOutputBuffer.java

public NhttpSharedOutputBuffer(final int buffersize, final IOControl ioctrl,
        final ByteBufferAllocator allocator, int timeout) {
    super(buffersize, allocator);
    Args.notNull(ioctrl, "I/O content control");
    this.ioctrl = ioctrl;
    this.lock = new ReentrantLock();
    this.condition = this.lock.newCondition();
    this.timeout = timeout;
}

From source file:org.openbaton.drivers.openstack4j.OpenStack4JDriver.java

public void init() {
    String sslChecksDisabled = properties.getProperty("disable-ssl-certificate-checks", "false");
    log.debug("Disable SSL certificate checks: {}", sslChecksDisabled);
    OpenStack4JDriver.lock = new ReentrantLock();
}

From source file:org.ops4j.pax.web.service.spi.model.ServerModel.java

/**
 * Constructor.
 */
public ServerModel() {
    m_aliasMapping = new HashMap<String, ServletModel>();
    m_servlets = new HashSet<Servlet>();
    m_servletUrlPatterns = new HashMap<String, UrlPattern>();
    m_filterUrlPatterns = new ConcurrentHashMap<String, UrlPattern>();
    m_httpContexts = new ConcurrentHashMap<HttpContext, Bundle>();

    m_servletLock = new ReentrantLock();
}