Example usage for com.google.common.collect ForwardingBlockingDeque ForwardingBlockingDeque

Introduction

On this page you can find example usage of the com.google.common.collect ForwardingBlockingDeque constructor, ForwardingBlockingDeque().

Prototype

protected ForwardingBlockingDeque() 

Document

Constructor for use by subclasses.
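
Because the constructor is protected, ForwardingBlockingDeque is used by subclassing it and returning the backing deque from delegate(). The sketch below is not part of the original example; the LinkedBlockingDeque delegate, its capacity, and the logging in offer are illustrative assumptions showing the forwarding/decorator pattern this class supports.

import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;

import com.google.common.collect.ForwardingBlockingDeque;

// Minimal sketch: every BlockingDeque call is forwarded to the deque returned
// by delegate(); offer(...) is overridden to add simple size tracing.
public class TracingBlockingDeque<E> extends ForwardingBlockingDeque<E> {

    // capacity chosen arbitrarily for this sketch
    private final BlockingDeque<E> delegate = new LinkedBlockingDeque<>(100);

    @Override
    protected BlockingDeque<E> delegate() {
        return delegate;
    }

    @Override
    public boolean offer(E element) {
        boolean inserted = super.offer(element);
        System.out.println("queue size: " + delegate.size());
        return inserted;
    }
}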

Usage

From source file: eu.itesla_project.offline.OfflineWorkflowImpl.java

@Override
protected void startImpl(final WorkflowStartContext startContext) throws Exception {
    Network network = loadAndMergeNetwork(creationParameters.getBaseCaseDate(), LOAD_FLOW_PRIORITY);

    // We want to work on multiple samples at the same time, so we use the
    // multi-state feature of the IIDM network model. Each sample is mapped
    // to a state created by cloning the initial state of the network.
    network.getStateManager().allowStateMultiThreadAccess(true);
    network.getStateManager().setWorkingState(StateManager.INITIAL_STATE_ID);

    Networks.printBalanceSummary("snapshot", network, LOGGER);

    try (HistoDbClient histoDbClient = histoDbClientFactory.create();
            TopologyMiner topologyMiner = topologyMinerFactory.create()) {

        ContingenciesAndActionsDatabaseClient cadbClient = cadbClientFactory.create();

        // prepare base case
        TopologyContext topologyContext = prepareBaseCase(network, creationParameters, histoDbClient,
                topologyMiner, computationManager);

        Networks.printBalanceSummary("base case", network, LOGGER);

        LOGGER.info("{} contingencies", cadbClient.getContingencies(network).size());

        // note: if the ~/sampler2wp41.properties file does not exist, a mock sampler is used;
        //       otherwise, the sampler provided by the matlab-integration module is used
        //       (these samplers require the MATLAB Compiler Runtime, see readme.txt in matlab-integration)
        Sampler sampler = samplerFactory.create(network, computationManager, SAMPLING_PRIORITY, histoDbClient);

        LOGGER.info("Sampling module: {} {}", sampler.getName(), Objects.toString(sampler.getVersion(), ""));

        Stabilization stabilization = simulatorFactory.createStabilization(network, computationManager,
                STABILIZATION_PRIORITY);
        ImpactAnalysis impactAnalysis = simulatorFactory.createImpactAnalysis(network, computationManager,
                IMPACT_ANALYSIS_PRIORITY, cadbClient);
        Optimizer optimizer = optimizerFactory.create(network, computationManager, STARTING_POINT_INIT_PRIORITY,
                histoDbClient, topologyMiner);
        LoadFlow loadFlow = loadFlowFactory.create(network, computationManager, LOAD_FLOW_PRIORITY);

        LOGGER.info("Starting point init module: {} {}", optimizer.getName(),
                Objects.toString(optimizer.getVersion(), ""));
        LOGGER.info("Load flow module: {} {}", loadFlow.getName(), Objects.toString(loadFlow.getVersion(), ""));
        LOGGER.info("Stabilization module: {} {}", stabilization.getName(),
                Objects.toString(stabilization.getVersion(), ""));
        LOGGER.info("Impact analysis module: {} {}", impactAnalysis.getName(),
                Objects.toString(impactAnalysis.getVersion(), ""));

        int cores = computationManager.getResourcesStatus().getAvailableCores();
        final int stateQueueSize = startContext.getStartParameters().getStateQueueSize() != -1
                ? startContext.getStartParameters().getStateQueueSize()
                : 2;

        LOGGER.trace("State queue initial size: {}", stateQueueSize);

        // module initializations
        sampler.init(new SamplerParameters(creationParameters.getHistoInterval(),
                creationParameters.isGenerationSampled(), creationParameters.isBoundariesSampled()));
        optimizer.init(new OptimizerParameters(creationParameters.getHistoInterval()), topologyContext);
        Map<String, Object> simulationInitContext = new HashMap<>();
        SimulationParameters simulationParameters = SimulationParameters.load();
        LOGGER.info(simulationParameters.toString());
        stabilization.init(simulationParameters, simulationInitContext);
        impactAnalysis.init(simulationParameters, simulationInitContext);

        changeWorkflowStatus(new OfflineWorkflowStatus(id, OfflineWorkflowStep.SAMPLING, creationParameters,
                startContext.getStartParameters()));

        LoadFlowParameters loadFlowParameters = createLoadFlowParameters();
        final WorkflowContext context = new WorkflowContext(network, sampler, optimizer, loadFlow,
                stabilization, impactAnalysis, loadFlowParameters);

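        // Bounded queue of samples: forward all BlockingDeque calls to a
        // LinkedBlockingDeque and override offer/poll to trace the queue size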
        final BlockingDeque<Sample> samples = new ForwardingBlockingDeque<Sample>() {

            private final BlockingDeque<Sample> delegate = new LinkedBlockingDeque<>(
                    startContext.getStartParameters().getSampleQueueSize());

            @Override
            protected BlockingDeque<Sample> delegate() {
                return delegate;
            }

            @Override
            public boolean offer(Sample o) {
                boolean inserted = super.offer(o);
                LOGGER.trace("Sample queue size ++: {}", delegate.size());
                return inserted;
            }

            @Override
            public Sample poll(long timeout, TimeUnit unit) throws InterruptedException {
                Sample sample = super.poll(timeout, unit);
                if (sample != null) {
                    int size = delegate.size();
                    LOGGER.trace("Sample queue size --: {}", size);
                    if (size == 0) {
                        LOGGER.warn("Sample queue is empty");
                    }
                }
                return sample;
            }

        };

        List<Future<?>> sampleFutures = new ArrayList<>();
        final Semaphore samplePermits = new Semaphore(startContext.getStartParameters().getSampleQueueSize(),
                true);

        for (int i = 0; i < startContext.getStartParameters().getSamplingThreads(); i++) {
            sampleFutures.add(executorService.submit(() -> {
                try {
                    // continuously re-sample until a stop is requested
                    while (!isStopRequested(startContext)) {
                        // wait for a sample permit
                        while (!isStopRequested(startContext) && !samplePermits.tryAcquire(
                                startContext.getStartParameters().getSamplesPerThread(), TIMEOUT,
                                TimeUnit.MILLISECONDS)) {
                        }

                        if (!isStopRequested(startContext)) {
                            SamplerResult result = runSampling(
                                    startContext.getStartParameters().getSamplesPerThread(), context,
                                    startContext);
                            if (result != null && result.isOk()) {
                                for (Sample sample : result.getSamples()) {
                                    samples.offer(sample);
                                }
                            }
                        }
                    }
                } catch (Throwable e) {
                    LOGGER.error(e.toString(), e);
                }
            }));
        }

        List<Future<?>> stateFutures = new ArrayList<>();
        final Semaphore statePermits = new Semaphore(stateQueueSize, true);
        final Queue<SimulationState> states = Queues
                .synchronizedQueue(new ArrayDeque<SimulationState>(stateQueueSize));
        final Map<SimulationState, Integer> sampleIds = Collections.synchronizedMap(new HashMap<>());

        for (int i = 0; i < stateQueueSize; i++) {
            stateFutures.add(executorService.submit(() -> {
                try {
                    while (!isStopRequested(startContext)) {
                        // wait for a state permit
                        while (!isStopRequested(startContext)
                                && !statePermits.tryAcquire(TIMEOUT, TimeUnit.MILLISECONDS)) {
                        }

                        // wait for a sample to be available
                        Sample sample = null;
                        while (!isStopRequested(startContext)
                                && (sample = samples.poll(TIMEOUT, TimeUnit.MILLISECONDS)) == null) {
                        }

                        if (sample != null) {
                            samplePermits.release();

                            String stateId = "Sample-" + sample.getId();

                            // create a new network state
                            context.getNetwork().getStateManager().cloneState(StateManager.INITIAL_STATE_ID,
                                    stateId);
                            try {
                                // set current thread working state
                                context.getNetwork().getStateManager().setWorkingState(stateId);

                                // apply the sample to the network
                                sample.apply(context.getNetwork());

                                SimulationState state = null;
                                if (!isStopRequested(startContext)
                                        && runStartingPointInit(context, startContext, sample.getId())
                                        && !isStopRequested(startContext)
                                        && runLoadFlow(context, startContext, sample)
                                        && !isStopRequested(startContext) && (state = runStabilization(context,
                                                startContext, sample.getId())) != null) {
                                    sampleIds.put(state, sample.getId());
                                    states.add(state);
                                    LOGGER.trace("State queue size ++: {}", states.size());
                                } else {
                                    statePermits.release();
                                }

                                // in any case, store the sample in the simulation db and the
                                // validation db for later deep analysis
                                storeState(sample.getId(), context.getNetwork());

                                startContext.incrementProcessedSamples();

                                try {
                                    validationDb.save(context.getNetwork(),
                                            OfflineWorkflow.getValidationDir(id), "sample-" + sample.getId());
                                } catch (Exception e) {
                                    LOGGER.error(e.toString(), e);
                                }
                            } finally {
                                context.getNetwork().getStateManager().removeState(stateId);
                            }
                        }
                    }
                } catch (Throwable e) {
                    LOGGER.error(e.toString(), e);
                }
            }));
        }

        ImpactAnalysisController controller = new ImpactAnalysisController() {

            @Override
            public boolean isStopRequested() {
                return OfflineWorkflowImpl.this.isStopRequested(startContext);
            }

            @Override
            public SimulationState nextState() {
                SimulationState state = states.poll();
                if (state != null) {
                    statePermits.release();
                    int size = states.size();
                    LOGGER.trace("State queue size --: {}", size);
                    if (size == 0) {
                        LOGGER.warn("State queue is empty");
                    }
                }
                return state;
            }

        };

        ImpactAnalysisResultCallback callback = new ImpactAnalysisResultCallback() {

            @Override
            public void onStart(SimulationState state) {
                int sampleId = sampleIds.get(state);
                LOGGER.debug("Workflow {}, sample {}: impact analysis started", id, sampleId);
            }

            @Override
            public void onResult(SimulationState state, ImpactAnalysisResult result) {
                try {
                    int sampleId = sampleIds.remove(state);
                    storeImpactAnalysisResults(context, startContext, sampleId, result);
                } catch (Exception e) {
                    LOGGER.error(e.toString(), e);
                }
            }
        };

        try {
            runImpactAnalysis(context, cores, controller, callback, cadbClient);
        } catch (Exception e) {
            LOGGER.error(e.toString(), e);
            stopRequested.set(true);
        }

        for (Future<?> f : stateFutures) {
            f.get();
        }

        samplePermits.release(samples.size());

        for (Future<?> f : sampleFutures) {
            f.get();
        }

        // some offline db implementation may require to explicitly flush data on disk
        offlineDb.flush(id);

        // clean samples synthesis
        sampleSynthesisLock.lock();
        try {
            samplesSynthesis.clear();
        } finally {
            sampleSynthesisLock.unlock();
        }
        notifySampleSynthesisChange();
    }
}