Example usage for org.apache.commons.beanutils ConvertUtils convert

Introduction

On this page you can find example usages of org.apache.commons.beanutils.ConvertUtils.convert.

Prototype

public static Object convert(String values[], Class clazz) 

Document

Convert an array of specified values to an array of objects of the specified class (if possible).

For more details see ConvertUtilsBean.
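
To make this concrete, the following is a minimal, self-contained sketch (the values are hypothetical, not taken from the examples below). It demonstrates the String[] prototype above, plus the Object-value overload that most of the examples on this page use to convert between primitive array types:

import java.util.Arrays;

import org.apache.commons.beanutils.ConvertUtils;

public class ConvertUtilsDemo {
    public static void main(String[] args) {
        // String[] -> Integer[]: the result is an array of the requested component class.
        Integer[] ports = (Integer[]) ConvertUtils.convert(new String[] { "80", "443" }, Integer.class);

        // Object-value overload, used in the examples below to widen int[] slice
        // positions into the long[] offsets that the HDF5 API expects.
        long[] start = (long[]) ConvertUtils.convert(new int[] { 0, 16, 32 }, long[].class);

        System.out.println(Arrays.toString(ports)); // [80, 443]
        System.out.println(Arrays.toString(start)); // [0, 16, 32]
    }
}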

Usage

From source file:uk.ac.diamond.scisoft.ncd.reduction.LazyInvariant.java

public Dataset execute(int dim, Dataset data, SliceSettings sliceData, ILock lock) throws HDF5Exception {

    Invariant inv = new Invariant();

    int[] dataShape = Arrays.copyOf(data.getShape(), data.getRank() - dim);
    data = flattenGridData(data, dim);
    Dataset errors = data.getErrorBuffer();

    Object[] myobj = inv.process(data.getBuffer(), errors.getBuffer(), data.getShape());
    float[] mydata = (float[]) myobj[0];
    double[] myerrors = (double[]) myobj[1];

    Dataset myres = new FloatDataset(mydata, dataShape);
    myres.setErrorBuffer(new DoubleDataset(myerrors, dataShape));

    try {
        lock.acquire();

        long[] frames = sliceData.getFrames();
        long[] start_pos = (long[]) ConvertUtils.convert(sliceData.getStart(), long[].class);
        int sliceDim = sliceData.getSliceDim();
        int sliceSize = sliceData.getSliceSize();

        long[] start = Arrays.copyOf(start_pos, frames.length);

        long[] block = Arrays.copyOf(frames, frames.length);
        Arrays.fill(block, 0, sliceData.getSliceDim(), 1);
        block[sliceDim] = Math.min(frames[sliceDim] - start_pos[sliceDim], sliceSize);

        long[] count = new long[frames.length];
        Arrays.fill(count, 1);

        int filespace_id = H5.H5Dget_space(inv_data_id);
        int type_id = H5.H5Dget_type(inv_data_id);
        int memspace_id = H5.H5Screate_simple(block.length, block, null);
        H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, block, count, block);
        H5.H5Dwrite(inv_data_id, type_id, memspace_id, filespace_id, HDF5Constants.H5P_DEFAULT, mydata);

        filespace_id = H5.H5Dget_space(inv_errors_id);
        type_id = H5.H5Dget_type(inv_errors_id);
        memspace_id = H5.H5Screate_simple(block.length, block, null);
        H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, block, count, block);
        H5.H5Dwrite(inv_errors_id, type_id, memspace_id, filespace_id, HDF5Constants.H5P_DEFAULT,
                myres.getError().getBuffer());
    } finally {
        lock.release();
    }

    return myres;
}

From source file:uk.ac.diamond.scisoft.ncd.reduction.LazyNcdProcessing.java

/**
  * @param detectorName - name of detector e.g. Pilatus2M
  * @param dimension      - dimension of detector
  * @param filename - file path to io file (processing done in this folder) 
  * @param monitor
  * @throws HDF5Exception 
  */
 public void configure(String detectorName, int dimension, String filename, final IProgressMonitor monitor)
         throws HDF5Exception {
     String[] tmpName = FilenameUtils.getName(filename).split("_");
     monitorFile = tmpName[1];
     detector = detectorName;

     int fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
     H5.H5Pset_fclose_degree(fapl, HDF5Constants.H5F_CLOSE_WEAK);
     nxsfile_handle = H5.H5Fopen(filename, HDF5Constants.H5F_ACC_RDWR, fapl);
     H5.H5Pclose(fapl);
     entry_group_id = H5.H5Gopen(nxsfile_handle, "entry1", HDF5Constants.H5P_DEFAULT);

     fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
     H5.H5Pset_fclose_degree(fapl, HDF5Constants.H5F_CLOSE_WEAK);
     // Need to use read-only file handle to safely access
     // input data linked into the result file
     inputfile_handle = H5.H5Fopen(filename, HDF5Constants.H5F_ACC_RDONLY, fapl);
     H5.H5Pclose(fapl);
     detector_group_id = H5.H5Gopen(inputfile_handle, "entry1/" + detector, HDF5Constants.H5P_DEFAULT);
     input_data_id = H5.H5Dopen(detector_group_id, "data", HDF5Constants.H5P_DEFAULT);
     boolean exists = H5.H5Lexists(detector_group_id, "errors", HDF5Constants.H5P_DEFAULT);
     if (exists) {
         input_errors_id = H5.H5Dopen(detector_group_id, "errors", HDF5Constants.H5P_DEFAULT);
     }

     input_ids = new DataSliceIdentifiers();
     input_ids.setIDs(detector_group_id, input_data_id);
     input_errors_ids = new DataSliceIdentifiers();
     input_errors_ids.setIDs(detector_group_id, input_errors_id);

     dim = dimension;
     rank = H5.H5Sget_simple_extent_ndims(input_ids.dataspace_id);
     frames = new long[rank];
     H5.H5Sget_simple_extent_dims(input_ids.dataspace_id, frames, null);
     frames_int = (int[]) ConvertUtils.convert(frames, int[].class);

     processing_group_id = NcdNexusUtils.makegroup(entry_group_id, detector + "_processing", Nexus.INST);
     result_group_id = NcdNexusUtils.makegroup(entry_group_id, detector + "_result", Nexus.DATA);

     if (firstFrame != null || lastFrame != null) {
         frameSelection = StringUtils.leftPad("", rank - dim - 1, ";");
         if (firstFrame != null) {
             frameSelection += Integer.toString(firstFrame);
         }
         frameSelection += "-";
         if (lastFrame != null) {
             frameSelection += Integer.toString(lastFrame);
         }
         frameSelection += ";";
     }

     if (frameSelection != null) {
         int sel_group_id = NcdNexusUtils.makegroup(processing_group_id, LazySelection.name, Nexus.DETECT);

         monitor.beginTask(monitorFile + " : Slicing Input Data", IProgressMonitor.UNKNOWN);
         LazySelection selection = new LazySelection(frames_int);
         selection.setFormat(frameSelection);
         selection.setMonitor(monitor);
         DataSliceIdentifiers[] obj_ids = selection.execute(dim, input_ids, input_errors_ids, sel_group_id);

         if (monitor.isCanceled()) {
             return;
         }

         monitor.done();

         input_ids = obj_ids[0];
         input_errors_ids = obj_ids[1];
         H5.H5Sget_simple_extent_dims(input_ids.dataspace_id, frames, null);
         frames_int = (int[]) ConvertUtils.convert(frames, int[].class);
     }

     lazyDetectorResponse = new LazyDetectorResponse(drFile, detector);
     if (flags.isEnableDetectorResponse()) {
         lazyDetectorResponse.setDrFile(drFile);
         lazyDetectorResponse.configure(dimension, frames, entry_group_id, processing_group_id);
     }

     lazySectorIntegration = new LazySectorIntegration();
     if (flags.isEnableSector() && dim == 2) {
         intSector.setAverageArea(false);
         lazySectorIntegration.setIntSector(intSector);
         if (enableMask) {
             lazySectorIntegration.setMask(mask);
         }
         qaxis = calculateQaxisDataset(detector, dim, secFrames, frames);
         if (qaxis != null) {
             lazySectorIntegration.setQaxis(qaxis, qaxisUnit);
             lazySectorIntegration.setCalibrationData(slope, intercept);
             lazySectorIntegration.setCameraLength(cameraLength);
             lazySectorIntegration.setEnergy(energy);
         }
         lazySectorIntegration.setCalculateRadial(flags.isEnableRadial());
         lazySectorIntegration.setCalculateAzimuthal(flags.isEnableAzimuthal());
         lazySectorIntegration.setFast(flags.isEnableFastintegration());
         lazySectorIntegration.configure(dimension, frames, processing_group_id);

         secRank = rank - dim + 1;
         secFrames = lazySectorIntegration.secFrames;
         dimension = 1;
     }

     lazyNormalisation = new LazyNormalisation();
     if (flags.isEnableNormalisation()) {
         lazyNormalisation.setCalibration(calibration);
         lazyNormalisation.setAbsScaling(absScaling);
         lazyNormalisation.setNormChannel(normChannel);
         lazyNormalisation.configure(dimension, flags.isEnableSector() ? secFrames : frames, entry_group_id,
                 processing_group_id);
     }

     lazyBackgroundSubtraction = new LazyBackgroundSubtraction();
     if (flags.isEnableBackground()) {
         if (qaxis != null) {
             lazyBackgroundSubtraction.setQaxis(qaxis, qaxisUnit);
         }
         lazyBackgroundSubtraction.setBgFile(bgFile);
         lazyBackgroundSubtraction.setBgDetector(bgDetector);
         lazyBackgroundSubtraction.setBgScale(bgScaling);
         lazyBackgroundSubtraction.configure(dimension, flags.isEnableSector() ? secFrames : frames,
                 processing_group_id);

         lazyBackgroundSubtraction.preprocess(dimension, frames, frameBatch);
     }

     lazyInvariant = new LazyInvariant();
     if (flags.isEnableInvariant()) {
         lazyInvariant.configure(dimension, flags.isEnableSector() ? secFrames : frames, entry_group_id,
                 processing_group_id);
     }
 }

From source file:uk.ac.diamond.scisoft.ncd.reduction.LazyNcdProcessing.java

/**
  * @param monitor
  * @throws HDF5Exception 
  */
 public void execute(final IProgressMonitor monitor) throws HDF5Exception {

     int sliceDim = 0;
     int sliceSize = (int) frames[0];

     // We will slice only 2D data. 1D data is loaded into memory completely
     if (dim == 2) {

         estimateFrameBatchSize(dim, frames);

         // Find dimension that needs to be sliced
         MultidimensionalCounter dimCounter = new MultidimensionalCounter(
                 Arrays.copyOfRange(frames_int, 0, rank - dim));
         if (dimCounter.getSize() > frameBatch) {
             int[] sliceIdx = dimCounter.getCounts(frameBatch);
             for (int i = 0; i < sliceIdx.length; i++) {
                 if (sliceIdx[i] != 0) {
                     sliceDim = i;
                     break;
                 }
             }
             sliceSize = sliceIdx[sliceDim];
         }
     }

     SliceSettings sliceParams = new SliceSettings(frames, sliceDim, sliceSize);

     int[] iter_array = Arrays.copyOfRange(frames_int, 0, sliceDim + 1);
     int[] start = new int[iter_array.length];
     int[] step = new int[iter_array.length];
     Arrays.fill(start, 0);
     Arrays.fill(step, 1);
     step[sliceDim] = sliceSize;
     SliceND slice = new SliceND(iter_array, null, iter_array, step);
     IndexIterator iter = new SliceIterator(iter_array, AbstractDataset.calcSize(iter_array), slice);

     if (flags.isEnableSector() && dim == 2) {
         ArrayList<Job> sectorJobList = new ArrayList<Job>();
         ArrayList<Job> runningJobList = new ArrayList<Job>();

         while (iter.hasNext()) {

             DataReductionJob sectorJob = new DataReductionJob("Sector Integration") {

                 @Override
                 protected IStatus run(IProgressMonitor jobmonitor) {
                     try {
                         Dataset data;
                         try {
                             lock.acquire();
                             data = NcdNexusUtils.sliceInputData(currentSliceParams, tmp_ids);
                             if (tmp_errors_ids != null) {
                                 if (tmp_errors_ids.dataset_id >= 0) {
                                     Dataset errors = NcdNexusUtils.sliceInputData(currentSliceParams,
                                             tmp_errors_ids);
                                     data.setError(errors);
                                 } else {
                                     tmp_errors_ids.setSlice(currentSliceParams);
                                 }
                             }
                         } catch (Exception e) {
                             throw e;
                         } finally {
                             lock.release();
                         }

                         if (flags.isEnableDetectorResponse()) {
                             jobmonitor.setTaskName(monitorFile + " : Correct for detector response");
                             data = lazyDetectorResponse.execute(dim, data, currentSliceParams, lock);
                         }

                         jobmonitor.setTaskName(monitorFile + " : Performing sector integration");

                         data = lazySectorIntegration.execute(dim, data, currentSliceParams, lock)[1];
                     } catch (Exception e) {
                         e.printStackTrace();
                         return Status.CANCEL_STATUS;
                     }

                     return Status.OK_STATUS;
                 }
             };

             sectorJob.tmp_ids = new DataSliceIdentifiers(input_ids);
             sectorJob.tmp_errors_ids = new DataSliceIdentifiers(input_errors_ids);
             sliceParams.setStart(iter.getPos());
             sectorJob.currentSliceParams = new SliceSettings(sliceParams);
             sectorJobList.add(sectorJob);

         }

         monitor.beginTask(monitorFile + " : Running Sector Integration Stage", sectorJobList.size());
         for (Job job : sectorJobList) {
             if (monitor.isCanceled()) {
                 sectorJobList.clear();
                 for (Job runningJob : runningJobList) {
                     runningJob.cancel();
                 }
                 break;
             }
             while (runningJobList.size() >= cores) {
                 try {
                     runningJobList.get(0).join();
                     runningJobList.remove(0);
                     monitor.worked(1);
                 } catch (InterruptedException e) {
                     e.printStackTrace();
                 }
             }
             job.schedule();
             runningJobList.add(job);
         }

         for (Job job : sectorJobList) {
             try {
                 job.join();
             } catch (InterruptedException e) {
                 e.printStackTrace();
             }
         }

         if (monitor.isCanceled()) {
             return;
         }

         monitor.done();

         dim = 1;
         rank = secRank;
         sliceDim = 0;
         sliceSize = (int) secFrames[0];

         frames = secFrames;
         frames_int = (int[]) ConvertUtils.convert(secFrames, int[].class);

         sliceParams = new SliceSettings(frames, sliceDim, sliceSize);
         IntegerDataset idx_dataset = new IntegerDataset(new int[] { sliceSize }, new int[] { 1 });
         iter = idx_dataset.getSliceIterator(new int[] { 0 }, new int[] { 1 }, new int[] { 1 });

         input_ids.setIDs(lazySectorIntegration.sec_group_id, lazySectorIntegration.sec_data_id);
         input_errors_ids.setIDs(lazySectorIntegration.sec_group_id, lazySectorIntegration.sec_errors_id);
     }

     ArrayList<DataReductionJob> processingJobList = new ArrayList<DataReductionJob>();
     ArrayList<DataReductionJob> runningJobList = new ArrayList<DataReductionJob>();

     while (iter.hasNext()) {

         DataReductionJob processingJob = new DataReductionJob("Data Reduction") {

             @Override
             protected IStatus run(IProgressMonitor jobmonitor) {
                 try {
                     Dataset data;
                     int finalSliceDim = currentSliceParams.getSliceDim();
                     int finalSliceSize = currentSliceParams.getSliceSize();

                     try {
                         lock.acquire();
                         data = NcdNexusUtils.sliceInputData(currentSliceParams, tmp_ids);
                         if (tmp_errors_ids != null) {
                             if (tmp_errors_ids.dataset_id >= 0) {
                                 Dataset errors = NcdNexusUtils.sliceInputData(currentSliceParams,
                                         tmp_errors_ids);
                                 data.setError(errors);
                             } else {
                                 tmp_errors_ids.setSlice(currentSliceParams);
                             }
                         }
                     } catch (Exception e) {
                         throw e;
                     } finally {
                         lock.release();
                     }

                     if (flags.isEnableDetectorResponse() && !flags.isEnableSector()) {
                         jobmonitor.setTaskName(monitorFile + " : Correct for detector response");
                         data = lazyDetectorResponse.execute(dim, data, currentSliceParams, lock);
                     }

                     if (flags.isEnableNormalisation()) {
                         jobmonitor.setTaskName(monitorFile + " : Normalising data");
                         data = lazyNormalisation.execute(dim, data, currentSliceParams, lock);
                     }

                     if (flags.isEnableBackground()) {
                         jobmonitor.setTaskName(monitorFile + " : Subtracting background");

                         long[] bgFrames = lazyBackgroundSubtraction.bgFrames;
                         int[] bgFrames_int = lazyBackgroundSubtraction.bgFrames_int;
                         int bgSliceSize = Math.min(finalSliceSize, bgFrames_int[finalSliceDim]);
                         int[] bgStart = new int[finalSliceDim + 1];
                         for (int i = 0; i <= finalSliceDim; i++) {
                             bgStart[i] = Math.min(currentSliceParams.getStart()[i], bgFrames_int[i] - 1);
                         }
                         SliceSettings bgSliceParams = new SliceSettings(bgFrames, finalSliceDim, bgSliceSize);
                         bgSliceParams.setStart(bgStart);
                         Dataset bgData = NcdNexusUtils.sliceInputData(bgSliceParams, tmp_bgIds);
                         if (tmp_errors_bgIds != null) {
                             if (tmp_errors_bgIds.dataset_id >= 0) {
                                 Dataset bgErrors = NcdNexusUtils.sliceInputData(bgSliceParams,
                                         tmp_errors_bgIds);
                                 bgData.setError(bgErrors);
                             } else {
                                 tmp_errors_bgIds.setSlice(bgSliceParams);
                             }
                         }
                         Dataset[] remapData = NcdDataUtils.matchDataDimensions(data, bgData);
                         Dataset[] remapErrors = NcdDataUtils.matchDataDimensions(data.getError(),
                                 bgData.getError());
                         remapData[0].setError(remapErrors[0]);
                         remapData[1].setError(remapErrors[1]);
                         Dataset res = lazyBackgroundSubtraction.execute(dim, remapData[0], remapData[1],
                                 currentSliceParams, lock);
                         remapData[0] = res;
                         remapErrors[0] = res.getError();

                         // restore original axis order in output dataset
                         data = DatasetUtils.transpose(remapData[0], (int[]) remapData[2].getBuffer());
                         data.setError(
                                 DatasetUtils.transpose(remapErrors[0], (int[]) remapErrors[2].getBuffer()));
                     }

                     if (flags.isEnableInvariant()) {
                         jobmonitor.setTaskName(monitorFile + " : Calculating invariant");
                         SliceSettings invSliceParam = new SliceSettings(lazyInvariant.invFrames, finalSliceDim,
                                 finalSliceSize);
                         invSliceParam.setStart(Arrays.copyOf(currentSliceParams.getStart(), finalSliceDim + 1));
                         lazyInvariant.execute(dim, data, invSliceParam, lock);
                     }
                 } catch (Exception e) {
                     e.printStackTrace();
                     return Status.CANCEL_STATUS;
                 }

                 return Status.OK_STATUS;
             }
         };

         processingJob.tmp_ids = new DataSliceIdentifiers(input_ids);
         processingJob.tmp_errors_ids = new DataSliceIdentifiers(input_errors_ids);
         sliceParams.setStart(iter.getPos());
         processingJob.currentSliceParams = new SliceSettings(sliceParams);
         if (flags.isEnableBackground()) {
             processingJob.tmp_bgIds = new DataSliceIdentifiers(lazyBackgroundSubtraction.bgIds);
             processingJob.tmp_errors_bgIds = new DataSliceIdentifiers(lazyBackgroundSubtraction.bgErrorsIds);
         }
         processingJobList.add(processingJob);
     }

     monitor.beginTask(monitorFile + " : Running NCD Data Reduction stages", processingJobList.size());
     for (DataReductionJob job : processingJobList) {
         if (monitor.isCanceled()) {
             processingJobList.clear();
             for (Job runningJob : runningJobList) {
                 runningJob.cancel();
             }
             break;
         }
         while (runningJobList.size() >= cores) {
             try {
                 runningJobList.get(0).join();
                 runningJobList.remove(0);
                 monitor.worked(1);
             } catch (InterruptedException e) {
                 e.printStackTrace();
             }
         }
         job.schedule();
         runningJobList.add(job);
     }

     for (DataReductionJob job : processingJobList) {
         try {
             job.join();
         } catch (InterruptedException e) {
             e.printStackTrace();
         }
     }

     if (monitor.isCanceled()) {
         return;
     }

     monitor.done();

     if (flags.isEnableBackground()) {
         input_ids.setIDs(lazyBackgroundSubtraction.bg_group_id, lazyBackgroundSubtraction.bg_data_id);
         input_errors_ids.setIDs(lazyBackgroundSubtraction.bg_group_id, lazyBackgroundSubtraction.bg_errors_id);
     } else if (flags.isEnableNormalisation()) {
         input_ids.setIDs(lazyNormalisation.norm_group_id, lazyNormalisation.norm_data_id);
         input_errors_ids.setIDs(lazyNormalisation.norm_group_id, lazyNormalisation.norm_errors_id);
     } else if (flags.isEnableDetectorResponse() && !flags.isEnableSector()) {
         input_ids.setIDs(lazyDetectorResponse.dr_group_id, lazyDetectorResponse.dr_data_id);
         input_errors_ids.setIDs(lazyDetectorResponse.dr_group_id, lazyDetectorResponse.dr_errors_id);
     }

     if (flags.isEnableAverage()) {
         monitor.beginTask(monitorFile + " : Averaging  datasets", IProgressMonitor.UNKNOWN);
         int[] averageIndices = new int[] { frames.length - dim };
         if (gridAverage != null) {
             averageIndices = NcdDataUtils.createGridAxesList(gridAverage, frames.length - dim + 1);
         }
         lazyAverage = new LazyAverage();
         lazyAverage.setAverageIndices(averageIndices);
         lazyAverage.setMonitor(monitor);
         lazyAverage.configure(dim, frames_int, processing_group_id, frameBatch);
         lazyAverage.execute(input_ids, input_errors_ids);

         if (monitor.isCanceled()) {
             return;
         }

         if (qaxis != null) {
             lazyAverage.setQaxis(qaxis, qaxisUnit);
             lazyAverage.writeQaxisData(frames_int.length, input_ids.datagroup_id);
         }
         lazyAverage.writeNcdMetadata(input_ids.datagroup_id);

         monitor.done();
     }

     H5.H5Lcopy(input_ids.datagroup_id, "./data", result_group_id, "./data", HDF5Constants.H5P_DEFAULT,
             HDF5Constants.H5P_DEFAULT);
     if (input_errors_ids.dataset_id != -1) {
         H5.H5Lcopy(input_errors_ids.datagroup_id, "./errors", result_group_id, "./errors",
                 HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
     }
     if (qaxis != null) {
         H5.H5Lcopy(input_ids.datagroup_id, "./q", result_group_id, "./q", HDF5Constants.H5P_DEFAULT,
                 HDF5Constants.H5P_DEFAULT);
         if (input_errors_ids.dataset_id != -1 && qaxis.hasErrors()) {
             H5.H5Lcopy(input_ids.datagroup_id, "./q_errors", result_group_id, "./q_errors",
                     HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
         }
     }

     if (flags.isEnableLogLogPlot()) {
         SaxsPlotData plotData = new LogLogPlotData();
         addPlotData(plotData, detector, qaxis);
     }

     if (flags.isEnableGuinierPlot()) {
         SaxsPlotData plotData = new GuinierPlotData();
         addPlotData(plotData, detector, qaxis);
     }

     if (flags.isEnablePorodPlot()) {
         SaxsPlotData plotData = new PorodPlotData();
         addPlotData(plotData, detector, qaxis);
     }

     if (flags.isEnableKratkyPlot()) {
         SaxsPlotData plotData = new KratkyPlotData();
         addPlotData(plotData, detector, qaxis);
     }

     if (flags.isEnableZimmPlot()) {
         SaxsPlotData plotData = new ZimmPlotData();
         addPlotData(plotData, detector, qaxis);
     }

     if (flags.isEnableDebyeBuechePlot()) {
         SaxsPlotData plotData = new DebyeBuechePlotData();
         addPlotData(plotData, detector, qaxis);
     }
 }

From source file:uk.ac.diamond.scisoft.ncd.reduction.LazyNormalisation.java

public Dataset execute(int dim, Dataset data, SliceSettings sliceData, ILock lock) throws HDF5Exception {
    Normalisation nm = new Normalisation();
    nm.setCalibChannel(normChannel);
    if (absScaling != null) {
        nm.setNormvalue(absScaling);
    }
    int[] dataShape = data.getShape();

    data = flattenGridData(data, dim);
    Dataset errors = data.getErrorBuffer();

    SliceSettings calibrationSliceParams = new SliceSettings(sliceData);
    calibrationSliceParams.setFrames(framesCal);
    Dataset dataCal = NcdNexusUtils.sliceInputData(calibrationSliceParams, calibration_ids);
    Dataset calibngd = flattenGridData(dataCal, 1);

    Object[] myobj = nm.process(data.getBuffer(), errors.getBuffer(), calibngd.getBuffer(), data.getShape()[0],
            data.getShape(), calibngd.getShape());
    float[] mydata = (float[]) myobj[0];
    double[] myerrors = (double[]) myobj[1];

    Dataset myres = new FloatDataset(mydata, dataShape);
    myres.setErrorBuffer(new DoubleDataset(myerrors, dataShape));

    int filespace_id = -1;
    int type_id = -1;
    int memspace_id = -1;
    int select_id = -1;
    int write_id = -1;

    try {
        lock.acquire();

        long[] frames = sliceData.getFrames();
        long[] start_pos = (long[]) ConvertUtils.convert(sliceData.getStart(), long[].class);
        int sliceDim = sliceData.getSliceDim();
        int sliceSize = sliceData.getSliceSize();

        long[] start = Arrays.copyOf(start_pos, frames.length);

        long[] block = Arrays.copyOf(frames, frames.length);
        Arrays.fill(block, 0, sliceData.getSliceDim(), 1);
        block[sliceDim] = Math.min(frames[sliceDim] - start_pos[sliceDim], sliceSize);

        long[] count = new long[frames.length];
        Arrays.fill(count, 1);

        filespace_id = H5.H5Dget_space(norm_data_id);
        type_id = H5.H5Dget_type(norm_data_id);
        memspace_id = H5.H5Screate_simple(block.length, block, null);

        select_id = H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, block, count,
                block);
        if (select_id < 0) {
            throw new HDF5Exception("Failed to allocate space fro writing Normalisation data");
        }

        write_id = H5.H5Dwrite(norm_data_id, type_id, memspace_id, filespace_id, HDF5Constants.H5P_DEFAULT,
                mydata);
        if (write_id < 0) {
            throw new HDF5Exception("Failed to write Normalisation data into the results file");
        }

        NcdNexusUtils.closeH5idList(new ArrayList<Integer>(Arrays.asList(memspace_id, type_id, filespace_id)));

        filespace_id = H5.H5Dget_space(norm_errors_id);
        type_id = H5.H5Dget_type(norm_errors_id);
        memspace_id = H5.H5Screate_simple(block.length, block, null);
        select_id = H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, block, count,
                block);
        if (select_id < 0) {
            throw new HDF5Exception("Failed to allocate space for writing Normalisation error data");
        }
        write_id = H5.H5Dwrite(norm_errors_id, type_id, memspace_id, filespace_id, HDF5Constants.H5P_DEFAULT,
                myres.getError().getBuffer());
        if (write_id < 0) {
            throw new HDF5Exception("Failed to write Normalisation error data into the results file");
        }

    } finally {
        lock.release();
        NcdNexusUtils.closeH5idList(new ArrayList<Integer>(Arrays.asList(memspace_id, type_id, filespace_id)));
    }

    return myres;
}

From source file:uk.ac.diamond.scisoft.ncd.reduction.LazySectorIntegration.java

public void configure(int dim, long[] frames, int processing_group_id) throws HDF5Exception {
    sec_group_id = NcdNexusUtils.makegroup(processing_group_id, LazySectorIntegration.name, Nexus.DETECT);
    int typeFloat = HDF5Constants.H5T_NATIVE_FLOAT;
    int typeDouble = HDF5Constants.H5T_NATIVE_DOUBLE;
    int[] intRadii = intSector.getIntRadii();
    double[] radii = intSector.getRadii();
    double dpp = intSector.getDpp();
    int secRank = frames.length - dim + 1;
    secFrames = Arrays.copyOf(frames, secRank);
    secFrames[secRank - 1] = intRadii[1] - intRadii[0] + 1;
    sec_data_id = NcdNexusUtils.makedata(sec_group_id, "data", typeFloat, secFrames, true, "counts");
    sec_errors_id = NcdNexusUtils.makedata(sec_group_id, "errors", typeDouble, secFrames, false, "counts");

    double[] angles = intSector.getAngles();
    long[] azFrames = Arrays.copyOf(frames, secRank);
    if (intSector.getSymmetry() == SectorROI.FULL) {
        angles[1] = angles[0] + 2 * Math.PI;
    }
    azFrames[secRank - 1] = (int) Math.ceil((angles[1] - angles[0]) * radii[1] * dpp);
    az_data_id = NcdNexusUtils.makedata(sec_group_id, "azimuth", typeFloat, azFrames, false, "counts");
    az_errors_id = NcdNexusUtils.makedata(sec_group_id, "azimuth_errors", typeDouble, azFrames, false,
            "counts");

    int[] areaShape = (int[]) ConvertUtils
            .convert(Arrays.copyOfRange(frames, frames.length - dim, frames.length), int[].class);
    areaData = ROIProfile.area(areaShape, Dataset.FLOAT32, mask, intSector, calculateRadial, calculateAzimuthal,
            fast);

    if (qaxis != null) {
        writeQaxisData(secRank, sec_group_id);
    }
    writeNcdMetadata(sec_group_id);
}

From source file:uk.ac.diamond.scisoft.ncd.reduction.LazySelection.java

public DataSliceIdentifiers[] execute(int dim, DataSliceIdentifiers ids, DataSliceIdentifiers error_ids,
        int output_group_id) throws HDF5Exception {

    int[] datDimMake = Arrays.copyOfRange(frames, 0, frames.length - dim);
    int[] imageSize = Arrays.copyOfRange(frames, frames.length - dim, frames.length);
    ArrayList<int[]> list = NcdDataUtils.createSliceList(format, datDimMake);
    for (int i = 0; i < datDimMake.length; i++) {
        datDimMake[i] = list.get(i).length;
    }
    long[] framesTotal = (long[]) ConvertUtils.convert(ArrayUtils.addAll(datDimMake, imageSize), long[].class);

    long[] block = new long[frames.length];
    block = Arrays.copyOf((long[]) ConvertUtils.convert(frames, long[].class), block.length);
    Arrays.fill(block, 0, block.length - dim, 1);
    int[] block_int = (int[]) ConvertUtils.convert(block, int[].class);

    long[] count = new long[frames.length];
    Arrays.fill(count, 1);

    int dtype = HDF5Utils.getDtype(ids.dataclass_id, ids.datasize_id);
    Dataset data = DatasetFactory.zeros(block_int, dtype);
    int output_data_id = NcdNexusUtils.makedata(output_group_id, "data", ids.datatype_id, framesTotal, true,
            "counts");
    int output_dataspace_id = H5.H5Dget_space(output_data_id);

    Dataset errors = DatasetFactory.zeros(block_int, dtype);
    int errors_datatype_id = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_DOUBLE);
    int errors_data_id = NcdNexusUtils.makedata(output_group_id, "errors", errors_datatype_id, framesTotal,
            true, "counts");
    int errors_dataspace_id = H5.H5Dget_space(errors_data_id);

    MultidimensionalCounter frameCounter = new MultidimensionalCounter(datDimMake);
    Iterator iter = frameCounter.iterator();
    while (iter.hasNext()) {
        iter.next();
        long[] frame = (long[]) ConvertUtils.convert(iter.getCounts(), long[].class);
        long[] gridFrame = new long[datDimMake.length];
        for (int i = 0; i < datDimMake.length; i++) {
            gridFrame[i] = list.get(i)[(int) frame[i]];
        }

        long[] start = new long[frames.length];
        start = Arrays.copyOf(gridFrame, frames.length);
        long[] writePosition = new long[frames.length];
        writePosition = Arrays.copyOf(frame, frames.length);

        int memspace_id = H5.H5Screate_simple(block.length, block, null);
        H5.H5Sselect_hyperslab(ids.dataspace_id, HDF5Constants.H5S_SELECT_SET, start, block, count, block);
        H5.H5Dread(ids.dataset_id, ids.datatype_id, memspace_id, ids.dataspace_id, HDF5Constants.H5P_DEFAULT,
                data.getBuffer());
        int errors_memspace_id = H5.H5Screate_simple(block.length, block, null);
        if (error_ids.dataset_id >= 0) {
            H5.H5Sselect_hyperslab(error_ids.dataspace_id, HDF5Constants.H5S_SELECT_SET, start, block, count,
                    block);
            H5.H5Dread(error_ids.dataset_id, error_ids.datatype_id, errors_memspace_id, error_ids.dataspace_id,
                    HDF5Constants.H5P_DEFAULT, errors.getBuffer());
            data.setError(errors);
        } else {
            Object obj = DatasetUtils.createJavaArray(data);
            Dataset error = DatasetFactory.createFromObject(obj);
            error.ipower(0.5);
            data.setError(error);
        }

        H5.H5Sselect_hyperslab(output_dataspace_id, HDF5Constants.H5S_SELECT_SET, writePosition, block, count,
                block);
        H5.H5Dwrite(output_data_id, ids.datatype_id, memspace_id, output_dataspace_id,
                HDF5Constants.H5P_DEFAULT, data.getBuffer());

        H5.H5Sselect_hyperslab(errors_dataspace_id, HDF5Constants.H5S_SELECT_SET, writePosition, block, count,
                block);
        H5.H5Dwrite(errors_data_id, errors_datatype_id, errors_memspace_id, errors_dataspace_id,
                HDF5Constants.H5P_DEFAULT, data.getError().getBuffer());

        if (monitor.isCanceled()) {
            return null;
        }

        monitor.worked(1);

    }

    DataSliceIdentifiers outputDataIds = new DataSliceIdentifiers();
    outputDataIds.setIDs(output_group_id, output_data_id);
    DataSliceIdentifiers outputErrorsIds = new DataSliceIdentifiers();
    outputErrorsIds.setIDs(output_group_id, errors_data_id);
    return new DataSliceIdentifiers[] { outputDataIds, outputErrorsIds };
}

From source file:wicket.contrib.groovy.builder.util.AttributeUtils.java

public static Object generalAttributeConversion(Class expectedType, Object value) {
    if (value instanceof String) {
        if (expectedType.equals(Class.class)) {
            try {
                value = AttributeUtils.classValue(value);
            } catch (ClassNotFoundException e) {
                throw new WicketComponentBuilderException(
                        "Attribute conversion error.  Could not find class '" + value.toString() + "'", e);
            }
        }
        if (expectedType.equals(String.class) == false)
            value = ConvertUtils.convert((String) value, expectedType);
    }

    return value;
}

From source file:wwutil.jsoda.DataUtil.java

/** Caller should handle custom valueType first before calling this.
 * E.g. DynamoDB's Set<String> and Set<long> fields are encoded as Multi-Value AttributeValue.
 */
@SuppressWarnings("unchecked")
static String encodeValueToAttrStr(Object value, Class valueType) {
    if (value == null)
        return null; // Caller needs to handle null correctly, e.g. skip storing AttributeValue.

    if (valueType == String.class)
        return value.toString();

    // NOTE: Don't change encoding and padding once data have been created.  Different encoding will mess up sorting.
    // Stringify basic type and encode them for sorting.
    if (valueType == Byte.class || valueType == byte.class) {
        Byte casted = (Byte) ConvertUtils.convert(value, Byte.class);
        return SimpleDBUtils.encodeZeroPadding(casted.intValue(), 3); // 0-Padded for sorting
    } else if (valueType == Short.class || valueType == short.class) {
        Short casted = (Short) ConvertUtils.convert(value, Short.class);
        return SimpleDBUtils.encodeZeroPadding(casted.intValue(), 5); // 0-Padded for sorting
    } else if (valueType == Integer.class || valueType == int.class) {
        Integer casted = (Integer) ConvertUtils.convert(value, Integer.class);
        return SimpleDBUtils.encodeZeroPadding(casted.intValue(), 10); // 0-Padded for sorting
    } else if (valueType == Long.class || valueType == long.class) {
        Long casted = (Long) ConvertUtils.convert(value, Long.class);
        return SimpleDBUtils.encodeZeroPadding(casted.longValue(), 19); // 0-Padded for sorting
    } else if (valueType == Float.class || valueType == float.class) {
        Float casted = (Float) ConvertUtils.convert(value, Float.class);
        return SimpleDBUtils.encodeZeroPadding(casted.floatValue(), 16); // 0-Padded for sorting
    } else if (valueType == Double.class || valueType == double.class) {
        // SimpleDBUtils has no padding for double.  Just convert it to String.
        return value.toString();
    } else if (valueType == Boolean.class || valueType == boolean.class) {
        return value.toString();
    } else if (valueType == Character.class || valueType == char.class) {
        return value.toString();
    } else if (valueType == Date.class) {
        return SimpleDBUtils.encodeDate((Date) value);
    } else if (valueType.isEnum()) {
        return ((Enum) value).name();
    }

    // JSONify the rest.
    return toJson(value);
}
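
As a quick illustration of the zero-padding idea described in the comments above (a sketch only, assuming commons-beanutils and the AWS SimpleDBUtils helper are on the classpath; the values are hypothetical):

import org.apache.commons.beanutils.ConvertUtils;

import com.amazonaws.services.simpledb.util.SimpleDBUtils;

public class ZeroPaddingDemo {
    public static void main(String[] args) {
        // Normalise the incoming value to Integer via ConvertUtils, then zero-pad it
        // so that lexicographic (string) ordering matches numeric ordering.
        Integer casted = (Integer) ConvertUtils.convert("42", Integer.class);
        System.out.println(SimpleDBUtils.encodeZeroPadding(casted.intValue(), 10)); // 0000000042
    }
}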

From source file:wwutil.jsoda.DynamoDBService.java

private Object attrToValue(Field field, AttributeValue attr) throws Exception {
    // Handle Set<String>, Set<Long>, or Set<Integer> field.
    if (Set.class.isAssignableFrom(field.getType())) {
        Class paramType = ReflectUtil.getGenericParamType1(field.getGenericType());
        if (isMultiValuetype(paramType)) {
            if (isN(paramType))
                return DataUtil.toObjectSet(attr.getNS(), paramType);
            else
                return DataUtil.toObjectSet(attr.getSS(), paramType);
        }
    }

    // Handle number types
    if (isN(field.getType())) {
        return ConvertUtils.convert(attr.getN(), field.getType());
    }

    // Delegate to DataUtil to decode the rest.
    return DataUtil.decodeAttrStrToValue(attr.getS(), field.getType());
}

From source file:wwutil.model.BuiltinFunc.java

private static void setupBuiltinPreStore1Handlers(AnnotationRegistry registry) {

    registry.register(DefaultGUID.class, new AnnotationFieldHandler() {
        public void checkModel(Annotation fieldAnnotation, Field field, Map<String, Field> allFieldMap)
                throws ValidationException {
            if (field.getType() != String.class)
                throw new ValidationException(
                        "The @DefaultGUID field must be String type.  Field: " + field.getName());
        }

        public void handle(Annotation fieldAnnotation, Object object, Field field,
                Map<String, Field> allFieldMap) throws Exception {
            Object value = field.get(object);
            if (value == null || value.toString().length() == 0) {
                boolean isShort = ReflectUtil.getAnnoValue(fieldAnnotation, "isShort", false);
                String uuidStr = isShort ? BaseXUtil.uuid8() : BaseXUtil.uuid16();
                field.set(object, uuidStr);
            }
        }
    });

    registry.register(ModifiedTime.class, new AnnotationFieldHandler() {
        public void checkModel(Annotation fieldAnnotation, Field field, Map<String, Field> allFieldMap)
                throws ValidationException {
            if (field.getType() != java.util.Date.class)
                throw new ValidationException(
                        "The @ModifiedTime field must be java.util.Date type.  Field: " + field.getName());
        }

        public void handle(Annotation fieldAnnotation, Object object, Field field,
                Map<String, Field> allFieldMap) throws Exception {
            field.set(object, new Date());
        }
    });

    registry.register(VersionLocking.class, new AnnotationFieldHandler() {
        public void checkModel(Annotation fieldAnnotation, Field field, Map<String, Field> allFieldMap)
                throws ValidationException {
            if (field.getType() != Integer.class && field.getType() != int.class)
                throw new ValidationException(
                        "The @VersionLocking field must be int type.  Field: " + field.getName());
        }

        public void handle(Annotation fieldAnnotation, Object object, Field field,
                Map<String, Field> allFieldMap) throws Exception {
            ReflectUtil.incrementField(object, field, 1);
        }
    });

    registry.register(ToUpper.class, new AnnotationFieldHandler() {
        public void checkModel(Annotation fieldAnnotation, Field field, Map<String, Field> allFieldMap)
                throws ValidationException {
            if (field.getType() != String.class)
                throw new ValidationException(
                        "The @ToUpper field must be String type.  Field: " + field.getName());
        }

        public void handle(Annotation fieldAnnotation, Object object, Field field,
                Map<String, Field> allFieldMap) throws Exception {
            String value = (String) field.get(object);
            if (value != null) {
                field.set(object, value.toUpperCase());
            }
        }
    });

    registry.register(ToLower.class, new AnnotationFieldHandler() {
        public void checkModel(Annotation fieldAnnotation, Field field, Map<String, Field> allFieldMap)
                throws ValidationException {
            if (field.getType() != String.class)
                throw new ValidationException(
                        "The @ToLower field must be String type.  Field: " + field.getName());
        }

        public void handle(Annotation fieldAnnotation, Object object, Field field,
                Map<String, Field> allFieldMap) throws Exception {
            String value = (String) field.get(object);
            if (value != null) {
                field.set(object, value.toLowerCase());
            }
        }
    });

    registry.register(Trim.class, new AnnotationFieldHandler() {
        public void checkModel(Annotation fieldAnnotation, Field field, Map<String, Field> allFieldMap)
                throws ValidationException {
            if (field.getType() != String.class)
                throw new ValidationException(
                        "The @Trim field must be String type.  Field: " + field.getName());
        }

        public void handle(Annotation fieldAnnotation, Object object, Field field,
                Map<String, Field> allFieldMap) throws Exception {
            String value = (String) field.get(object);
            if (value != null) {
                field.set(object, value.trim());
            }
        }
    });

    registry.register(RemoveChar.class, new AnnotationFieldHandler() {
        public void checkModel(Annotation fieldAnnotation, Field field, Map<String, Field> allFieldMap)
                throws ValidationException {
            if (field.getType() != String.class)
                throw new ValidationException(
                        "The @RemoveChar field must be String type.  Field: " + field.getName());
        }

        public void handle(Annotation fieldAnnotation, Object object, Field field,
                Map<String, Field> allFieldMap) throws Exception {
            char charToRemove = ReflectUtil.getAnnoValue(fieldAnnotation, "charToRemove", ' ');
            String value = (String) field.get(object);
            if (value != null) {
                field.set(object, StringUtils.remove(value, charToRemove));
            }
        }
    });

    registry.register(RemoveAlphaDigits.class, new AnnotationFieldHandler() {
        public void checkModel(Annotation fieldAnnotation, Field field, Map<String, Field> allFieldMap)
                throws ValidationException {
            if (field.getType() != String.class)
                throw new ValidationException(
                        "The @RemoveAlphaDigits field must be String type.  Field: " + field.getName());
        }

        public void handle(Annotation fieldAnnotation, Object object, Field field,
                Map<String, Field> allFieldMap) throws Exception {
            boolean removeDigits = ReflectUtil.getAnnoValue(fieldAnnotation, "removeDigits", false);
            String value = (String) field.get(object);
            if (value != null) {
                if (removeDigits)
                    field.set(object, value.replaceAll("[\\d]", "")); // remove all digits
                else
                    field.set(object, value.replaceAll("[^\\d]", "")); // remove all alphas (non-digits)
            }
        }
    });

    registry.register(MaxValue.class, new AnnotationFieldHandler() {
        public void checkModel(Annotation fieldAnnotation, Field field, Map<String, Field> allFieldMap)
                throws ValidationException {
            if (field.getType() != Integer.class && field.getType() != int.class
                    && field.getType() != Long.class && field.getType() != long.class
                    && field.getType() != Short.class && field.getType() != short.class
                    && field.getType() != Float.class && field.getType() != float.class
                    && field.getType() != Double.class && field.getType() != double.class)
                throw new ValidationException(
                        "The @MaxValue field must be number type.  Field: " + field.getName());
        }

        public void handle(Annotation fieldAnnotation, Object object, Field field,
                Map<String, Field> allFieldMap) throws Exception {
            Object maxValueObj = ReflectUtil.getAnnoValue(fieldAnnotation, "value", (Object) null);
            double maxValue = ((Double) ConvertUtils.convert(maxValueObj, Double.class)).doubleValue();
            Object valueObj = field.get(object);
            double value = ((Double) ConvertUtils.convert(valueObj, Double.class)).doubleValue();
            value = (value > maxValue ? maxValue : value);
            field.set(object, ConvertUtils.convert(value, field.getType()));
        }
    });

    registry.register(MinValue.class, new AnnotationFieldHandler() {
        public void checkModel(Annotation fieldAnnotation, Field field, Map<String, Field> allFieldMap)
                throws ValidationException {
            if (field.getType() != Integer.class && field.getType() != int.class
                    && field.getType() != Long.class && field.getType() != long.class
                    && field.getType() != Short.class && field.getType() != short.class
                    && field.getType() != Float.class && field.getType() != float.class
                    && field.getType() != Double.class && field.getType() != double.class)
                throw new ValidationException(
                        "The @MinValue field must be number type.  Field: " + field.getName());
        }

        public void handle(Annotation fieldAnnotation, Object object, Field field,
                Map<String, Field> allFieldMap) throws Exception {
            Object minValueObj = ReflectUtil.getAnnoValue(fieldAnnotation, "value", (Object) null);
            double minValue = ((Double) ConvertUtils.convert(minValueObj, Double.class)).doubleValue();
            Object valueObj = field.get(object);
            double value = ((Double) ConvertUtils.convert(valueObj, Double.class)).doubleValue();
            value = (value < minValue ? minValue : value);
            field.set(object, ConvertUtils.convert(value, field.getType()));
        }
    });

    registry.register(AbsValue.class, new AnnotationFieldHandler() {
        public void checkModel(Annotation fieldAnnotation, Field field, Map<String, Field> allFieldMap)
                throws ValidationException {
            if (field.getType() != Integer.class && field.getType() != int.class
                    && field.getType() != Long.class && field.getType() != long.class
                    && field.getType() != Short.class && field.getType() != short.class
                    && field.getType() != Float.class && field.getType() != float.class
                    && field.getType() != Double.class && field.getType() != double.class)
                throw new ValidationException(
                        "The @AbsValue field must be number type.  Field: " + field.getName());
        }

        public void handle(Annotation fieldAnnotation, Object object, Field field,
                Map<String, Field> allFieldMap) throws Exception {
            Object valueObj = field.get(object);
            double value = ((Double) ConvertUtils.convert(valueObj, Double.class)).doubleValue();
            value = Math.abs(value);
            field.set(object, ConvertUtils.convert(value, field.getType()));
        }
    });

    registry.register(CeilValue.class, new AnnotationFieldHandler() {
        public void checkModel(Annotation fieldAnnotation, Field field, Map<String, Field> allFieldMap)
                throws ValidationException {
            if (field.getType() != Integer.class && field.getType() != int.class
                    && field.getType() != Long.class && field.getType() != long.class
                    && field.getType() != Short.class && field.getType() != short.class
                    && field.getType() != Float.class && field.getType() != float.class
                    && field.getType() != Double.class && field.getType() != double.class)
                throw new ValidationException(
                        "The @CeilValue field must be number type.  Field: " + field.getName());
        }

        public void handle(Annotation fieldAnnotation, Object object, Field field,
                Map<String, Field> allFieldMap) throws Exception {
            Object valueObj = field.get(object);
            double value = ((Double) ConvertUtils.convert(valueObj, Double.class)).doubleValue();
            value = Math.ceil(value);
            field.set(object, ConvertUtils.convert(value, field.getType()));
        }
    });

    registry.register(FloorValue.class, new AnnotationFieldHandler() {
        public void checkModel(Annotation fieldAnnotation, Field field, Map<String, Field> allFieldMap)
                throws ValidationException {
            if (field.getType() != Integer.class && field.getType() != int.class
                    && field.getType() != Long.class && field.getType() != long.class
                    && field.getType() != Short.class && field.getType() != short.class
                    && field.getType() != Float.class && field.getType() != float.class
                    && field.getType() != Double.class && field.getType() != double.class)
                throw new ValidationException(
                        "The @FloorValue field must be number type.  Field: " + field.getName());
        }

        public void handle(Annotation fieldAnnotation, Object object, Field field,
                Map<String, Field> allFieldMap) throws Exception {
            Object valueObj = field.get(object);
            double value = ((Double) ConvertUtils.convert(valueObj, Double.class)).doubleValue();
            value = Math.floor(value);
            field.set(object, ConvertUtils.convert(value, field.getType()));
        }
    });

}