Example usage for org.apache.commons.configuration2.convert.DefaultListDelimiterHandler#DefaultListDelimiterHandler(char)

Introduction

On this page you can find example usage of the org.apache.commons.configuration2.convert.DefaultListDelimiterHandler constructor, DefaultListDelimiterHandler(char).

Prototype

public DefaultListDelimiterHandler(final char listDelimiter) 

Document

Creates a new instance of DefaultListDelimiterHandler and sets the list delimiter character.
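
A minimal sketch of what the configured delimiter character does, independent of any builder (the value string and class name are illustrative, not taken from the examples below):

import java.util.Collection;
import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler;

public class DelimiterHandlerSketch {
    public static void main(String[] args) {
        //the handler splits raw property values on the configured character;
        //the second argument asks it to trim whitespace around each element
        DefaultListDelimiterHandler handler = new DefaultListDelimiterHandler(';');
        Collection<String> parts = handler.split("alpha; beta; gamma", true);
        System.out.println(parts); //prints [alpha, beta, gamma]
    }
}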

Usage

From source file:imp.lstm.main.Driver.java

public static void main(String[] args)
        throws FileNotFoundException, IOException, ConfigurationException, InvalidParametersException {
    FileBasedConfigurationBuilder<PropertiesConfiguration> builder = new FileBasedConfigurationBuilder<>(
            PropertiesConfiguration.class).configure(
                    new Parameters().properties().setFileName(args[0]).setThrowExceptionOnMissing(true)
                            .setListDelimiterHandler(new DefaultListDelimiterHandler(';'))
                            .setIncludesAllowed(false));
    Configuration config = builder.getConfiguration();

    String inputSongPath = config.getString("input_song");
    String outputFolderPath = config.getString("output_folder");
    String autoEncoderParamsPath = config.getString("auto_encoder_params");
    String nameGeneratorParamsPath = config.getString("name_generator_params");
    String queueFolderPath = config.getString("queue_folder");
    String referenceQueuePath = config.getString("reference_queue", "nil");
    String inputCorpusFolder = config.getString("input_corpus_folder");
    boolean shouldWriteQueue = config.getBoolean("should_write_generated_queue");
    boolean frankensteinTest = config.getBoolean("queue_tests_frankenstein");
    boolean interpolateTest = config.getBoolean("queue_tests_interpolation");
    boolean iterateOverCorpus = config.getBoolean("iterate_over_corpus", false);
    boolean shouldGenerateSongTitle = config.getBoolean("generate_song_title");
    boolean shouldGenerateSong = config.getBoolean("generate_leadsheet");
    boolean advanceDecoding = config.getBoolean("advance_decoding", true); //assumed property name; this flag is used below but its declaration is missing from the listing as shown

    LogTimer.initStartTime(); //start our logging timer to keep track of our execution time
    LogTimer.log("Creating name generator...");

    //here is just silly code for generating name based on an LSTM lol $wag
    LSTM lstm = new LSTM();
    FullyConnectedLayer fullLayer = new FullyConnectedLayer(Operations.None);
    Loadable titleNetLoader = new Loadable() {
        @Override
        public boolean load(INDArray array, String path) {
            String car = pathCar(path);
            String cdr = pathCdr(path);
            switch (car) {
            case "full":
                return fullLayer.load(array, cdr);
            case "lstm":
                return lstm.load(array, cdr);
            default:
                return false;
            }
        }
    };

    LogTimer.log("Packing name generator from files...");
    (new NetworkConnectomeLoader()).load(nameGeneratorParamsPath, titleNetLoader);

    String characterString = " !\"'[],-.01245679:?ABCDEFGHIJKLMNOPQRSTUVWYZabcdefghijklmnopqrstuvwxyz";

    //Initialization
    LogTimer.log("Creating autoencoder...");
    int inputSize = 34;
    int outputSize = EncodingParameters.noteEncoder.getNoteLength();
    int featureVectorSize = 100;
    ProductCompressingAutoencoder autoencoder = new ProductCompressingAutoencoder(24, 48, 84 + 1, false); //create our network

    int numInterpolationDivisions = 5;

    //"pack" the network from weights and biases file directory
    LogTimer.log("Packing autoencoder from files");
    (new NetworkConnectomeLoader()).load(autoEncoderParamsPath, autoencoder);

    File[] songFiles;
    if (iterateOverCorpus) {
        songFiles = new File(inputCorpusFolder).listFiles();
    } else {
        songFiles = new File[] { new File(inputSongPath) };
    }
    for (File inputFile : songFiles) {
        (new NetworkConnectomeLoader()).refresh(autoEncoderParamsPath, autoencoder, "initialstate");
        String songTitle;
        if (shouldGenerateSong) {
            Random rand = new Random();
            AVector charOut = Vector.createLength(characterString.length());
            GroupedSoftMaxSampler sampler = new GroupedSoftMaxSampler(
                    new Group[] { new Group(0, characterString.length(), true) });
            songTitle = "";
            for (int i = 0; i < 50; i++) {
                charOut = fullLayer.forward(lstm.step(charOut));
                charOut = sampler.filter(charOut);
                int charIndex = 0;
                for (; charIndex < charOut.length(); charIndex++) {
                    if (charOut.get(charIndex) == 1.0) {
                        break;
                    }
                }
                songTitle += characterString.substring(charIndex, charIndex + 1);
            }
            songTitle = songTitle.trim();

            LogTimer.log("Generated song name: " + songTitle);
        } else {
            songTitle = "The Song We Never Name";
        }
        LogTimer.log("Reading file...");
        LeadSheetDataSequence inputSequence = LeadSheetIO.readLeadSheet(inputFile); //read our leadsheet to get a data vessel as retrieved in rbm-provisor
        LeadSheetDataSequence outputSequence = inputSequence.copy();

        outputSequence.clearMelody();
        if (interpolateTest) {
            LeadSheetDataSequence additionalOutput = outputSequence.copy();
            for (int i = 0; i < numInterpolationDivisions; i++) {
                outputSequence.concat(additionalOutput.copy());
            }
        }
        LeadSheetDataSequence decoderInputSequence = outputSequence.copy();

        LogTimer.startLog("Encoding data...");
        //TradingTimer.initStart(); //start our trading timer to keep track of our generation versus realtime play
        while (inputSequence.hasNext()) { //iterate through time steps in input data
            //TradingTimer.waitForNextTimedInput();
            autoencoder.encodeStep(inputSequence.retrieve()); //feed the resultant input vector into the network
            if (advanceDecoding) { //if we are using advance decoding (we start decoding as soon as we can)
                if (autoencoder.canDecode()) { //if queue has enough data to decode from
                    outputSequence.pushStep(null, null,
                            autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder
                    //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are
                }
            }
        }
        LogTimer.endLog();

        if (shouldWriteQueue) {
            String queueFilePath = queueFolderPath + java.io.File.separator
                    + inputFile.getName().replace(".ls", ".q");
            FragmentedNeuralQueue currQueue = autoencoder.getQueue();
            currQueue.writeToFile(queueFilePath);
            LogTimer.log("Wrote queue " + inputFile.getName().replace(".ls", ".q") + " to file...");
        }
        if (shouldGenerateSong) {
            if (interpolateTest) {

                FragmentedNeuralQueue refQueue = new FragmentedNeuralQueue();
                refQueue.initFromFile(referenceQueuePath);

                FragmentedNeuralQueue currQueue = autoencoder.getQueue();
                //currQueue.writeToFile(queueFilePath);

                autoencoder.setQueue(currQueue.copy());
                while (autoencoder.hasDataStepsLeft()) { //we are done encoding all time steps, so just finish decoding!
                    outputSequence.pushStep(null, null,
                            autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder
                    //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are       
                }

                for (int i = 1; i <= numInterpolationDivisions; i++) {
                    System.out.println("Starting interpolation " + ((1.0 / numInterpolationDivisions) * (i)));
                    (new NetworkConnectomeLoader()).refresh(autoEncoderParamsPath, autoencoder, "initialstate");
                    FragmentedNeuralQueue currCopy = currQueue.copy();
                    currCopy.basicInterpolate(refQueue, (1.0 / numInterpolationDivisions) * (i));
                    autoencoder.setQueue(currCopy);
                    int timeStep = 0;
                    while (autoencoder.hasDataStepsLeft()) { //we are done encoding all time steps, so just finish decoding!
                        System.out.println("interpolation " + i + " step " + ++timeStep);
                        outputSequence.pushStep(null, null,
                                autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder
                        //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are       
                    }
                }

            }
            if (frankensteinTest) {
                LogTimer.startLog("Loading queues");
                File queueFolder = new File(queueFolderPath);
                int numComponents = config.getInt("frankenstein_num_components", 5);
                int numCombinations = config.getInt("frankenstein_num_combinations", 6);
                double interpolationMagnitude = config.getDouble("frankenstein_magnitude", 2.0);
                if (queueFolder.isDirectory()) {
                    File[] queueFiles = queueFolder.listFiles(new FilenameFilter() {
                        @Override
                        public boolean accept(File dir, String name) {
                            return name.contains(".q");
                        }
                    });

                    List<File> fileList = new ArrayList<>();
                    for (File file : queueFiles) {
                        fileList.add(file);
                    }
                    Collections.shuffle(fileList);
                    int numSelectedFiles = (numComponents > queueFiles.length) ? queueFiles.length
                            : numComponents;

                    for (int i = 0; i < queueFiles.length - numSelectedFiles; i++) {
                        fileList.remove(fileList.size() - 1);
                    }
                    List<FragmentedNeuralQueue> queuePopulation = new ArrayList<>(fileList.size());
                    songTitle += " - a mix of ";
                    for (File file : fileList) {
                        FragmentedNeuralQueue newQueue = new FragmentedNeuralQueue();
                        newQueue.initFromFile(file.getPath());
                        queuePopulation.add(newQueue);
                        songTitle += file.getName().replace(".ls", "") + ", "; //literal replace, not regex replaceAll
                    }
                    LogTimer.endLog();

                    LeadSheetDataSequence additionalOutput = outputSequence.copy();
                    for (int i = 1; i < numCombinations; i++) {
                        outputSequence.concat(additionalOutput.copy());
                    }
                    decoderInputSequence = outputSequence.copy();

                    FragmentedNeuralQueue origQueue = autoencoder.getQueue();

                    for (int i = 0; i < numCombinations; i++) {

                        LogTimer.startLog("Performing queue interpolation...");
                        AVector combinationStrengths = Vector.createLength(queuePopulation.size());
                        Random vectorRand = new Random(i);
                        for (int j = 0; j < combinationStrengths.length(); j++) {
                            combinationStrengths.set(j, vectorRand.nextDouble());
                        }
                        combinationStrengths.divide(combinationStrengths.elementSum());
                        FragmentedNeuralQueue currQueue = origQueue.copy();
                        for (int k = 0; k < combinationStrengths.length(); k++) {
                            currQueue.basicInterpolate(queuePopulation.get(k),
                                    combinationStrengths.get(k) * interpolationMagnitude);
                        }
                        LogTimer.endLog();
                        autoencoder.setQueue(currQueue);
                        LogTimer.startLog("Refreshing autoencoder state...");
                        (new NetworkConnectomeLoader()).refresh(autoEncoderParamsPath, autoencoder,
                                "initialstate");
                        LogTimer.endLog();
                        LogTimer.startLog("Decoding segment...");
                        while (autoencoder.hasDataStepsLeft()) { //we are done encoding all time steps, so just finish decoding!
                            outputSequence.pushStep(null, null,
                                    autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder
                            //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are       
                        }
                        LogTimer.endLog();
                    }

                }
            }

            while (autoencoder.hasDataStepsLeft()) { //we are done encoding all time steps, so just finish decoding!
                outputSequence.pushStep(null, null, autoencoder.decodeStep(decoderInputSequence.retrieve())); //take sampled data for a timestep from autoencoder
                //TradingTimer.logTimestep(); //log our time to TradingTimer so we can know how far ahead of realtime we are       
            }
            LogTimer.log("Writing file...");

            String outputFilename = outputFolderPath + java.io.File.separator
                    + inputFile.getName().replace(".ls", "_Output"); //we'll write our generated file with the same name plus "_Output"
            LeadSheetIO.writeLeadSheet(outputSequence, outputFilename, songTitle);
            System.out.println(outputFilename);
        } else {
            autoencoder.setQueue(new FragmentedNeuralQueue());
        }
    }
    LogTimer.log("Process finished"); //Done!

}
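
Because the builder above registers DefaultListDelimiterHandler(';'), any semicolon-delimited value in the properties file comes back as a list rather than a single string. A minimal sketch using the config object from the example (the property key queue_tests is hypothetical, not one of the keys this driver reads):

    //if the properties file contained:  queue_tests = frankenstein; interpolation
    //the configuration built above would expose both elements:
    List<String> tests = config.getList(String.class, "queue_tests"); //["frankenstein", "interpolation"]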

From source file:main.Driver.java

/**
 * The path to a properties file which will supply parameter values for the tests should be passed in as argument 0 to main. 
 * The test that will be run is determined by the value of 'test_type' in the properties file, and each of the tests has its own properties:
 *      'encode+decode' - Encode and decode the given leadsheet with the autoencoder, writing the result to a leadsheet file.
 *              Params:
 *                  * autoencoder_connectome={the path to the connectome which the autoencoder will be loaded with}
 *                  * name_generator_connectome={the path to the connectome which the name generator will be loaded with}
 *                  * input_leadsheet={the path to the leadsheet file which will be encoded and decoded}
 *                  * output_folder={the path to the output folder which the result leadsheet file will be written in}
 * 
 *      'encode+write_queue' - Encode the given leadsheet with the autoencoder, then write the encoded feature queue to a queue file.
 *              Params:
 *                  * autoencoder_connectome={the path to the connectome which the autoencoder will be loaded with}
 *                  * input_leadsheet={the path to the leadsheet file which will be encoded}
 *                  * queue_folder={the path to the output folder which the result queue file will be written in}
 * 
 *      'encode+write_queue+decode' - Encode the given leadsheet with the autoencoder, write the encoded feature queue to a queue file, and then write the result leadsheet to a leadsheet file.
 *                  * autoencoder_connectome={the path to the connectome which the autoencoder will be loaded with}
 *                  * name_generator_connectome={the path to the connectome which the name generator will be loaded with}
 *                  * input_leadsheet={the path to the leadsheet file which will be encoded and decoded}
 *                  * queue_folder={the path to the output folder which the result queue file will be written in}
 *                  * output_folder={the path to the output folder which the result leadsheet file will be written in}
 *      'create_feature_property_vector' - Given a corpus folder of leadsheets, construct a vector consisting of property analysis values for each feature in the corpus data
 *                  * input_corpus_folder={the path to the corpus folder containing all leadsheets to analyze}
 *                  * feature_size={the size (in time steps) of each feature}
 *                  * feature_properties_path={the path to write the generated vector file to (the file will be a csv file containing all the values in left-to-right order}
 *                  * feature_property={the type of feature property to analyze - current options are 'rest', 'sustain', 'articulate' (these return ratios of time steps with the given property to the total time steps in the feature)}.
 *      'compile_feature_queue_matrix' - Given a corpus folder of feature queues, construct a matrix of all feature vectors and write it as a csv file
 *                  * queue_folder={the path to the folder containing all queue files to compile}
 *                  * feature_matrix_path={the path to write the result csv file to}
 *      'generate_from_feature_queue_matrix' - Given a matrix of feature vectors, load the autoencoder with a queue of those features and decode from it, writing the result leadsheet to a file
 *                  * autoencoder_connectome={the path to the connectome which the autoencoder will be loaded with}
 *                  * reference_leadsheet={the path to the leadsheet we will take the chord sequence from (and loop it to match the length of the feature queue)}
 *                  * feature_queue_matrix_path={the path to the feature queue matrix file we will decode from}
 *                  * output_file_path={the path to the file we will write our result leadsheet to}
 *                  * (optional) song_title={the song title to write in the leadsheet file - by default this is "Generation from Feature Matrix {path of the feature matrix}"}
 *                  * feature_size={the size (in time steps) of features}
 *      'population_trade' - Given a leadsheet file, split it into sections of a specified size, and between sections, generate a response that plays off of a population of previously encoded feature queues
 *                  * autoencoder_connectome={the path to the connectome which the autoencoder will be loaded with}
 *                  * input_leadsheet={the path to the leadsheet file which will be encoded and traded with}     
 *                  * output_folder={the path to the output folder which the result leadsheet file will be written in}
 *                  * trading_part_size={the size (in time steps) of each trading part. The input leadsheet will be split into sections of this size, and trading responses will be generated in between.}
 *                  * interpolation_variance={a random value between zero and this will be added to the interpolation_min at each trading section to calculate the interpolation of the recently encoded queue towards the queue population before decoding the trading response}
 *                  * interpolation_min={the minimum ratio of interpolation at each trading section}
 *                  * herding_strength={the maximum strength of the herding operation at each section (all queues in the population are interpolated a random amount towards the most recent queue)}
 *                  * mutation_strength={the maximum strength of mutation at each section (each element of the feature vectors of all queues in the population is mutated at a random strength)}
 *                  * crossover_strength={the maximum strength of crossover at each section (there is a chance for every queue that the queue will swap a random feature of itself with the corresponding feature of another random queue)}
 *      'interpolation' - Given a leadsheet file and a reference queue file, encode the leadsheet file with the autoencoder, and generate from the encoded queue for a number of divisions of a full interpolation towards the target queue
 *                  * autoencoder_connectome={the path to the connectome which the autoencoder will be loaded with}
 *                  * input_leadsheet={the path to the leadsheet file which will be encoded and interpolated}
 *                  * target_queue={the path to the queue to interpolate towards at each interpolation value};
 *                  * output_folder={the path to the output folder which the result leadsheet file will be written in}
 *                  * num_interpolation_divisions={the number of divisions of the interpolation strength from 0.0 to 1.0 (the length of the result leadsheet will be equal to the length of the original times 1 + number of divisions, as the first section of the result leadsheet is for interpolation 0.0)}
 *      'frankenstein' - Given a primary queue, a reference leadsheet for chords, and a corpus of queue files, construct the result leadsheet from a series of randomly weighted interpolations of the primary queue towards the set of selected queues.
 *                  * autoencoder_connectome={the path to the connectome which the autoencoder will be loaded with}
 *                  * primary_queue_path={the path to the queue which will serve as the base for all of the queue combinations (which are the result of sequential interpolations instead of a weighted sum)}
 *                  * reference_leadsheet={the path to the leadsheet we will take the chord sequence from (and loop it to match the desired length of our output)}
 *                  * queue_folder={the path to the folder containing all queue files we can select from}
 *                  * output_file_path={the path to the file we will write our result leadsheet to}
 *                  * num_reference_queues={the number of reference queues we will pick at random from the queue folder to sample from}
 *                  * num_combinations={the number of queue combinations to sample and create the result leadsheet from}
 *                  * interpolation_strength={the total magnitude of all interpolation operations for each combination}
 */
public static void main(String[] args) throws FileNotFoundException, IOException, ConfigurationException {
    FileBasedConfigurationBuilder<PropertiesConfiguration> builder = new FileBasedConfigurationBuilder<>(
            PropertiesConfiguration.class).configure(
                    new Parameters().properties().setFileName(args[0]).setThrowExceptionOnMissing(true)
                            .setListDelimiterHandler(new DefaultListDelimiterHandler(';'))
                            .setIncludesAllowed(false));
    Configuration config = builder.getConfiguration();

    LogTimer.initStartTime(); //start our logging timer to keep track of our execution time

    //switch statement to run the appropriate test
    switch (config.getString("test_type")) {
    case "encode+decode": {
        //load parameter values from config file
        String autoencoderConnectomePath = config.getString("autoencoder_connectome");
        String nameGeneratorConnectomePath = config.getString("name_generator_connectome");
        String inputLeadsheetPath = config.getString("input_leadsheet");
        String outputFolderPath = config.getString("output_folder");

        //initialize networks
        NameGenerator nameGenerator = initializeNameGenerator(nameGeneratorConnectomePath);
        ProductCompressingAutoencoder autoencoder = initializeAutoencoder(autoencoderConnectomePath, false);

        //initialize input sequences and output sequence
        LeadsheetDataSequence inputSequence = leadsheetToSequence(inputLeadsheetPath);
        LeadsheetDataSequence outputSequence = inputSequence.copy();
        outputSequence.clearMelody();
        LeadsheetDataSequence decoderInputSequence = outputSequence.copy();

        //encode and decode
        encodeFromSequence(autoencoder, inputSequence);
        decodeToSequence(autoencoder, outputSequence, decoderInputSequence);

        //generate song title
        String songTitle = nameGenerator.generateName();

        //write output to specified directory with same file name + _aeOutput suffix
        writeLeadsheetFile(outputSequence, outputFolderPath, new File(inputLeadsheetPath).getName(),
                "_aeOutput", songTitle);
    }
        break;

    case "encode+write_queue": {
        //load parameter values from config file
        String autoencoderConnectomePath = config.getString("autoencoder_connectome");
        String inputLeadsheetPath = config.getString("input_leadsheet");
        String queueFolderPath = config.getString("queue_folder");

        //initialize network
        ProductCompressingAutoencoder autoencoder = initializeAutoencoder(autoencoderConnectomePath, false);

        //initialize input sequence
        LeadsheetDataSequence inputSequence = leadsheetToSequence(inputLeadsheetPath);

        //encode
        encodeFromSequence(autoencoder, inputSequence);
        //write to a queue file in the specified queue folder (the write method will handle removing/adding extensions)
        writeQueueFile(autoencoder, queueFolderPath, new File(inputLeadsheetPath).getName());
    }
        break;
    case "encode+write_queue+decode": {
        //load parameter values from config file
        String autoencoderConnectomePath = config.getString("autoencoder_connectome");
        String nameGeneratorConnectomePath = config.getString("name_generator_connectome");
        String inputLeadsheetPath = config.getString("input_leadsheet");
        String queueFolderPath = config.getString("queue_folder");
        String outputFolderPath = config.getString("output_folder");

        //initialize networks
        NameGenerator nameGenerator = initializeNameGenerator(nameGeneratorConnectomePath);
        ProductCompressingAutoencoder autoencoder = initializeAutoencoder(autoencoderConnectomePath, false);

        //initialize input sequences and output sequence
        LeadsheetDataSequence inputSequence = leadsheetToSequence(inputLeadsheetPath);
        LeadsheetDataSequence outputSequence = inputSequence.copy();
        outputSequence.clearMelody();
        LeadsheetDataSequence decoderInputSequence = outputSequence.copy();

        //encode
        encodeFromSequence(autoencoder, inputSequence);
        //write to a queue file in the specified queue folder (the write method will handle removing/adding extensions)
        writeQueueFile(autoencoder, queueFolderPath, new File(inputLeadsheetPath).getName());
        //decode
        decodeToSequence(autoencoder, outputSequence, decoderInputSequence);

        //generate song title
        String songTitle = nameGenerator.generateName();

        //write output to specified directory with same file name + _aeOutput suffix
        writeLeadsheetFile(outputSequence, outputFolderPath, new File(inputLeadsheetPath).getName(),
                "_aeOutput", songTitle);
    }
        break;
    case "create_feature_property_vector": {
        //load parameter values from config file
        String inputCorpusFolder = config.getString("input_corpus_folder");
        int featureSize = config.getInt("feature_size");
        String featurePropertiesPath = config.getString("feature_properties_path");
        String featureProperty = config.getString("feature_property");

        //compile array of valid leadsheet files
        File[] songFiles = new File(inputCorpusFolder)
                .listFiles((File dir, String name) -> name.endsWith(".ls"));

        //construct feature property vector from analyzed feature property values of all songs
        AVector featurePropertyValues = Vector.createLength(0);
        int featureIndex = 0;
        for (File inputFile : songFiles) {
            LeadsheetDataSequence melodySequence = leadsheetToSequence(inputFile.getPath());
            featurePropertyValues.join(melodyFeatureAnalysis(melodySequence, featureProperty, featureSize));
        }

        //write generated feature_properties
        BufferedWriter writer = new BufferedWriter(
                new FileWriter(featurePropertiesPath + "_" + featureProperty + ".v"));
        writer.write(ReadWriteUtilities.getNumpyCSVString(featurePropertyValues));
        writer.close();
    }
        break;
    case "compile_feature_queue_matrix": {
        //load parameter values from config file
        String queueFolderPath = config.getString("queue_folder");
        String featureMatrixPath = config.getString("feature_matrix_path");

        //generate feature matrix from all feature queues in specified queue folder
        File[] queueFiles = new File(queueFolderPath).listFiles((File dir, String name) -> name.endsWith(".q"));
        AMatrix totalFeatureMatrix = generateFeatureQueueMatrix(queueFiles);
        String writeData = ReadWriteUtilities.getNumpyCSVString(totalFeatureMatrix);
        BufferedWriter writer = new BufferedWriter(new FileWriter(featureMatrixPath));
        writer.write(writeData);
        writer.close();
    }
        break;
    case "generate_from_feature_queue_matrix": {
        //load parameter values from config file
        String autoencoderConnectomePath = config.getString("autoencoder_connectome");
        String referenceLeadsheetPath = config.getString("reference_leadsheet");
        String featureQueueMatrixPath = config.getString("feature_queue_matrix_path");
        String outputFilePath = config.getString("output_file_path");
        String songTitle = config.getString("song_title",
                "Generation from Feature Matrix " + featureQueueMatrixPath);
        int featureSize = config.getInt("feature_size");

        //initialize network
        ProductCompressingAutoencoder autoencoder = initializeAutoencoder(autoencoderConnectomePath, false);

        //initialize chord sequence
        LeadsheetDataSequence chordSequence = leadsheetToSequence(referenceLeadsheetPath);
        chordSequence.clearMelody();

        //call generation method
        generateFromFeatureMatrix(autoencoder, autoencoderConnectomePath, chordSequence, featureQueueMatrixPath,
                featureSize, outputFilePath, songTitle);
    }
        break;
    case "population_trade": {
        //load parameter values from config file
        String autoencoderConnectomePath = config.getString("autoencoder_connectome");
        String inputLeadsheetPath = config.getString("input_leadsheet");
        String outputFolderPath = config.getString("output_folder");
        int tradingPartSize = config.getInt("trading_part_size");
        double interpVariance = config.getDouble("interpolation_variance");
        double interpMin = config.getDouble("interpolation_min");
        double herdingStrength = config.getDouble("herding_strength");
        double mutationStrength = config.getDouble("mutation_strength");
        double crossoverStrength = config.getDouble("crossover_strength");

        //initialize network
        ProductCompressingAutoencoder autoencoder = initializeAutoencoder(autoencoderConnectomePath, true);

        //perform population trading test
        populationTradingTest(autoencoder, autoencoderConnectomePath, new File(inputLeadsheetPath),
                new File(outputFolderPath), tradingPartSize, interpVariance, interpMin, herdingStrength,
                mutationStrength, crossoverStrength);
    }
        break;
    case "interpolation": {
        //load parameter values from config file
        String autoencoderConnectomePath = config.getString("autoencoder_connectome");
        String inputLeadsheetPath = config.getString("input_leadsheet");
        String targetQueuePath = config.getString("target_queue");
        String outputFolderPath = config.getString("output_folder");
        int numInterpolationDivisions = config.getInt("num_interpolation_divisions");

        //initialize network
        ProductCompressingAutoencoder autoencoder = initializeAutoencoder(autoencoderConnectomePath, false);

        //perform the interpolation test
        interpolateTest(autoencoder, autoencoderConnectomePath, new File(inputLeadsheetPath),
                new File(targetQueuePath), new File(outputFolderPath), numInterpolationDivisions);
    }
        break;
    case "frankenstein": {
        //load parameter values from config file
        String autoencoderConnectomePath = config.getString("autoencoder_connectome");
        String primaryQueuePath = config.getString("primary_queue_path");
        String referenceLeadsheetPath = config.getString("reference_leadsheet");
        String queueFolderPath = config.getString("queue_folder");
        String outputFilePath = config.getString("output_file_path");
        int numReferenceQueues = config.getInt("num_reference_queues");
        int numCombinations = config.getInt("num_combinations");
        double interpolationMagnitude = config.getDouble("interpolation_strength");

        //initialize network
        ProductCompressingAutoencoder autoencoder = initializeAutoencoder(autoencoderConnectomePath, false);

        //initialize chord sequence
        LeadsheetDataSequence chordSequence = leadsheetToSequence(referenceLeadsheetPath);
        chordSequence.clearMelody();

        //perform frankenstein test
        frankensteinTest(autoencoder, autoencoderConnectomePath, primaryQueuePath, new File(queueFolderPath),
                outputFilePath, chordSequence, numReferenceQueues, numCombinations, interpolationMagnitude);
    }
        break;
    default:
        throw new RuntimeException("Unrecognized test type");
    }
    LogTimer.log("Process finished"); //Done!
}
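
For reference, a properties file driving the 'interpolation' test described in the Javadoc above might look like the sketch below. Only the keys come from the documentation; the values are placeholders, and because the builder registers DefaultListDelimiterHandler(';'), a ';' would separate the elements of any list-valued property:

    test_type = interpolation
    autoencoder_connectome = /path/to/autoencoder/connectome
    input_leadsheet = /path/to/input.ls
    target_queue = /path/to/target.q
    output_folder = /path/to/output
    num_interpolation_divisions = 5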

From source file:hd3gtv.as5kpc.MainClass.java

static FileBasedConfiguration loadConf(String file) throws ConfigurationException {
    org.apache.commons.configuration2.builder.fluent.Parameters params = new org.apache.commons.configuration2.builder.fluent.Parameters();
    PropertiesBuilderParameters pbp = params.properties().setFileName(file);
    pbp.setListDelimiterHandler(new DefaultListDelimiterHandler(','));

    FileBasedConfigurationBuilder<FileBasedConfiguration> builder = new FileBasedConfigurationBuilder<FileBasedConfiguration>(
            PropertiesConfiguration.class);
    builder.configure(pbp);
    return builder.getConfiguration();
}
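
A hedged usage sketch for loadConf (the file name and property key are placeholders; with the ',' handler a line such as "channels = 1,2,3" is exposed as three elements):

    FileBasedConfiguration conf = loadConf("as5kpc.properties");
    List<Object> channels = conf.getList("channels"); //["1", "2", "3"]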

From source file:me.dwtj.java.compiler.utils.CompilationTaskBuilder.java

/**
 * A helper method to create a configuration from the given Apache Commons Configuration
 * Properties file.
 *
 * @param configFile A valid Apache Commons Configuration Properties file.
 *
 * @return A {@link Configuration} corresponding to the given file.
 *
 * @see <a href=http://commons.apache.org/proper/commons-configuration/>
 *        Apache Commons Configuration
 *      </a>
 * @see <a href=http://commons.apache.org/proper/commons-configuration/userguide/howto_properties.html>
 *        Apache Commons Configuration: Properties Files
 *      </a>
 */
public static PropertiesConfiguration compileProperties(File configFile) {
    Parameters paramUtils = new Parameters();
    ListDelimiterHandler delim = new DefaultListDelimiterHandler(',');
    try {
        //register the ',' delimiter handler so comma-separated values are split into lists
        return new FileBasedConfigurationBuilder<>(PropertiesConfiguration.class)
                .configure(paramUtils.properties().setFile(configFile).setListDelimiterHandler(delim))
                .getConfiguration();
    } catch (ConfigurationException ex) {
        throw new RuntimeException("Failed to create a configuration from file " + configFile, ex);
    }
}
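
A hedged call sketch for compileProperties (the file name and property key are placeholders):

    PropertiesConfiguration cfg = compileProperties(new File("compiler.properties"));
    List<Object> sourcePaths = cfg.getList("source.paths"); //comma-separated entries become list elements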

From source file:com.streamsets.datacollector.cli.sch.SchAdmin.java

/**
 * Update dpm.properties file with new configuration.
 */
private static void updateDpmProperties(Context context, String dpmBaseURL, List<String> labels,
        boolean enableSch) {
    if (context.skipUpdatingDpmProperties) {
        return;
    }

    try {
        FileBasedConfigurationBuilder<PropertiesConfiguration> builder = new FileBasedConfigurationBuilder<>(
                PropertiesConfiguration.class)
                        .configure(new Parameters().properties()
                                .setFileName(context.runtimeInfo.getConfigDir() + "/dpm.properties")
                                .setThrowExceptionOnMissing(true)
                                .setListDelimiterHandler(new DefaultListDelimiterHandler(';'))
                                .setIncludesAllowed(false));
        PropertiesConfiguration config = builder.getConfiguration();
        config.setProperty(RemoteSSOService.DPM_ENABLED, Boolean.toString(enableSch));
        config.setProperty(RemoteSSOService.DPM_BASE_URL_CONFIG, dpmBaseURL);
        config.setProperty(RemoteSSOService.SECURITY_SERVICE_APP_AUTH_TOKEN_CONFIG, APP_TOKEN_FILE_PROP_VAL);
        if (labels != null && labels.size() > 0) {
            config.setProperty(RemoteEventHandlerTask.REMOTE_JOB_LABELS, StringUtils.join(labels, ','));
        } else {
            config.setProperty(RemoteEventHandlerTask.REMOTE_JOB_LABELS, "");
        }
        builder.save();
    } catch (ConfigurationException e) {
        throw new RuntimeException(Utils.format("Updating dpm.properties file failed: {}", e.getMessage()), e);
    }
}

From source file:org.craftercms.engine.util.ConfigUtils.java

public static XMLConfiguration readXmlConfiguration(Resource resource, char listDelimiter,
        Map<String, Lookup> prefixLookups) throws ConfigurationException {
    Parameters params = new Parameters();
    FileBasedConfigurationBuilder<XMLConfiguration> builder = new FileBasedConfigurationBuilder<>(
            XMLConfiguration.class);

    try {
        XMLBuilderParameters xmlParams = params.xml().setURL(resource.getURL())
                .setListDelimiterHandler(new DefaultListDelimiterHandler(listDelimiter));

        if (MapUtils.isNotEmpty(prefixLookups)) {
            xmlParams = xmlParams.setPrefixLookups(prefixLookups);
        }

        builder.configure(xmlParams);
    } catch (IOException e) {
        throw new ConfigurationException("Unable to get URL of resource " + resource, e);
    }

    return builder.getConfiguration();
}
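
A hedged usage sketch (the Spring ClassPathResource, the file name, and the XML key are assumptions, not taken from the source above):

    //an element such as <locales>en,es,fr</locales> would split into three values with the ',' delimiter
    XMLConfiguration cfg = ConfigUtils.readXmlConfiguration(new ClassPathResource("site-config.xml"), ',', null);
    List<Object> locales = cfg.getList("locales");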

From source file:org.mitre.mpf.nms.util.PropertiesUtil.java

@PostConstruct
private void init() {
    URL url;
    try {
        url = propFile.getURL();
    } catch (IOException e) {
        throw new IllegalStateException("Cannot get URL from " + propFile + ".", e);
    }

    FileBasedConfigurationBuilder<PropertiesConfiguration> fileBasedConfigBuilder = new FileBasedConfigurationBuilder<>(
            PropertiesConfiguration.class);

    Parameters configBuilderParameters = new Parameters();
    fileBasedConfigBuilder.configure(configBuilderParameters.fileBased().setURL(url)
            .setListDelimiterHandler(new DefaultListDelimiterHandler(',')));

    try {
        propertiesConfig = fileBasedConfigBuilder.getConfiguration();
    } catch (ConfigurationException e) {
        throw new IllegalStateException("Cannot create configuration from " + propFile + ".", e);
    }
}

From source file:org.mitre.mpf.wfm.util.MpfPropertiesConfigurationBuilder.java

private FileBasedConfigurationBuilder<PropertiesConfiguration> createFileBasedConfigurationBuilder(
        Resource resource) {

    URL url;
    try {
        url = resource.getURL();
    } catch (IOException e) {
        throw new IllegalStateException("Cannot get URL from " + resource + ".", e);
    }

    FileBasedConfigurationBuilder<PropertiesConfiguration> fileBasedConfigBuilder = new FileBasedConfigurationBuilder<>(
            PropertiesConfiguration.class);

    Parameters configBuilderParameters = new Parameters();
    fileBasedConfigBuilder.configure(configBuilderParameters.fileBased().setURL(url)
            .setListDelimiterHandler(new DefaultListDelimiterHandler(',')));

    return fileBasedConfigBuilder;
}

From source file:org.sdw.util.ConfigReader.java

/**
 * Single-parameter constructor.
 * @param propertyFile absolute or relative path of the property file to be read
 */
public ConfigReader(String propertyFile) {
    Parameters params = new Parameters();
    builder = new FileBasedConfigurationBuilder<FileBasedConfiguration>(PropertiesConfiguration.class);
    builder.configure(params.properties().setFileName(propertyFile)
            .setListDelimiterHandler(new DefaultListDelimiterHandler(',')));

}