Example usage for edu.stanford.nlp.math ArrayMath multiplyInto

List of usage examples for edu.stanford.nlp.math ArrayMath multiplyInto

Introduction

On this page you can find example usage for edu.stanford.nlp.math ArrayMath multiplyInto.

Prototype

public static void multiplyInto(double[] a, double[] b, double c) 

Source Link

Usage

From source file:cmu.arktweetnlp.impl.OWLQN.java

/**
 * Computes the steepest-descent search direction for OWL-QN and stores it in {@code dir},
 * then snapshots it into {@code steepestDescDir}.
 *
 * <p>With no L1 penalty ({@code l1weight == 0}) the direction is simply the negated
 * gradient. Otherwise each coordinate uses the OWL-QN pseudo-gradient: the L1 subgradient
 * is applied according to the sign of {@code x[i]}, and at {@code x[i] == 0} the
 * coordinate is zeroed out when the gradient lies within the {@code [-l1weight, l1weight]}
 * band (no descent possible in either orthant).
 */
void makeSteepestDescDir() {
    if (l1weight == 0) {
        // Pure unregularized case: direction = -gradient.
        ArrayMath.multiplyInto(dir, grad, -1);
    } else {
        for (int i = 0; i < dim; i++) {
            // Bias parameters are exempt from the L1 penalty (mheilman's addition).
            if (OWLQN.biasParameters.contains(i)) {
                dir[i] = -grad[i];
                continue;
            }
            final double negGrad = -grad[i];
            if (x[i] < 0) {
                dir[i] = negGrad + l1weight;
            } else if (x[i] > 0) {
                dir[i] = negGrad - l1weight;
            } else if (grad[i] < -l1weight) {
                // At zero, only step if the gradient escapes the subgradient band.
                dir[i] = negGrad - l1weight;
            } else if (grad[i] > l1weight) {
                dir[i] = negGrad + l1weight;
            } else {
                dir[i] = 0;
            }
        }
    }
    // clone() on a double[] copies all elements, so later mutation of dir is safe.
    steepestDescDir = dir.clone();
}

From source file:dz.pfe.storm.ressources.cmu.arktweetnlp.impl.OWLQN.java

/**
 * Computes the steepest-descent search direction for OWL-QN and stores it in {@code dir},
 * then snapshots it into {@code steepestDescDir}. Synchronized, so concurrent callers
 * serialize on this instance's monitor.
 *
 * <p>With no L1 penalty ({@code l1weight == 0}) the direction is simply the negated
 * gradient. Otherwise each coordinate uses the OWL-QN pseudo-gradient: the L1 subgradient
 * is applied according to the sign of {@code x[i]}, and at {@code x[i] == 0} the
 * coordinate is zeroed out when the gradient lies within the {@code [-l1weight, l1weight]}
 * band (no descent possible in either orthant).
 */
synchronized void makeSteepestDescDir() {
    // NOTE(review): a fresh OWLQN is built on every call solely to read biasParameters;
    // the sibling implementation accesses it statically — confirm whether the field is
    // static and this allocation can be dropped.
    OWLQN owlqn = new OWLQN();
    if (l1weight == 0) {
        // Pure unregularized case: direction = -gradient.
        ArrayMath.multiplyInto(dir, grad, -1);
    } else {
        for (int i = 0; i < dim; i++) {
            // Bias parameters are exempt from the L1 penalty (mheilman's addition).
            if (owlqn.biasParameters.contains(i)) {
                dir[i] = -grad[i];
                continue;
            }
            final double negGrad = -grad[i];
            if (x[i] < 0) {
                dir[i] = negGrad + l1weight;
            } else if (x[i] > 0) {
                dir[i] = negGrad - l1weight;
            } else if (grad[i] < -l1weight) {
                // At zero, only step if the gradient escapes the subgradient band.
                dir[i] = negGrad - l1weight;
            } else if (grad[i] > l1weight) {
                dir[i] = negGrad + l1weight;
            } else {
                dir[i] = 0;
            }
        }
    }
    // clone() on a double[] copies all elements, so later mutation of dir is safe.
    steepestDescDir = dir.clone();
}