Skip to content

Commit

Permalink
Update DimReduce Module
Browse files Browse the repository at this point in the history
Update DimReduce Module - extract common base
  • Loading branch information
suquark committed Feb 7, 2017
1 parent 903496b commit 97cf153
Show file tree
Hide file tree
Showing 6 changed files with 147 additions and 385 deletions.
78 changes: 78 additions & 0 deletions dim_reduction/base.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
import { getopt } from 'util.js';
import { assertArray2D, assertSquare } from 'util/assert.js';
import { randn, randn2d } from 'util/random.js';
import { zeros, array2d, zeros2d, centerPoints, adjMatrixDistance } from 'util/array.js';
import get_optimizer from 'optimizer/index.js'; // the default function

/**
* @param {?Object} opt Options.
* @constructor
*/
class DimReductionBase {

  /**
   * Common base for iterative, gradient-descent dimensionality-reduction
   * methods (MDS, force-directed layout, ...). Subclasses must override
   * costGrad() to supply the objective and its gradient.
   *
   * @param {?Object} opt Options.
   *   dim       {number} target dimensionality of the embedding (default 2)
   *   epsilon   {number} learning rate handed to the optimizer (default 10)
   *   optimizer {string} optimizer name resolved by get_optimizer (default 'adam')
   * @constructor
   */
  constructor(opt = {}) {
    this.dim = getopt(opt, 'dim', 2);          // by default 2-D
    this.epsilon = getopt(opt, 'epsilon', 10); // learning rate
    this.optimizer = getopt(opt, 'optimizer', 'adam');
    this.iter = 0;                             // optimization step counter
  }

  /**
   * Initialize from raw high-dimensional points: build the pairwise
   * distance matrix and delegate to initDataDist().
   * @param {Array<Array<number>>} X N points, one row vector each.
   */
  initDataRaw(X) {
    assertArray2D(X);
    const dists = adjMatrixDistance(X);
    this.initDataDist(dists);
  }

  /**
   * Initialize from a precomputed distance matrix D (D.length === N,
   * N x N entries) and reset the solution to a fresh random embedding.
   * @param {Array<Array<number>>} D pairwise distances.
   */
  initDataDist(D) {
    this.D = D;
    this.N = D.length; // back up the size of the dataset
    this.initSolution(); // refresh this
  }

  /**
   * (Re)initialize the embedding Y to small random values and attach a
   * per-point optimizer. Also resets the iteration counter.
   */
  initSolution() {
    this.Y = randn2d(this.N, this.dim, 0.0, 1e-4); // the solution
    // indexed loop: for...in over an array yields string keys and is
    // order/extension-sensitive, so iterate by index instead
    for (let i = 0; i < this.Y.length; i++) {
      this.Y[i].optimizer = get_optimizer(this.Y[i].length, {
        method: this.optimizer,
        learning_rate: this.epsilon,
      });
    }
    this.iter = 0;
  }

  // return pointer to current solution (shared, not a copy)
  get solution() { return this.Y; }

  // edges to draw between points; none by default, subclasses may override
  get edges() { return []; }

  /**
   * Perform a single step of optimization to improve the embedding.
   * @param {boolean} calc_cost whether costGrad should also compute the cost.
   * @returns {number} current cost (meaningful only when calc_cost is true).
   */
  step(calc_cost = true) {
    this.iter += 1;

    const cg = this.costGrad(this.Y, calc_cost); // evaluate gradient (and optionally cost)
    const cost = cg.cost;
    const grad = cg.grad;

    // perform gradient step, one optimizer per point
    for (let i = 0; i < this.N; i++) {
      this.Y[i].optimizer.update(this.Y[i], grad[i]);
    }

    // reproject Y to be zero mean
    centerPoints(this.Y);

    return cost; // return current cost
  }

  /**
   * Return cost and gradient for an arrangement Y. Abstract: subclasses
   * must override.
   * @param {Array<Array<number>>} Y current embedding.
   * @param {boolean} calc_cost whether to also compute the scalar cost.
   * @returns {{cost: number, grad: Array<Array<number>>}}
   * @throws {Error} always, in this base class.
   */
  costGrad(Y, calc_cost = true) {
    // throw a real Error (not a bare string) so catchers get a stack trace
    throw new Error('costGrad not implemented');
  }
}

export { DimReductionBase };
94 changes: 24 additions & 70 deletions dim_reduction/force.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,100 +3,61 @@
*/

import { getopt } from 'util.js';
import { assertArray2D, assert } from 'util/assert.js';
import { randn2d } from 'util/random.js';
import { centerPoints, zeros2d, adjMatrixDistance, distance, L2 } from 'util/array.js';
import { Adam } from 'optimizer/index.js'; // you can drop `index.js` if supported
import { assert } from 'util/assert.js';
import { DimReductionBase } from 'dim_reduction/base.js';
import { zeros2d, distance, L2 } from 'util/array.js';
import { naive_knn } from 'util/knn.js';
/**
* @param {?Object} opt Options.
* @constructor
*/
class ForceField {
class ForceField extends DimReductionBase {
constructor(opt={}) {
this.dim = getopt(opt, 'dim', 2); // by default 2-D
this.epsilon = getopt(opt, 'epsilon', 1); // learning rate
super(opt);
this.nn = getopt(opt, 'nn', 3); // nested neighbors
this.iter = 0;
}

// this function takes a set of high-dimensional points
// and creates matrix P from them using gaussian kernel
initDataRaw(X) {
assertArray2D(X);
var dists = adjMatrixDistance(X);
this.initDataDist(dists);
}

// D is assumed to be provided as an array of size N^2.
initDataDist(D) {
var N = D.length;
this.D = D;
this.N = N; // back up the size of the dataset
super.initDataDist(D);
if (this.nn > 0) {
assert(this.N - 1 >= this.nn, "Error: K-NN need at least N + 1 points");
this.A = naive_knn(D, this.nn);
}
this.initSolution(); // refresh this
}

// (re)initializes the solution to random
initSolution() {
this.Y = randn2d(this.N, this.dim, 0.0, 1e-4); // the solution
for (let i in this.Y) {
this.Y[i].optimizer = new Adam(this.Y[i].length, { learning_rate: this.epsilon }); //new SpecialSGD(this.Y[i].length, { learning_rate: this.epsilon });
}
this.iter = 0;
}

// return pointer to current solution
get solution() { return this.Y; }

get edges() {
let edges = [];
let A = this.A;
let n = A.length;
for (let i = 0; i < n; i++) {
for (let j = 0; j < A[i].length; j++) {
edges.push([i, A[i][j]]);
if (this.nn > 0) {
let A = this.A;
let n = A.length;
for (let i = 0; i < n; i++) {
for (let j = 0; j < A[i].length; j++) {
edges.push([i, A[i][j]]);
}
}
}
return edges;
}


// perform a single step of optimization to improve the embedding
step(calc_cost = true) {
this.iter += 1;
let N = this.N;

let cg = this.costGrad(this.Y, calc_cost); // evaluate gradient
let cost = cg.cost;
let grad = cg.grad;

// perform gradient step
for (let i = 0; i < N; i++) {
this.Y[i].optimizer.update(this.Y[i], grad[i]);
}

// reproject Y to be zero mean
centerPoints(this.Y);

return cost; // return current cost
}

/**
* return cost and gradient, given an arrangement
*
* E = \frac{1}{\sum\limits_{i<j}d^{*}_{ij}}\sum_{i<j}\frac{(d^{*}_{ij}-d_{ij})^2}{d^{*}_{ij}}.
*
*
*/
costGrad(Y, calc_cost=true) {
let D = this.D;
let N = this.N;
let dim = this.dim;
let grad = zeros2d(N, dim);
let cost = 0.;
let sum = 0. // normalize sum

/**
* energy of charge pair
*/

for (let i = 0; i < N; i++) {
for (let j = i + 1; j < N; j++) {
let k = -1.0 / (L2(Y[i], Y[j]) + 1e-8);
Expand All @@ -107,10 +68,7 @@ class ForceField {
}
}
}

// calc cost
let sum = 0. // normalize sum
let cost = 0.;

if (calc_cost) {
for (let i = 0; i < N; i++) {
for (let j = i + 1; j < N; j++) {
Expand All @@ -120,13 +78,13 @@ class ForceField {
cost += 1 / (dij + 1e-8);
}
}

}

/////////////////

/**
* energy of edges
*/
if (this.nn <= 0) { // then calc all pairs

for (let i = 0; i < N; i++) {
for (let j = i + 1; j < N; j++) {
let Dij = D[i][j];
Expand All @@ -139,7 +97,6 @@ class ForceField {
}
}
}

if (calc_cost) {
for (let i = 0; i < N; i++) {
for (let j = i + 1; j < N; j++) {
Expand Down Expand Up @@ -167,7 +124,6 @@ class ForceField {
}
}


if (calc_cost) {
for (let i = 0; i < N; i++) {
for (let e in A[i]) {
Expand All @@ -176,8 +132,6 @@ class ForceField {
}
}
}


}
return { grad: grad, cost: cost / sum };
}
Expand Down
73 changes: 4 additions & 69 deletions dim_reduction/mds.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,83 +2,20 @@
* Multidimensional scaling
*/

import { getopt, assert } from 'util.js';
import { assertArray2D } from 'util/assert.js';
import { randn2d } from 'util/random.js';
import { centerPoints, zeros2d, adjMatrixDistance, distance } from 'util/array.js';
import { Adam } from 'optimizer/index.js'; // you can drop `index.js` if supported
import { zeros2d, distance } from 'util/array.js';
import { DimReductionBase } from 'dim_reduction/base.js';

/**
* Multidimensional scaling
* @param {?Object} opt Options.
* @constructor
*/
class MDS {
constructor(opt={}) {
this.dim = getopt(opt, 'dim', 2); // by default 2-D
this.epsilon = getopt(opt, 'epsilon', 1); // learning rate
this.iter = 0;
}

// this function takes a set of high-dimensional points
// and creates matrix P from them using gaussian kernel
initDataRaw(X) {
assertArray2D(X);
var dists = adjMatrixDistance(X);
this.initDataDist(dists);
}

// this function takes a flattened distance matrix and creates
// matrix P from them.
// D is assumed to be provided as an array of size N^2.
initDataDist(D) {
var N = D.length;
this.D = D;
this.N = N; // back up the size of the dataset
this.initSolution(); // refresh this
}

// (re)initializes the solution to random
initSolution() {
this.Y = randn2d(this.N, this.dim, 0.0, 1e-4); // the solution
for (let i in this.Y) {
this.Y[i].optimizer = new Adam(this.Y[i].length, { learning_rate: this.epsilon }); //new SpecialSGD(this.Y[i].length, { learning_rate: this.epsilon });
}
this.iter = 0;
}

// return pointer to current solution
get solution() { return this.Y; }


// perform a single step of optimization to improve the embedding
step(calc_cost = true) {
this.iter += 1;
let N = this.N;

let cg = this.costGrad(this.Y, calc_cost); // evaluate gradient
let cost = cg.cost;
let grad = cg.grad;

//console.log('grad:' + grad);

// perform gradient step
for (let i = 0; i < N; i++) {
this.Y[i].optimizer.update(this.Y[i], grad[i]);
}

// reproject Y to be zero mean
centerPoints(this.Y);

return cost; // return current cost
}

class MDS extends DimReductionBase {
/**
* return cost and gradient, given an arrangement
*
* E = \frac{1}{\sum\limits_{i<j}d^{*}_{ij}}\sum_{i<j}(d^{*}_{ij}-d_{ij})^2.
*
*
*/
costGrad(Y, calc_cost=true) {
let D = this.D;
Expand Down Expand Up @@ -116,8 +53,6 @@ class MDS {
}
return { grad: grad, cost: cost };
}


}

export { MDS };
export { MDS };
Loading

0 comments on commit 97cf153

Please sign in to comment.