Skip to content

Commit

Permalink
minor fix
Browse files — browse the repository at this point in the history
  • Loading branch information
Yusuke Sugomori committed Mar 16, 2013
1 parent 04fe276 commit c291e7e
Show file tree
Hide file tree
Showing 5 changed files with 40 additions and 9 deletions.
33 changes: 32 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,4 +1,35 @@
.DS_Store
*.pyc
*.out
*.o
*.o

#
# from https://github.com/github/gitignore/blob/master/Global/Eclipse.gitignore
#

*.pydevproject
.project
.metadata
bin/**
tmp/**
tmp/**/*
*.tmp
*.bak
*.swp
*~.nib
local.properties
.classpath
.settings/
.loadpath

# External tool builders
.externalToolBuilders/

# Locally stored "Eclipse launch configurations"
*.launch

# CDT-specific
.cproject

# PDT-specific
.buildpath
6 changes: 3 additions & 3 deletions c/DBN.c
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ void DBN__construct(DBN* this, int N, \
if(i == 0) {
input_size = n_ins;
} else {
input_size = hidden_layer_sizes[i - 1];
input_size = hidden_layer_sizes[i-1];
}

// construct sigmoid_layer
Expand Down Expand Up @@ -135,7 +135,7 @@ void DBN_finetune(DBN* this, int *input, int *label, double lr, int epochs) {
int i, j, m, n, epoch;

int *layer_input;
int prev_layer_input_size;
// int prev_layer_input_size;
int *prev_layer_input;

int *train_X = (int *)malloc(sizeof(int) * this->n_ins);
Expand Down Expand Up @@ -178,7 +178,7 @@ void DBN_finetune(DBN* this, int *input, int *label, double lr, int epochs) {
void DBN_predict(DBN* this, int *x, double *y) {
int i, j, k;
double *layer_input;
int prev_layer_input_size;
// int prev_layer_input_size;
double *prev_layer_input;

double linear_output;
Expand Down
2 changes: 1 addition & 1 deletion c/SdA.c
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ void SdA__construct(SdA* this, int N, \
if(i == 0) {
input_size = n_ins;
} else {
input_size = hidden_layer_sizes[i - 1];
input_size = hidden_layer_sizes[i-1];
}

// construct sigmoid_layer
Expand Down
6 changes: 3 additions & 3 deletions cpp/DBN.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ DBN::DBN(int size, int n_i, int *hls, int n_o, int n_l) {
if(i == 0) {
input_size = n_ins;
} else {
input_size = hidden_layer_sizes[i - 1];
input_size = hidden_layer_sizes[i-1];
}

// construct sigmoid_layer
Expand Down Expand Up @@ -123,7 +123,7 @@ void DBN::pretrain(int *input, double lr, int k, int epochs) {

void DBN::finetune(int *input, int *label, double lr, int epochs) {
int *layer_input;
int prev_layer_input_size;
// int prev_layer_input_size;
int *prev_layer_input;

int *train_X = new int[n_ins];
Expand Down Expand Up @@ -164,7 +164,7 @@ void DBN::finetune(int *input, int *label, double lr, int epochs) {

void DBN::predict(int *x, double *y) {
double *layer_input;
int prev_layer_input_size;
// int prev_layer_input_size;
double *prev_layer_input;

double linear_output;
Expand Down
2 changes: 1 addition & 1 deletion cpp/SdA.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ SdA::SdA(int size, int n_i, int *hls, int n_o, int n_l) {
if(i == 0) {
input_size = n_ins;
} else {
input_size = hidden_layer_sizes[i - 1];
input_size = hidden_layer_sizes[i-1];
}

// construct sigmoid_layer
Expand Down

0 comments on commit c291e7e

Please sign in to comment.