
Commit ef65203 (parent: 167ae2a)

feat: get working implementation !!!

a bit slow, but already works!!

1 file changed: +15 −9


index.js (+15 −9)
@@ -48,17 +48,19 @@ const find_beta_from_ci = ({ci_lower, ci_upper}) => {
   // backtracking line search
   // <https://en.wikipedia.org/wiki/Backtracking_line_search>
   // Once we know the direction, how far to go along it?
-  const get_optimal_step_size_a = ({a,b, d, is_a}) => {
-    let dir = d_a > 0 ? 1 : -1
+  let outer_step_size_max = 0.05
+  let n_backtracking = 20
+  let local_minima_indicator = 2 * outer_step_size_max * (1/(2**n_backtracking))
+  const get_optimal_step_size_a = ({a,b, dir, is_a}) => {
 
     let step_size_min = 0
     let loss_s_min = is_a ? loss(a + step_size_min * dir, b) : loss(a, b + step_size_min * dir)
 
-    let step_size_max = 0.1
+    let step_size_max = outer_step_size_max
     let loss_s_max = is_a ? loss(a + step_size_max * dir, b) : loss(a, b + step_size_max * dir)
 
 
-    for(let i=0; i<20; i++){
+    for(let i=0; i<n_backtracking; i++){
       if(loss_s_min < loss_s_max){
         step_size_max = (step_size_max + step_size_min) / 2
         loss_s_max = is_a ? loss(a + step_size_max * dir, b) : loss(a, b + step_size_max * dir)
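
Note on this hunk: the fixed trial step is replaced by a bisection-style line search over [0, outer_step_size_max], with the search parameters hoisted out so the stopping test in the next hunk can reuse them. A minimal standalone sketch of the scheme follows, assuming (the hunk ends before the else-branch) that when the upper endpoint is not worse, the lower endpoint moves up to the midpoint; loss1d and line_search are hypothetical names for illustration only.

// Hypothetical 1-D stand-in for loss(a, b) along one coordinate,
// minimised at step = 0.03, well inside [0, 0.05].
const loss1d = (s) => (s - 0.03) ** 2

const line_search = (step_max = 0.05, n_iter = 20) => {
  let lo = 0, hi = step_max
  let loss_lo = loss1d(lo), loss_hi = loss1d(hi)
  for (let i = 0; i < n_iter; i++) {
    const mid = (lo + hi) / 2
    if (loss_lo < loss_hi) {
      hi = mid                // lower end wins: shrink from above
      loss_hi = loss1d(hi)
    } else {
      lo = mid                // upper end wins: shrink from below (assumed else-branch)
      loss_lo = loss1d(lo)
    }
  }
  return lo
}

console.log(line_search())    // ≈ 0.03; the bracket narrows to step_max / 2**n_iter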
@@ -77,18 +79,22 @@ const find_beta_from_ci = ({ci_lower, ci_upper}) => {
     let max_steps = 2000
     for(let i = 0; i<max_steps; i++){
       // gradient step for a
-      let dir_a = - df_da(a,b)
+      let dir_a = df_da(a,b) > 0 ? -1 : 1
       // console.log(dir_a)
-      let stepsize_a = 0.0005 // 1/n_a
+      // let stepsize_a = 0.0005 // 1/n_a
+      let stepsize_a = get_optimal_step_size_a({a,b, dir: dir_a, is_a: true})
       let step_a = stepsize_a // * dir_a
       a = Math.max(a + step_a, 0)
 
       // gradient step for b
-      let dir_b = - df_db(a,b)
-      let stepsize_b = 0.0005 // 1/n_b
+      let dir_b = df_db(a,b) > 0 ? -1 : 1
+      // let stepsize_b = 0.0005 // 1/n_b
+      let stepsize_b = get_optimal_step_size_a({a,b, dir: dir_b, is_a: false})
       let step_b = stepsize_b // * dir_b
       b = Math.max(b + step_b,0)
+      // console.log(`stepsize_a: ${stepsize_a}, stepsize_b: ${stepsize_b}`)
       // console.log(`a: ${a}, b: ${b}`)
+      if(stepsize_a + stepsize_b < local_minima_indicator) break;
     }
     return [a, b]
   }
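
Note on the new break: local_minima_indicator is exactly twice the finest step the line search can resolve after n_backtracking halvings, so the loop exits once neither coordinate can move by more than the search's own resolution. A quick check of the magnitudes, using the constants set in the previous hunk:

const outer_step_size_max = 0.05
const n_backtracking = 20
const resolution = outer_step_size_max / 2 ** n_backtracking
console.log(resolution)        // ≈ 4.77e-8, the smallest resolvable step
console.log(2 * resolution)    // ≈ 9.54e-8, the local_minima_indicator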
@@ -98,7 +104,7 @@ const find_beta_from_ci = ({ci_lower, ci_upper}) => {
   let best_loss = Infinity
   let best_result = null
   // for(let i=0; i<num_initializations; i++){
-  while(best_loss > 0.001){
+  while(best_loss > 0.0045){
     let a_init = Math.random() * 5
     let b_init = Math.random() * 5
     let new_result = gradient_descent(a_init, b_init)
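
Note on the loosened threshold: raising the acceptance bound from 0.001 to 0.0045 lets the restart loop accept a fit sooner, which plausibly targets the "a bit slow" caveat in the commit message. A sketch of the surrounding restart wrapper as it reads from this hunk; the name fit_with_restarts and the parameterised shape are assumptions, while gradient_descent and loss are the functions defined elsewhere in index.js:

// Re-seed (a, b) uniformly in (0, 5) and re-run gradient descent
// until the best loss clears the threshold.
const fit_with_restarts = ({gradient_descent, loss, threshold = 0.0045}) => {
  let best_loss = Infinity
  let best_result = null
  while (best_loss > threshold) {
    const [a, b] = gradient_descent(Math.random() * 5, Math.random() * 5)
    const l = loss(a, b)
    if (l < best_loss) {
      best_loss = l
      best_result = [a, b]
    }
  }
  return best_result
}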
