From 60bcdbba6feab9dccf87783d1e65610a2fb11744 Mon Sep 17 00:00:00 2001
From: Tenzin Chan
Date: Mon, 13 May 2024 07:08:23 -0700
Subject: [PATCH] Updated to accommodate different learning rates

---
 scripts/blanche_expt.jl | 24 +++++++++++++-----------
 1 file changed, 13 insertions(+), 11 deletions(-)

diff --git a/scripts/blanche_expt.jl b/scripts/blanche_expt.jl
index 6b21071..f168d15 100644
--- a/scripts/blanche_expt.jl
+++ b/scripts/blanche_expt.jl
@@ -17,7 +17,7 @@ function modeltocpu(model)
 end
 
 
-function trainondata(data, maxiter, winsz, batchsize, arraycast)
+function trainondata(data, maxiter, winsz, batchsize, arraycast, lr)
     println("windowing")
     traindata, counts = window(data, winsz)
     bs, n = size(traindata)
@@ -67,7 +67,7 @@ function generatebernoulli(data)
 end
 
 
-function runexperiment(binsz, winszs, maxiter, path, batchsize, arraycast, params, basedir, numsplit, extract_fn)
+function runexperiment(binsz, winszs, maxiter, path, batchsize, arraycast, params, basedir, numsplit, extract_fn, lr)
     for winsz in winszs
         params["winsz"] = winsz
         # Only doing matrix for now
@@ -87,7 +87,7 @@ function runexperiment(binsz, winszs, maxiter, path, batchsize, arraycast, param
             if !isfile(saveloc)
                 println("processing $saveloc")
                 model, ininds, uniqueinput, inspkcnt, counts, outinds, combout, comboutspkcnt, comboutcnt, losses =
-                    trainondata(data_split[i], maxiter, winsz, batchsize, arraycast)
+                    trainondata(data_split[i], maxiter, winsz, batchsize, arraycast, lr)
                 savedata["$i"] = Dict("ininds"=>ininds, "outinds"=>outinds, "input"=>uniqueinput, "output"=>combout, "net"=>model,
                                       "inspikecnt"=>inspkcnt, "incount"=>counts, "outspikecnt"=>comboutspkcnt, "outcount"=>comboutcnt, "loss"=>losses)
                 MAT.matwrite(joinpath(cdmdir, "input", "$(DrWatson.savename(params))_$(i).mat"), Dict("inspikecnt"=>inspkcnt, "counts"=>counts))
@@ -99,7 +99,7 @@ function runexperiment(binsz, winszs, maxiter, path, batchsize, arraycast, param
                 println("processing $nullloc")
                 nulldata = generatebernoulli(data_split[i])
                 model, ininds, uniqueinput, inspkcnt, counts, outinds, combout, comboutspkcnt, comboutcnt, losses =
-                    trainondata(nulldata, maxiter, winsz, batchsize, arraycast)
+                    trainondata(nulldata, maxiter, winsz, batchsize, arraycast, lr)
                 savenull["$i"] = Dict("ininds"=>ininds, "outinds"=>outinds, "input"=>uniqueinput, "output"=>combout, "net"=>model,
                                       "inspikecnt"=>inspkcnt, "incount"=>counts, "outspikecnt"=>comboutspkcnt, "outcount"=>comboutcnt, "loss"=>losses)
                 MAT.matwrite(joinpath(cdmdir, "null", "$(DrWatson.savename(params))_$(i).mat"), Dict("cells"=>model.n, "spike_counts"=>comboutspkcnt, "counts"=>comboutcnt))
@@ -126,15 +126,17 @@ if abspath(PROGRAM_FILE) == @__FILE__
     binsz = params["binsz"]
 
     # For Blanche's data
-    path = DrWatson.datadir("exp_raw", "pvc3", "crcns_pvc3_cat_recordings", "spont_activity", "spike_data_area18")
-    extract_fn = extract_bin_spikes_blanche
+    #path = DrWatson.datadir("exp_raw", "pvc3", "crcns_pvc3_cat_recordings", "spont_activity", "spike_data_area18")
+    #extract_fn = extract_bin_spikes_blanche
+    #basedir = DrWatson.datadir("exp_pro", "matrix", "blanche", "full")
+    #lr = 0.1
 
     # For Joost's data
-    #path = DrWatson.datadir("exp_raw", "joost_data", "Long_recordings-stability_MaxEnt_and_CFP", "long_1_spontaneous_activity.jld2")
-    #extract_fn = extract_bin_spikes_joost
+    path = DrWatson.datadir("exp_raw", "joost_data", "Long_recordings-stability_MaxEnt_and_CFP", "long_1_spontaneous_activity.jld2")
+    extract_fn = extract_bin_spikes_joost
+    basedir = DrWatson.datadir("exp_pro", "matrix", "joost_long", "full")
+    lr = 0.01
 
-    basedir = DrWatson.datadir("exp_pro", "matrix", "blanche", "full")
-    #basedir = DrWatson.datadir("exp_pro", "matrix", "joost_long", "full")
 
     maxiter = params["maxiter"]
     numsplit = parse(Int, ARGS[3])
@@ -149,6 +151,6 @@ if abspath(PROGRAM_FILE) == @__FILE__
     #model = MEFK.MEF3T(n; array_cast=arraycast)
 
     println("starting")
-    runexperiment(binsz, winszs, maxiter, path, batchsize, arraycast, params, basedir, numsplit, extract_fn)
+    runexperiment(binsz, winszs, maxiter, path, batchsize, arraycast, params, basedir, numsplit, extract_fn, lr)
 end
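
Note (not part of the patch): the diff threads the new `lr` argument from the script's entry point through `runexperiment` into `trainondata`, but the body of `trainondata` and the MEFK optimiser API are outside the shown hunks, so how `lr` is consumed there is not visible here. The sketch below is a minimal, self-contained Julia illustration of how a learning-rate argument typically scales each update step; `toy_train` and its gradient-descent loop are hypothetical and not taken from `scripts/blanche_expt.jl`.

    # Hypothetical sketch, not from the patch: `toy_train` stands in for a
    # training routine that accepts a learning rate, the way the patched
    # `trainondata(..., lr)` now does. Plain gradient descent on a 1-D mean
    # squared error, base Julia only.
    function toy_train(data::Vector{Float64}, maxiter::Int, lr::Float64)
        w = 0.0                                        # single scalar parameter
        losses = Float64[]
        for _ in 1:maxiter
            grad = 2 * sum(w .- data) / length(data)   # d(MSE)/dw
            w -= lr * grad                             # lr scales every update step
            push!(losses, sum((w .- data) .^ 2) / length(data))
        end
        return w, losses
    end

    # Usage: a smaller lr (e.g. the 0.01 chosen here for the Joost data, versus
    # 0.1 in the commented-out Blanche block) takes proportionally smaller steps.
    w, losses = toy_train(randn(100) .+ 3.0, 200, 0.01)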