From 6474ac255aaafd997030c9cc00c02fe1838ec647 Mon Sep 17 00:00:00 2001
From: Michael Deistler
Date: Mon, 25 Nov 2024 11:40:36 +0100
Subject: [PATCH] Prepare release v0.5.0 (#532)

---
 CHANGELOG.md            | 88 +++++++++++++++++++++++++++++++++++++++++
 docs/faq/question_04.md |  4 +-
 jaxley/__version__.py   |  2 +-
 pyproject.toml          |  2 +-
 4 files changed, 93 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3dce1854..5e421d4a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,91 @@
+# 0.5.0
+
+### API changes
+
+- Synapse views no longer exist (#447, #453, @jnsbck). Previous code such as
+```python
+net.IonotropicSynapse.set("IonotropicSynapse_s", 0.2)
+```
+must be updated to:
+```python
+net.set("IonotropicSynapse_s", 0.2)
+```
+For a more detailed tutorial on how to index synapses, see
+[this new tutorial](https://jaxley.readthedocs.io/en/latest/tutorials/09_advanced_indexing.html).
+- Throughout the codebase, we renamed any occurrence of `seg` (for `segment`) to `comp`
+(for `compartment`). The most notable user-facing changes are:
+  - `branch = jx.Branch(comp, ncomp=4)`
+  - `cell = jx.read_swc(fname, ncomp=4)`
+- New defaults for the SWC reader with `jx.read_swc()`. By default, we now have
+`assign_groups=True` (previously `False`) and `max_branch_len=None` (previously
+`300.0`).
+- We renamed `.view` to `.nodes`, e.g., `cell.branch(0).nodes` (#447, #453, @jnsbck).
+- We renamed `_update_nodes_with_xyz()` to `compute_compartment_centers()` (#520,
+@jnsbck).
+- We updated the way in which transformations are built (#455, @manuelgloeckler).
+Previous code such as
+```python
+tf = jx.ParamTransform(
+    lower={"radius": 0.1, "length": 2.0},
+    upper={"radius": 3.0, "length": 20.0},
+)
+```
+must be updated to:
+```python
+from jaxley.optimize.transforms import ParamTransform, SigmoidTransform
+transforms = [
+    {"radius": SigmoidTransform(lower=0.1, upper=3.0)},
+    {"length": SigmoidTransform(lower=2.0, upper=20.0)},
+]
+tf = ParamTransform(transforms)
+```
+
+### New features
+
+- Added a new `delete_channel()` method (#521, @jnsbck)
+- Allow writing trainables to the module (#470, @michaeldeistler):
+```python
+net.make_trainable("radius")
+params = net.get_parameters()
+net.write_trainables(params)
+```
+- Expose the step function to allow for fine-grained simulation (#466, @manuelgloeckler)
+- More flexible and thorough viewing (#447, #453, @jnsbck)
+- Boolean indexing for cells, branches, and comps (#494, @jnsbck):
+```python
+r_greater_1 = net.nodes.groupby("global_cell_index")["radius"].mean() > 1
+net[r_greater_1].nodes.vis()
+```
+- Check if recordings are empty (#460, @deezer257)
+- Enable `clamp` to be jitted and vmapped with `data_clamp()` (#374, @kyralianaka)
+
+### Bug fixes
+
+- Allow cells that were read from SWC to be pickled (#525, @jnsbck)
+- Fix units of `compute_current()` in channels (#461, @michaeldeistler)
+- Fix issues with plotting when the morphology has a different number of compartments
+(#513, @jnsbck)
+
+### Documentation
+
+- New tutorial on synapse indexing (#464, @michaeldeistler, @zinaStef)
+- New tutorial on parameter sharing (#464, @michaeldeistler, @zinaStef)
+- New tutorial on modules and views (#493, @jnsbck)
+- Improved tutorial on building channel models (#473, @simoneeb)
+- Get rid of the tensorflow dependency by defining our own simple dataloader in the
+tutorial (#484, @jnsbck)
+- New FAQ about rate-based networks (#531, @michaeldeistler)
+
+### Code health
+
+- Refactor tests with fixtures (#479, #499, @fabioseel, @jnsbck)
+- Make several attributes private (#495, @ntolley)
+- Move `read_swc.py` to the new `io` folder (#524, @jnsbck)
+- Faster testing for SWC and plotting (#479, @fabioseel)
+- Automated tests to check if tutorials can be run (#480, @jnsbck)
+- Add helpers to deprecate functions and kwargs (#516, @jnsbck)
+
+
 # 0.4.0
 
 ### New features
diff --git a/docs/faq/question_04.md b/docs/faq/question_04.md
index ff2ad827..7dee3258 100644
--- a/docs/faq/question_04.md
+++ b/docs/faq/question_04.md
@@ -1,10 +1,12 @@
 # How can I implement rate-based neuron models in Jaxley?
 
 In this FAQ, we explain how one can implement rate-based neuron models of the form:
+
 $$
 \tau \frac{dV}{dt} = -V + \sum w_{\text{syn}} \phi(V_{\text{pre}})
 $$
-Here, $\phi$ is a nonlinearity such as a `TanH` or a `ReLU`.
+
+Here, $\phi$ is a nonlinearity such as a `tanh` or a `ReLU`.
 
 To implement this in `Jaxley`, we first have to set up a network consisting of
 point-neurons:
diff --git a/jaxley/__version__.py b/jaxley/__version__.py
index b2395e5e..4a7d3a1e 100644
--- a/jaxley/__version__.py
+++ b/jaxley/__version__.py
@@ -1,6 +1,6 @@
 # This file is part of Jaxley, a differentiable neuroscience simulator. Jaxley is
 # licensed under the Apache License Version 2.0, see
 
-VERSION = (0, 4, 0)
+VERSION = (0, 5, 0)
 
 __version__ = ".".join(map(str, VERSION))
diff --git a/pyproject.toml b/pyproject.toml
index d37e2bda..35eb11cd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "Jaxley"
-version = "0.4.0"
+version = "0.5.0"
 description = "Differentiable neuron simulations."
 authors = [
     { name = "jaxleyverse", email = "jaxleyverse@gmail.com"},
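
For readers upgrading from 0.4.0, a minimal sketch of how the renamed pieces fit together is shown below. It sticks to the names listed in the changelog above (`jx.Branch(..., ncomp=...)`, `jx.read_swc(..., ncomp=...)`, `.nodes`, `compute_compartment_centers()`); the three-branch toy morphology, the `jx.Compartment()`/`jx.Cell()` constructors, and the `fname` path are illustrative assumptions, not part of this patch.

```python
import jaxley as jx

# Build a toy cell with the renamed `ncomp` argument (formerly `nseg`).
comp = jx.Compartment()
branch = jx.Branch(comp, ncomp=4)
cell = jx.Cell(branch, parents=[-1, 0, 0])

# The SWC reader now assigns groups by default and no longer splits long
# branches (`assign_groups=True`, `max_branch_len=None`).
# `fname` is a placeholder path; uncomment with a real morphology file.
# cell = jx.read_swc(fname, ncomp=4)

# Renamed helpers: `compute_compartment_centers()` replaces
# `_update_nodes_with_xyz()`, and `.nodes` replaces `.view`.
cell.compute_compartment_centers()
print(cell.branch(0).nodes)
```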