From 2c66fdc8c5e36e9c3437c9bff29aceb559bae087 Mon Sep 17 00:00:00 2001
From: dorimolnar
Date: Tue, 19 Nov 2024 16:37:09 +0100
Subject: [PATCH 1/2] error for transforms

---
 README.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/README.md b/README.md
index 43d2c103..c059fbb7 100644
--- a/README.md
+++ b/README.md
@@ -18,6 +18,8 @@
 
 ## What is Jaxley?
 
+Something :)
+
 `Jaxley` is a differentiable simulator for [biophysical neuron models](https://jaxley.readthedocs.io/en/latest/faq/question_03.html), written in the Python library [JAX](https://github.com/google/jax). Its key features are:
 
 - automatic differentiation, allowing gradient-based optimization of thousands of parameters

From 045ab7c85bf74aa88ff9efddff1fb7eb174023b4 Mon Sep 17 00:00:00 2001
From: dorimolnar
Date: Tue, 19 Nov 2024 16:42:09 +0100
Subject: [PATCH 2/2] remove stupid stuff

---
 README.md                     | 2 --
 jaxley/optimize/transforms.py | 2 ++
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index c059fbb7..43d2c103 100644
--- a/README.md
+++ b/README.md
@@ -18,8 +18,6 @@
 
 ## What is Jaxley?
 
-Something :)
-
 `Jaxley` is a differentiable simulator for [biophysical neuron models](https://jaxley.readthedocs.io/en/latest/faq/question_03.html), written in the Python library [JAX](https://github.com/google/jax). Its key features are:
 
 - automatic differentiation, allowing gradient-based optimization of thousands of parameters

diff --git a/jaxley/optimize/transforms.py b/jaxley/optimize/transforms.py
index d13c5e78..7b7c0923 100644
--- a/jaxley/optimize/transforms.py
+++ b/jaxley/optimize/transforms.py
@@ -47,6 +47,8 @@ def inverse(self, y: ArrayLike) -> Array:
         x = (y - self.lower) / self.width
         x = -jnp.log((1.0 / x) - 1.0)
         return x
+
+    print("something")
 
 
 class SoftplusTransform(Transform):
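
For context on the last hunk: `SigmoidTransform.inverse` computes the logit of `(y - self.lower) / self.width`, i.e. it undoes a transform that squashes an unconstrained parameter into the interval `(lower, lower + width)`. Below is a minimal standalone sketch of that round trip, assuming a logistic forward map (the `forward`/`inverse` names, the sample bounds, and the test values here are illustrative, not taken from the patch):

import jax.numpy as jnp

# Illustrative bounds; SigmoidTransform stores these as `lower` and `width`.
lower, width = -70.0, 40.0

def forward(x):
    # Squash unconstrained x into (lower, lower + width) with a logistic.
    return lower + width / (1.0 + jnp.exp(-x))

def inverse(y):
    # Logit-style inverse, mirroring the two lines shown in the hunk above.
    x = (y - lower) / width
    return -jnp.log((1.0 / x) - 1.0)

x = jnp.array([-2.0, 0.0, 3.0])
print(jnp.allclose(inverse(forward(x)), x, atol=1e-5))  # prints True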