|
| 1 | +"""Compatibility helpers for optional and version-gated dependencies. |
| 2 | +
|
| 3 | +``ott-jax>=0.6`` removed ``ott.neural.methods.flows.dynamics`` and the |
| 4 | +``ott.neural.networks.velocity_field.VelocityField`` (flax linen) class. |
| 5 | +This module re-exports the symbols needed by CellFlow so that both |
| 6 | +``ott-jax>=0.5,<0.6`` and ``ott-jax>=0.6`` are supported. |
| 7 | +
|
| 8 | +The embedding helpers (``torch``, ``transformers``) are optional and only |
| 9 | +required when using gene-embedding functionality. |
| 10 | +""" |
| 11 | + |
| 12 | +# --------------------------------------------------------------------------- |
| 13 | +# Probability-path dynamics (BaseFlow, ConstantNoiseFlow, BrownianBridge) |
| 14 | +# |
| 15 | +# For ott-jax <0.6 we import directly from ott. For ott-jax >=0.6 the |
| 16 | +# module was removed, so we provide a vendored copy below. |
| 17 | +# |
| 18 | +# The fallback classes are a verbatim copy of |
| 19 | +# ott.neural.methods.flows.dynamics |
| 20 | +# from ott-jax 0.5.0 (commit 690b1ae, 2024-12-03). |
| 21 | +# ott-jax is licensed under the Apache License 2.0, which permits |
| 22 | +# reproduction and distribution of derivative works provided the license |
| 23 | +# and copyright notice are retained. See: |
| 24 | +# https://github.com/ott-jax/ott/blob/0.5.0/LICENSE |
| 25 | +# --------------------------------------------------------------------------- |
try:
    from ott.neural.methods.flows.dynamics import (  # ott-jax <0.6
        BaseFlow,
        BrownianBridge,
        ConstantNoiseFlow,
    )
except ImportError:
    # -- Vendored from ott-jax 0.5.0 (Apache-2.0) --------------------------
    # Source: src/ott/neural/methods/flows/dynamics.py
    # Copyright OTT-JAX contributors
    # -----------------------------------------------------------------------
    import abc

    import jax
    import jax.numpy as jnp

    class BaseFlow(abc.ABC):
        """Base class for all flows.

        Defines the interface for a conditional probability path between a
        source sample ``x0`` and a target sample ``x1``: subclasses supply
        the mean ``mu_t``, the noise scale ``sigma_t``, and the target
        vector field ``ut``; :meth:`compute_xt` samples from the path.

        Args:
            sigma: Noise level of the flow; its exact role is defined by the
                subclass via :meth:`compute_sigma_t`.
        """

        def __init__(self, sigma: float):
            # Stored verbatim; consumed by subclasses in compute_sigma_t.
            self.sigma = sigma

        @abc.abstractmethod
        def compute_mu_t(self, t: jnp.ndarray, x0: jnp.ndarray, x1: jnp.ndarray) -> jnp.ndarray: ...

        @abc.abstractmethod
        def compute_sigma_t(self, t: jnp.ndarray) -> jnp.ndarray: ...

        @abc.abstractmethod
        def compute_ut(self, t: jnp.ndarray, x: jnp.ndarray, x0: jnp.ndarray, x1: jnp.ndarray) -> jnp.ndarray: ...

        def compute_xt(self, rng: jax.Array, t: jnp.ndarray, x0: jnp.ndarray, x1: jnp.ndarray) -> jnp.ndarray:
            """Sample from the probability path.

            Draws standard-normal noise with the shape of ``x0`` and returns
            ``mu_t + sigma_t * noise``.  NOTE(review): ``t`` is presumably
            broadcastable against ``x0``/``x1`` (e.g. shape ``(batch, 1)``) —
            not enforced here; confirm against callers.
            """
            noise = jax.random.normal(rng, shape=x0.shape)
            mu_t = self.compute_mu_t(t, x0, x1)
            sigma_t = self.compute_sigma_t(t)
            return mu_t + sigma_t * noise

    class _StraightFlow(BaseFlow, abc.ABC):
        # Shared base for flows whose mean interpolates linearly between
        # x0 and x1; subclasses still choose sigma_t.
        def compute_mu_t(self, t, x0, x1):
            # Linear interpolation: (1 - t) * x0 + t * x1.
            return (1.0 - t) * x0 + t * x1

        def compute_ut(self, t, x, x0, x1):
            # Vector field of the straight path is constant in t and x.
            del t, x
            return x1 - x0

    class ConstantNoiseFlow(_StraightFlow):
        r"""Flow with straight paths and constant noise :math:`\sigma`."""

        def compute_sigma_t(self, t):
            # Same noise level at every time t.
            return jnp.full_like(t, fill_value=self.sigma)

    class BrownianBridge(_StraightFlow):
        r"""Brownian Bridge with :math:`\sigma_t = \sigma \sqrt{t(1-t)}`."""

        def compute_sigma_t(self, t):
            # Noise vanishes at both endpoints (t = 0 and t = 1).
            return self.sigma * jnp.sqrt(t * (1.0 - t))

        def compute_ut(self, t, x, x0, x1):
            # NOTE(review): the drift term divides by t * (1 - t) and is
            # therefore singular at t = 0 and t = 1; upstream ott-jax 0.5.0
            # behaves the same way, so callers presumably sample t in the
            # open interval (0, 1) — kept verbatim.
            drift_term = (1 - 2 * t) / (2 * t * (1 - t)) * (x - (t * x1 + (1 - t) * x0))
            control_term = x1 - x0
            return drift_term + control_term
| 89 | + |
| 90 | +# --------------------------------------------------------------------------- |
| 91 | +# Optional embedding dependencies (torch, transformers) |
| 92 | +# --------------------------------------------------------------------------- |
| 93 | +_EMBEDDING_ERR_MSG = ( |
| 94 | + "To use gene embedding, please install `transformers` and `torch` e.g. via `pip install cellflow['embedding']`." |
| 95 | +) |
| 96 | + |
| 97 | +try: |
| 98 | + import torch # noqa: F401 |
| 99 | + import transformers # noqa: F401 |
| 100 | + |
| 101 | + HAS_EMBEDDING_DEPS = True |
| 102 | +except ImportError: |
| 103 | + HAS_EMBEDDING_DEPS = False |
| 104 | + |
| 105 | + |
def check_embedding_deps() -> None:
    """Ensure the optional embedding dependencies are importable.

    Raises:
        ImportError: If ``torch``/``transformers`` were not found at module
            import time, with a message pointing at the install extra.
    """
    if HAS_EMBEDDING_DEPS:
        return
    raise ImportError(_EMBEDDING_ERR_MSG)
| 110 | + |
| 111 | + |
# Public surface: the flow classes (from ott-jax when available, otherwise
# the vendored fallback) plus the optional-dependency helpers.
__all__ = [
    "BaseFlow",
    "BrownianBridge",
    "ConstantNoiseFlow",
    "HAS_EMBEDDING_DEPS",
    "check_embedding_deps",
]
0 commit comments