sharlatan pushed a commit to branch python-team
in repository guix.
commit 0fca51323e71e04f2a81a753b2f428d74403021e
Author: Sharlatan Hellseher <[email protected]>
AuthorDate: Tue Mar 25 12:08:21 2025 +0000
gnu: python-autograd: Fix indentation.
* gnu/packages/machine-learning.scm (python-autograd): Fix indentation.
Change-Id: I67b1c01d323e2458b49447969bb4164f71d1571b
---
gnu/packages/machine-learning.scm | 51 +++++++++++++++++++++------------------
1 file changed, 27 insertions(+), 24 deletions(-)
diff --git a/gnu/packages/machine-learning.scm b/gnu/packages/machine-learning.scm
index 4e85991923..e7923bd503 100644
--- a/gnu/packages/machine-learning.scm
+++ b/gnu/packages/machine-learning.scm
@@ -2492,34 +2492,37 @@ Covariance Matrix Adaptation Evolution Strategy (CMA-ES) for Python.")
     (license license:expat)))
 
 (define-public python-autograd
- (package
- (name "python-autograd")
- (version "1.7.0")
- (source (origin
- (method git-fetch)
- (uri (git-reference
- (url "https://github.com/HIPS/autograd")
- (commit (string-append "v" version))))
- (sha256
- (base32
- "1fpnmm3mzw355iq7w751j4mjfcr0yh324cxidba1l22652gg8r8m"))
- (file-name (git-file-name name version))))
- (build-system pyproject-build-system)
- (native-inputs
- (list python-hatchling python-pytest))
- (propagated-inputs
- (list python-future python-numpy))
- (home-page "https://github.com/HIPS/autograd")
- (synopsis "Efficiently computes derivatives of NumPy code")
- (description "Autograd can automatically differentiate native Python and
-NumPy code. It can handle a large subset of Python's features, including loops,
-ifs, recursion and closures, and it can even take derivatives of derivatives
-of derivatives. It supports reverse-mode differentiation
+  (package
+    (name "python-autograd")
+    (version "1.7.0")
+    (source
+     (origin
+       (method git-fetch)
+       (uri (git-reference
+             (url "https://github.com/HIPS/autograd")
+             (commit (string-append "v" version))))
+       (sha256
+        (base32 "1fpnmm3mzw355iq7w751j4mjfcr0yh324cxidba1l22652gg8r8m"))
+       (file-name (git-file-name name version))))
+    (build-system pyproject-build-system)
+    (native-inputs
+     (list python-hatchling
+           python-pytest))
+    (propagated-inputs
+     (list python-future
+           python-numpy))
+    (home-page "https://github.com/HIPS/autograd")
+    (synopsis "Efficiently computes derivatives of NumPy code")
+    (description
+     "Autograd can automatically differentiate native Python and NumPy code.
+It can handle a large subset of Python's features, including loops, ifs,
+recursion and closures, and it can even take derivatives of derivatives of
+derivatives. It supports reverse-mode differentiation
 (a.k.a. backpropagation), which means it can efficiently take gradients of
 scalar-valued functions with respect to array-valued arguments, as well as
 forward-mode differentiation, and the two can be composed arbitrarily. The
 main intended application of Autograd is gradient-based optimization.")
- (license license:expat)))
+    (license license:expat)))
 
 (define-public lightgbm
   (package