Commit f1852381 (ebrains-spack-builds)
Authored 1 year ago by Eleni Mathioulaki

fix(py-elephant): add py-pycuda@2023.1

Parent: 51b34849
Merge request: !503 "Enable CUDA support for py-elephant"
Pipeline: #27976 passed in 19 minutes and 46 seconds
Showing 1 changed file:

packages/py-pycuda/package.py (new file, mode 100644): 53 additions, 0 deletions
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *
from spack.pkg.builtin.boost import Boost


class PyPycuda(PythonPackage):
    """PyCUDA gives you easy, Pythonic access to Nvidia's CUDA parallel
    computation API"""

    homepage = "https://mathema.tician.de/software/pycuda/"
    pypi = "pycuda/pycuda-2019.1.2.tar.gz"

    # begin EBRAINS (added): added new version
    version("2023.1", sha256="175ff675f0cf10e38e9adc03ed5df3ed8d8abf7da5134c8dccec752e8a0a3e91")
    # end EBRAINS
    version("2021.1", sha256="ab87312d0fc349d9c17294a087bb9615cffcf966ad7b115f5b051008a48dd6ed")
    version("2020.1", sha256="effa3b99b55af67f3afba9b0d1b64b4a0add4dd6a33bdd6786df1aa4cc8761a5")
    version("2019.1.2", sha256="ada56ce98a41f9f95fe18809f38afbae473a5c62d346cfa126a2d5477f24cc8a")
    version("2016.1.2", sha256="a7dbdac7e2f0c0d2ad98f5f281d5a9d29d6673b3c20210e261b96e9a2d0b6e37")

    @run_before("install")
    def configure(self):
        pyver = self.spec["python"].version.up_to(2).joined
        boostlib = "boost_python{0}".format(pyver)
        configure_args = [
            "--no-use-shipped-boost",
            "--boost-inc-dir={0}".format(self.spec["boost"].prefix.include),
            "--boost-lib-dir={0}".format(self.spec["boost"].libs.directories[0]),
            "--boost-python-libname={0}".format(boostlib),
        ]
        python("configure.py", *configure_args)

    depends_on("py-setuptools", type="build")
    depends_on("cuda")
    depends_on("boost+python")

    # TODO: replace this with an explicit list of components of Boost,
    # for instance depends_on('boost +filesystem')
    # See https://github.com/spack/spack/pull/22303 for reference
    depends_on(Boost.with_default_variants)

    depends_on("python@3.6:3", type=("build", "run"), when="@2020.1:")
    depends_on("py-numpy@1.6:", type=("build", "run"))
    depends_on("py-pytools@2011.2:", type=("build", "run"))
    depends_on("py-six", type="run", when="@:2020.1")
    depends_on("py-decorator@3.2.0:", type=("build", "run"), when="@:2020.1")
    depends_on("py-appdirs@1.4.0:", type=("build", "run"))
    depends_on("py-mako", type=("build", "run"))
    depends_on("cuda@:8.0.61", when="@2016.1.2")
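For context on merge request !503 ("Enable CUDA support for py-elephant"), the sketch below illustrates, hypothetically, how a downstream recipe such as py-elephant could opt into the py-pycuda version added by this commit through a variant. The class body, the pypi placeholder, and the variant name are assumptions for illustration only; the actual py-elephant package in this repository may differ.

# Hypothetical sketch (not part of this commit): gating a CUDA-enabled
# py-pycuda dependency behind a variant in a downstream Spack recipe.
from spack.package import *


class PyElephant(PythonPackage):
    """Electrophysiology Analysis Toolkit (illustrative stub)."""

    homepage = "https://python-elephant.org"
    pypi = "elephant/elephant-0.14.0.tar.gz"  # assumed placeholder version

    variant("cuda", default=False, description="Enable CUDA-accelerated kernels")

    # Pull in the version added by this commit only when CUDA support is requested.
    depends_on("py-pycuda@2023.1:", type=("build", "run"), when="+cuda")

With such a variant in place, a spec like py-elephant+cuda would resolve py-pycuda@2023.1 together with its cuda dependency, while the default build stays CUDA-free.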