Updated libsharp to commit 0787838ab3ec8afc0c28b98479a321ffba388980
This commit is contained in:
parent
a933430c60
commit
23aa450a77
62 changed files with 5075 additions and 11205 deletions
1
external/sharp/python/fake_pyrex/Pyrex/Distutils/__init__.py
vendored
Normal file
1
external/sharp/python/fake_pyrex/Pyrex/Distutils/__init__.py
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
# work around broken setuptools monkey patching
|
1
external/sharp/python/fake_pyrex/Pyrex/Distutils/build_ext.py
vendored
Normal file
1
external/sharp/python/fake_pyrex/Pyrex/Distutils/build_ext.py
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
build_ext = "yes, it's there!"
|
1
external/sharp/python/fake_pyrex/Pyrex/__init__.py
vendored
Normal file
1
external/sharp/python/fake_pyrex/Pyrex/__init__.py
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
# work around broken setuptools monkey patching
|
2
external/sharp/python/fake_pyrex/README
vendored
Normal file
2
external/sharp/python/fake_pyrex/README
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
This directory is here to fool setuptools into building .pyx files
|
||||
even if Pyrex is not installed. See ../setup.py.
|
1
external/sharp/python/libsharp/__init__.py
vendored
Normal file
1
external/sharp/python/libsharp/__init__.py
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
from .libsharp import *
|
79
external/sharp/python/libsharp/libsharp.pxd
vendored
Normal file
79
external/sharp/python/libsharp/libsharp.pxd
vendored
Normal file
|
@ -0,0 +1,79 @@
|
|||
# Cython declarations mirroring the libsharp C API (sharp.h,
# sharp_geomhelpers.h, sharp_almhelpers.h).  Signatures must match the C
# headers exactly; do not restyle or reorder arguments.

cdef extern from "sharp.h":
    # libsharp measures sizes/offsets in ptrdiff_t.
    ctypedef long ptrdiff_t

    # Legendre transforms; the *_s variants are single precision.
    # recfac may be NULL (the callers in libsharp.pyx pass NULL), in which
    # case libsharp computes the recursion factors internally.
    void sharp_legendre_transform_s(float *bl, float *recfac, ptrdiff_t lmax, float *x,
                                    float *out, ptrdiff_t nx)
    void sharp_legendre_transform(double *bl, double *recfac, ptrdiff_t lmax, double *x,
                                  double *out, ptrdiff_t nx)
    void sharp_legendre_transform_recfac(double *r, ptrdiff_t lmax)
    void sharp_legendre_transform_recfac_s(float *r, ptrdiff_t lmax)
    # Gauss-Legendre quadrature nodes (x) and weights (w).
    void sharp_legendre_roots(int n, double *x, double *w)

    # sharp_lowlevel.h
    # Opaque descriptors for the a_lm layout and the map geometry.
    ctypedef struct sharp_alm_info:
        pass

    ctypedef struct sharp_geom_info:
        pass

    void sharp_make_alm_info (int lmax, int mmax, int stride,
                              ptrdiff_t *mvstart, sharp_alm_info **alm_info)

    void sharp_make_geom_info (int nrings, int *nph, ptrdiff_t *ofs,
                               int *stride, double *phi0, double *theta,
                               double *wgt, sharp_geom_info **geom_info)

    void sharp_destroy_alm_info(sharp_alm_info *info)
    void sharp_destroy_geom_info(sharp_geom_info *info)

    # Number of local map pixels / a_lm coefficients described by an info.
    ptrdiff_t sharp_map_size(sharp_geom_info *info)
    ptrdiff_t sharp_alm_count(sharp_alm_info *self)


    # Transform job types; the Python-level mapping lives in
    # JOBTYPE_TO_CONST in libsharp.pyx.
    ctypedef enum sharp_jobtype:
        SHARP_YtW
        SHARP_Yt
        SHARP_WY
        SHARP_Y

    # Execution flags: double precision; accumulate into output.
    ctypedef enum:
        SHARP_DP
        SHARP_ADD

    void sharp_execute(sharp_jobtype type_,
                       int spin,
                       void *alm,
                       void *map,
                       sharp_geom_info *geom_info,
                       sharp_alm_info *alm_info,
                       int ntrans,
                       int flags,
                       double *time,
                       unsigned long long *opcnt) nogil

    ctypedef enum:
        SHARP_ERROR_NO_MPI

    # Returns SHARP_ERROR_NO_MPI when libsharp was built without MPI
    # support (checked by sht() in libsharp.pyx).
    int sharp_execute_mpi_maybe (void *pcomm, sharp_jobtype type, int spin,
                                 void *alm, void *map, sharp_geom_info *geom_info,
                                 sharp_alm_info *alm_info, int ntrans, int flags, double *time,
                                 unsigned long long *opcnt) nogil


cdef extern from "sharp_geomhelpers.h":
    void sharp_make_subset_healpix_geom_info(
        int nside, int stride, int nrings,
        int *rings, double *weight, sharp_geom_info **geom_info)
    void sharp_make_gauss_geom_info(
        int nrings, int nphi, double phi0,
        int stride_lon, int stride_lat, sharp_geom_info **geom_info)

cdef extern from "sharp_almhelpers.h":
    void sharp_make_triangular_alm_info (int lmax, int mmax, int stride,
                                         sharp_alm_info **alm_info)
    void sharp_make_rectangular_alm_info (int lmax, int mmax, int stride,
                                          sharp_alm_info **alm_info)
    void sharp_make_mmajor_real_packed_alm_info (int lmax, int stride,
                                                 int nm, const int *ms, sharp_alm_info **alm_info)
|
||||
|
||||
|
256
external/sharp/python/libsharp/libsharp.pyx
vendored
Normal file
256
external/sharp/python/libsharp/libsharp.pyx
vendored
Normal file
|
@ -0,0 +1,256 @@
|
|||
import numpy as np
|
||||
|
||||
__all__ = ['legendre_transform', 'legendre_roots', 'sht', 'synthesis', 'adjoint_synthesis',
|
||||
'analysis', 'adjoint_analysis', 'healpix_grid', 'triangular_order', 'rectangular_order',
|
||||
'packed_real_order']
|
||||
|
||||
|
||||
def legendre_transform(x, bl, out=None):
    """Evaluate the Legendre transform sum_l bl[l] * P_l(x[i]) for each x[i].

    Dispatches on x.dtype to the double- or single-precision libsharp
    kernel; bl is cast to x's precision if needed.  If *out* is omitted an
    array matching *x* is allocated.  Raises ValueError for dtypes other
    than float32/float64.
    """
    if out is None:
        out = np.empty_like(x)
    # Nothing to do for an empty output; skip the kernels entirely.
    if out.shape[0] == 0:
        return out
    if x.dtype == np.float64:
        coeffs = bl if bl.dtype == np.float64 else bl.astype(np.float64)
        return _legendre_transform(x, coeffs, out=out)
    if x.dtype == np.float32:
        coeffs = bl if bl.dtype == np.float32 else bl.astype(np.float32)
        return _legendre_transform_s(x, coeffs, out=out)
    raise ValueError("unsupported dtype")
|
||||
|
||||
|
||||
def _legendre_transform(double[::1] x, double[::1] bl, double[::1] out):
    # Double-precision kernel; dispatched to from legendre_transform().
    if out.shape[0] != x.shape[0]:
        raise ValueError('x and out must have same shape')
    # recfac=NULL lets libsharp compute recursion factors internally;
    # lmax is implied by the coefficient count (bl.shape[0] - 1).
    sharp_legendre_transform(&bl[0], NULL, bl.shape[0] - 1, &x[0], &out[0], x.shape[0])
    return np.asarray(out)
|
||||
|
||||
|
||||
def _legendre_transform_s(float[::1] x, float[::1] bl, float[::1] out):
    # Single-precision kernel; dispatched to from legendre_transform().
    if out.shape[0] != x.shape[0]:
        raise ValueError('x and out must have same shape')
    # recfac=NULL lets libsharp compute recursion factors internally;
    # lmax is implied by the coefficient count (bl.shape[0] - 1).
    sharp_legendre_transform_s(&bl[0], NULL, bl.shape[0] - 1, &x[0], &out[0], x.shape[0])
    return np.asarray(out)
|
||||
|
||||
|
||||
def legendre_roots(n):
    """Return (x, w): the n Gauss-Legendre quadrature nodes and weights."""
    x = np.empty(n, np.double)
    w = np.empty(n, np.double)
    cdef double[::1] x_buf = x, w_buf = w
    # Sanity check that the memoryviews really alias the fresh arrays.
    if not (x_buf.shape[0] == w_buf.shape[0] == n):
        raise AssertionError()
    # Only call into libsharp for non-empty output (&x_buf[0] would be
    # invalid for n == 0).
    if n > 0:
        sharp_legendre_roots(n, &x_buf[0], &w_buf[0])
    return x, w
|
||||
|
||||
|
||||
# Map the user-facing job name (as accepted by sht()) to the libsharp
# sharp_jobtype enum value.
JOBTYPE_TO_CONST = {
    'Y': SHARP_Y,
    'Yt': SHARP_Yt,
    'WY': SHARP_WY,
    'YtW': SHARP_YtW
}
|
||||
|
||||
def sht(jobtype, geom_info ginfo, alm_info ainfo, double[:, :, ::1] input,
        int spin=0, comm=None, add=False):
    """Execute a spherical harmonic transform job.

    Parameters
    ----------
    jobtype: str
        One of 'Y', 'Yt', 'WY', 'YtW' (see JOBTYPE_TO_CONST).  For 'Y'
        and 'WY' the input holds a_lm coefficients and a map-sized output
        is allocated; for 'Yt' and 'YtW' the roles are swapped.
    ginfo, ainfo:
        Map-side and coefficient-side layout descriptors.
    input: double[nmaps, ncomp, n]
        ncomp must be 1 for spin == 0 and 2 otherwise.
    spin: int
        Spin of the field.
    comm:
        Optional mpi4py communicator; selects the MPI execution path.
    add: bool
        If True, sets SHARP_ADD so libsharp accumulates into the output.

    Returns the newly allocated output array of shape
    (nmaps, ncomp, local_size) where local_size comes from ginfo or ainfo
    depending on the transform direction.
    """
    cdef void *comm_ptr
    cdef int flags = SHARP_DP | (SHARP_ADD if add else 0)
    cdef int r
    cdef sharp_jobtype jobtype_i
    cdef double[:, :, ::1] output_buf
    # libsharp treats each (map, component) pair as one transform.
    cdef int ntrans = input.shape[0] * input.shape[1]
    cdef int i, j

    if spin == 0 and input.shape[1] != 1:
        raise ValueError('For spin == 0, we need input.shape[1] == 1')
    elif spin != 0 and input.shape[1] != 2:
        raise ValueError('For spin != 0, we need input.shape[1] == 2')

    # Pointer tables handed to libsharp: one alm pointer and one map
    # pointer per transform.  Both live in a single uintp NumPy buffer so
    # they stay alive (and contiguous) for the duration of the call.
    cdef size_t[::1] ptrbuf = np.empty(2 * ntrans, dtype=np.uintp)
    cdef double **alm_ptrs = <double**>&ptrbuf[0]
    cdef double **map_ptrs = <double**>&ptrbuf[ntrans]

    try:
        jobtype_i = JOBTYPE_TO_CONST[jobtype]
    except KeyError:
        raise ValueError('jobtype must be one of: %s' % ', '.join(sorted(JOBTYPE_TO_CONST.keys())))

    if jobtype_i == SHARP_Y or jobtype_i == SHARP_WY:
        # alm -> map: output is allocated on the map side.
        output = np.empty((input.shape[0], input.shape[1], ginfo.local_size()), dtype=np.float64)
        output_buf = output
        for i in range(input.shape[0]):
            for j in range(input.shape[1]):
                alm_ptrs[i * input.shape[1] + j] = &input[i, j, 0]
                map_ptrs[i * input.shape[1] + j] = &output_buf[i, j, 0]
    else:
        # map -> alm: output is allocated on the coefficient side.
        output = np.empty((input.shape[0], input.shape[1], ainfo.local_size()), dtype=np.float64)
        output_buf = output
        for i in range(input.shape[0]):
            for j in range(input.shape[1]):
                alm_ptrs[i * input.shape[1] + j] = &output_buf[i, j, 0]
                map_ptrs[i * input.shape[1] + j] = &input[i, j, 0]

    if comm is None:
        with nogil:
            sharp_execute (
                jobtype_i,
                geom_info=ginfo.ginfo, alm_info=ainfo.ainfo,
                spin=spin, alm=alm_ptrs, map=map_ptrs,
                ntrans=ntrans, flags=flags, time=NULL, opcnt=NULL)
    else:
        from mpi4py import MPI
        if not isinstance(comm, MPI.Comm):
            raise TypeError('comm must be an mpi4py communicator')
        # Extract the raw MPI_Comm address without linking against the
        # mpi4py C API (see libsharp_mpi.pyx).
        from .libsharp_mpi import _addressof
        comm_ptr = <void*><size_t>_addressof(comm)
        with nogil:
            r = sharp_execute_mpi_maybe (
                comm_ptr, jobtype_i,
                geom_info=ginfo.ginfo, alm_info=ainfo.ainfo,
                spin=spin, alm=alm_ptrs, map=map_ptrs,
                ntrans=ntrans, flags=flags, time=NULL, opcnt=NULL)
        if r == SHARP_ERROR_NO_MPI:
            raise Exception('MPI requested, but not available')

    return output
|
||||
|
||||
|
||||
def synthesis(*args, **kwargs):
    """Spherical harmonic synthesis (alm -> map); shorthand for sht('Y', ...)."""
    return sht('Y', *args, **kwargs)


def adjoint_synthesis(*args, **kwargs):
    """Adjoint of synthesis (map -> alm); shorthand for sht('Yt', ...)."""
    return sht('Yt', *args, **kwargs)


def analysis(*args, **kwargs):
    """Spherical harmonic analysis (map -> alm); shorthand for sht('YtW', ...)."""
    return sht('YtW', *args, **kwargs)


def adjoint_analysis(*args, **kwargs):
    """Adjoint of analysis (alm -> map); shorthand for sht('WY', ...)."""
    return sht('WY', *args, **kwargs)
|
||||
|
||||
|
||||
#
|
||||
# geom_info
|
||||
#
|
||||
class NotInitializedError(Exception):
    """Raised when a geom_info/alm_info wrapper is used before its
    underlying libsharp descriptor has been allocated."""
|
||||
|
||||
|
||||
cdef class geom_info:
    # Owning wrapper around a libsharp sharp_geom_info*.  Subclasses
    # (e.g. healpix_grid) allocate self.ginfo in __init__.
    cdef sharp_geom_info *ginfo

    def __cinit__(self, *args, **kw):
        # Start NULL so __dealloc__ is safe even if a subclass __init__
        # raises before allocating.
        self.ginfo = NULL

    def local_size(self):
        """Return the number of map pixels described by this geometry."""
        if self.ginfo == NULL:
            raise NotInitializedError()
        return sharp_map_size(self.ginfo)

    def __dealloc__(self):
        if self.ginfo != NULL:
            sharp_destroy_geom_info(self.ginfo)
            self.ginfo = NULL
|
||||
|
||||
|
||||
cdef class healpix_grid(geom_info):
    """HEALPix map geometry, optionally restricted to a subset of rings."""

    _weight_cache = {}  # { (nside, 'T'/'Q'/'U') -> numpy array of ring weights cached from file }

    def __init__(self, int nside, stride=1, int[::1] rings=None, double[::1] weights=None):
        # A full HEALPix sphere has 4 * nside - 1 rings, but weight files
        # carry 2 * nside entries -- presumably one per ring of the
        # northern half, mirrored to the south; confirm against libsharp.
        if weights is not None and weights.shape[0] != 2 * nside:
            raise ValueError('weights must have length 2 * nside')
        sharp_make_subset_healpix_geom_info(nside, stride,
                                            nrings=4 * nside - 1 if rings is None else rings.shape[0],
                                            rings=NULL if rings is None else &rings[0],
                                            weight=NULL if weights is None else &weights[0],
                                            geom_info=&self.ginfo)

    @classmethod
    def load_ring_weights(cls, nside, fields):
        """
        Loads HEALPix ring weights from file. The environment variable
        HEALPIX should be set, and this routine will look in the `data`
        subdirectory.

        Parameters
        ----------

        nside: int
            HEALPix nside parameter

        fields: tuple of str
            Which weights to extract; pass ('T',) to only get scalar
            weights back, or ('T', 'Q', 'U') to get all the weights

        Returns
        -------

        List of NumPy arrays, according to fields parameter.

        """
        import os
        from astropy.io import fits
        data_path = os.path.join(os.environ['HEALPIX'], 'data')
        fits_field_names = {
            'T': 'TEMPERATURE WEIGHTS',
            'Q': 'Q-POLARISATION WEIGHTS',
            'U': 'U-POLARISATION WEIGHTS'}

        # Only hit the FITS file for fields not already cached.
        must_load = [field for field in fields if (nside, field) not in cls._weight_cache]

        if must_load:
            hdulist = fits.open(os.path.join(data_path, 'weight_ring_n%05d.fits' % nside))
            try:
                for field in must_load:
                    w = hdulist[1].data.field(fits_field_names[field]).ravel().astype(np.double)
                    # NOTE(review): the file apparently stores (weight - 1);
                    # the +1 converts to multiplicative weights -- confirm
                    # against the HEALPix data file convention.
                    w += 1
                    cls._weight_cache[nside, field] = w
            finally:
                hdulist.close()
        # Return copies so callers cannot mutate the cache.
        return [cls._weight_cache[(nside, field)].copy() for field in fields]
|
||||
|
||||
#
|
||||
# alm_info
|
||||
#
|
||||
|
||||
|
||||
cdef class alm_info:
    # Owning wrapper around a libsharp sharp_alm_info*.  Subclasses
    # (triangular_order etc.) allocate self.ainfo in __init__.
    cdef sharp_alm_info *ainfo

    def __cinit__(self, *args, **kw):
        # Start NULL so __dealloc__ is safe even if a subclass __init__
        # raises before allocating.
        self.ainfo = NULL

    def local_size(self):
        """Return the number of a_lm coefficients described by this layout."""
        if self.ainfo == NULL:
            raise NotInitializedError()
        return sharp_alm_count(self.ainfo)

    def __dealloc__(self):
        if self.ainfo != NULL:
            sharp_destroy_alm_info(self.ainfo)
            self.ainfo = NULL
|
||||
|
||||
|
||||
cdef class triangular_order(alm_info):
    """Triangular a_lm ordering (see sharp_make_triangular_alm_info)."""
    def __init__(self, int lmax, mmax=None, stride=1):
        # mmax defaults to lmax.
        mmax = mmax if mmax is not None else lmax
        sharp_make_triangular_alm_info(lmax, mmax, stride, &self.ainfo)
|
||||
|
||||
|
||||
cdef class rectangular_order(alm_info):
    """Rectangular a_lm ordering (see sharp_make_rectangular_alm_info)."""
    def __init__(self, int lmax, mmax=None, stride=1):
        # mmax defaults to lmax.
        mmax = mmax if mmax is not None else lmax
        sharp_make_rectangular_alm_info(lmax, mmax, stride, &self.ainfo)
|
||||
|
||||
|
||||
cdef class packed_real_order(alm_info):
    """m-major packed a_lm layout for real fields, optionally restricted
    to a subset of m values (used e.g. to distribute m over MPI ranks,
    see tests/test_sht.py)."""
    def __init__(self, int lmax, stride=1, int[::1] ms=None):
        # ms=None means all m in [0, lmax].
        sharp_make_mmajor_real_packed_alm_info(lmax=lmax, stride=stride,
                                               nm=lmax + 1 if ms is None else ms.shape[0],
                                               ms=NULL if ms is None else &ms[0],
                                               alm_info=&self.ainfo)
|
||||
|
17
external/sharp/python/libsharp/libsharp_mpi.pyx
vendored
Normal file
17
external/sharp/python/libsharp/libsharp_mpi.pyx
vendored
Normal file
|
@ -0,0 +1,17 @@
|
|||
cdef extern from "mpi.h":
    # Treated as an opaque handle here; the real definition comes from mpi.h.
    ctypedef void *MPI_Comm

cdef extern from "Python.h":
    object PyLong_FromVoidPtr(void*)

# Mirror of mpi4py's communicator object layout so the underlying
# MPI_Comm handle can be reached without the mpi4py C API.
cdef extern:
    ctypedef class mpi4py.MPI.Comm [object PyMPICommObject]:
        cdef MPI_Comm ob_mpi
        cdef unsigned flags

# For compatibility with mpi4py <= 1.3.1
# Newer versions could use the MPI._addressof function
def _addressof(Comm comm):
    """Return the address of comm's underlying MPI_Comm as a Python int."""
    cdef void *ptr = NULL
    ptr = <void*>&comm.ob_mpi
    return PyLong_FromVoidPtr(ptr)
|
1
external/sharp/python/libsharp/tests/__init__.py
vendored
Normal file
1
external/sharp/python/libsharp/tests/__init__.py
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
# empty
|
59
external/sharp/python/libsharp/tests/test_legendre.py
vendored
Normal file
59
external/sharp/python/libsharp/tests/test_legendre.py
vendored
Normal file
|
@ -0,0 +1,59 @@
|
|||
import numpy as np
|
||||
from scipy.special import legendre
|
||||
from scipy.special import p_roots
|
||||
import libsharp
|
||||
|
||||
from numpy.testing import assert_allclose
|
||||
|
||||
|
||||
def check_legendre_transform(lmax, ntheta):
    """Check libsharp's Legendre transform against a SciPy reference.

    Builds a smooth coefficient vector bl, evaluates
    sum_l bl[l] * P_l(cos(theta)) directly via scipy.special.legendre,
    and compares with libsharp in both double and single precision.
    """
    l = np.arange(lmax + 1)
    if lmax >= 1:
        # Gaussian-like taper so the truncated sum is smooth.
        sigma = -np.log(1e-3) / lmax / (lmax + 1)
        bl = np.exp(-sigma*l*(l+1))
        bl *= (2 * l + 1)
    else:
        bl = np.asarray([1], dtype=np.double)

    theta = np.linspace(0, np.pi, ntheta, endpoint=True)
    x = np.cos(theta)

    # Compute truth using scipy.special.legendre
    P = np.zeros((ntheta, lmax + 1))
    for l in range(lmax + 1):
        P[:, l] = legendre(l)(x)
    y0 = np.dot(P, bl)

    # double-precision
    y = libsharp.legendre_transform(x, bl)
    assert_allclose(y, y0, rtol=1e-12, atol=1e-12)

    # single-precision
    y32 = libsharp.legendre_transform(x.astype(np.float32), bl)
    # Bug fix: the original asserted on `y` again here, so the
    # single-precision result was never actually checked.
    assert_allclose(y32, y0, rtol=1e-5, atol=1e-5)
|
||||
|
||||
|
||||
def test_legendre_transform():
    """Nose-style generator over (lmax, ntheta) combinations, mixing a few
    fixed sizes with random ones."""
    nthetas = [0, 9, 17, 19] + list(np.random.randint(500, size=20))
    for ntheta in nthetas:
        # A fresh random batch of lmax values for every ntheta.
        lmaxes = [0, 1, 2, 3, 20] + list(np.random.randint(50, size=4))
        for lmax in lmaxes:
            yield check_legendre_transform, lmax, ntheta
|
||||
|
||||
def check_legendre_roots(n):
    """Compare libsharp's Gauss-Legendre nodes/weights with SciPy's."""
    if n == 0:
        xs, ws = [], []
    else:
        xs, ws = p_roots(n)  # SciPy reference
    xl, wl = libsharp.legendre_roots(n)
    assert_allclose(xs, xl, rtol=1e-14, atol=1e-14)
    assert_allclose(ws, wl, rtol=1e-14, atol=1e-14)
|
||||
|
||||
def test_legendre_roots():
    """
    Test the Legendre root-finding algorithm from libsharp by comparing it with
    the SciPy version.
    """
    for n in (0, 1, 32, 33, 128):
        yield check_legendre_roots, n
|
34
external/sharp/python/libsharp/tests/test_sht.py
vendored
Normal file
34
external/sharp/python/libsharp/tests/test_sht.py
vendored
Normal file
|
@ -0,0 +1,34 @@
|
|||
import numpy as np
|
||||
import healpy
|
||||
from scipy.special import legendre
|
||||
from scipy.special import p_roots
|
||||
from numpy.testing import assert_allclose
|
||||
import libsharp
|
||||
|
||||
from mpi4py import MPI
|
||||
|
||||
|
||||
def test_basic():
    # Smoke test: distribute m-values over MPI ranks, synthesise a map
    # from a single non-zero coefficient per low rank, check that the
    # three replicated transforms agree, and plot the result on rank 0.
    lmax = 10
    nside = 8
    rank = MPI.COMM_WORLD.Get_rank()
    # Round-robin assignment of m in [0, lmax] to ranks.
    ms = np.arange(rank, lmax + 1, MPI.COMM_WORLD.Get_size(), dtype=np.int32)

    order = libsharp.packed_real_order(lmax, ms=ms)
    grid = libsharp.healpix_grid(nside)

    alm = np.zeros(order.local_size())
    if rank == 0:
        alm[0] = 1
    elif rank == 1:
        # NOTE(review): identical to the rank-0 branch; looks like the
        # intent is that each of the first two ranks excites its first
        # local coefficient -- confirm this is not a copy-paste slip.
        alm[0] = 1

    # Repeat the same alm three times -> three identical output maps.
    map = libsharp.synthesis(grid, order, np.repeat(alm[None, None, :], 3, 0), comm=MPI.COMM_WORLD)
    assert np.all(map[2, :] == map[1, :]) and np.all(map[1, :] == map[0, :])
    map = map[0, 0, :]
    if rank == 0:
        healpy.mollzoom(map)
        from matplotlib.pyplot import show
        show()
|
77
external/sharp/python/setup.py
vendored
Normal file
77
external/sharp/python/setup.py
vendored
Normal file
|
@ -0,0 +1,77 @@
|
|||
#! /usr/bin/env python

# Build script for the libsharp Python bindings.  Requires the LIBSHARP
# environment variable (or LIBSHARP_INCLUDE/LIBSHARP_LIB) to point at an
# installed libsharp.

descr = """Spherical Harmonic transforms package

Python API for the libsharp spherical harmonic transforms library
"""

import os
import sys

DISTNAME = 'libsharp'
DESCRIPTION = 'libsharp library for fast Spherical Harmonic Transforms'
LONG_DESCRIPTION = descr
# Bug fix: these two previously ended with a trailing comma, which made
# them 1-tuples and corrupted the maintainer metadata passed to setup().
MAINTAINER = 'Dag Sverre Seljebotn'
MAINTAINER_EMAIL = 'd.s.seljebotn@astro.uio.no'
URL = 'http://sourceforge.net/projects/libsharp/'
LICENSE = 'GPL'
DOWNLOAD_URL = "http://sourceforge.net/projects/libsharp/"
VERSION = '0.1'

# Add our fake Pyrex at the end of the Python search path
# in order to fool setuptools into allowing compilation of
# pyx files to C files. Importing Cython.Distutils then
# makes Cython the tool of choice for this rather than
# (the possibly nonexisting) Pyrex.
project_path = os.path.split(__file__)[0]
sys.path.append(os.path.join(project_path, 'fake_pyrex'))

from setuptools import setup, find_packages, Extension
from Cython.Distutils import build_ext
import numpy as np

# Locate libsharp: either LIBSHARP (install prefix) or the explicit
# LIBSHARP_INCLUDE / LIBSHARP_LIB pair.
libsharp = os.environ.get('LIBSHARP', None)
libsharp_include = os.environ.get('LIBSHARP_INCLUDE', libsharp and os.path.join(libsharp, 'include'))
libsharp_lib = os.environ.get('LIBSHARP_LIB', libsharp and os.path.join(libsharp, 'lib'))

if libsharp_include is None or libsharp_lib is None:
    sys.stderr.write('Please set LIBSHARP environment variable to the install directly of libsharp, '
                     'this script will refer to the lib and include sub-directories. Alternatively '
                     'set LIBSHARP_INCLUDE and LIBSHARP_LIB\n')
    sys.exit(1)

if __name__ == "__main__":
    setup(install_requires = ['numpy'],
          packages = find_packages(),
          test_suite="nose.collector",
          # Well, technically zipping the package will work, but since it's
          # all compiled code it'll just get unzipped again at runtime, which
          # is pointless:
          zip_safe = False,
          name = DISTNAME,
          version = VERSION,
          maintainer = MAINTAINER,
          maintainer_email = MAINTAINER_EMAIL,
          description = DESCRIPTION,
          license = LICENSE,
          url = URL,
          download_url = DOWNLOAD_URL,
          long_description = LONG_DESCRIPTION,
          classifiers =
            [ 'Development Status :: 3 - Alpha',
              'Environment :: Console',
              'Intended Audience :: Developers',
              'Intended Audience :: Science/Research',
              'License :: OSI Approved :: GNU General Public License (GPL)',
              'Topic :: Scientific/Engineering'],
          cmdclass = {"build_ext": build_ext},
          ext_modules = [
              Extension("libsharp.libsharp",
                        ["libsharp/libsharp.pyx"],
                        libraries=["sharp", "fftpack", "c_utils"],
                        include_dirs=[libsharp_include],
                        library_dirs=[libsharp_lib],
                        extra_link_args=["-fopenmp"],
                        )
          ],
          )
|
Loading…
Add table
Add a link
Reference in a new issue