numba用户手册
创建ufunc
--------------------------------------------------------------------------------------------------------------
2.1.@generated_jit #根据输入类型实现不同的功能.控制在编译时特化的选择 (注: @generated_jit 已弃用并在 Numba 0.59 中移除, 新代码请使用 numba.extending.overload)
import numpy as np
from numba import generated_jit, guvectorize, vectorize, types

# NOTE(review): @generated_jit is deprecated (removed in Numba 0.59);
# prefer numba.extending.overload for new code.
@generated_jit(nopython=True)
def is_missing(x):
    """Return True when *x* holds a "missing" value.

    The decorator calls this function at compile time with the Numba
    *type* of x; the returned lambda becomes the specialized
    implementation used at run time.
    """
    if isinstance(x, types.Float):
        # Floats: missing means NaN.
        return lambda x: np.isnan(x)
    elif isinstance(x, (types.NPDatetime, types.NPTimedelta)):
        # Build the NaT constant from the *type object* at compile time.
        # (Inside the lambda, x is a runtime value, so x('NaT') there
        # would be a call on a scalar and fail to compile.)
        missing = x('NaT')
        return lambda x: x == missing
    else:
        # Any other type can never be missing.
        return lambda x: False
--------------------------------------------------------------------------------------------------------------
2.2.@vectorize装饰 #将纯Python函数编译为传统ufunc(输入标量)运行NumPy数组
from numba import vectorize, int32, int64, float32, float64, complex64

# Example 1.1: compile a pure-Python scalar function into a NumPy ufunc.
@vectorize([float64(float64, float64)])  # explicit signature: float64 in and out
def f(x, y):
    """Element-wise addition, exposed as a ufunc over NumPy arrays."""
    return x + y
--------------------------------------------------------------------------------------------------------------
# Example 1.2: several signatures. Order matters — candidates are tried
# in declaration order, so type-based dispatch only works as expected
# when signatures go from narrowest to widest type.
@vectorize([int32(int32, int32),
            int64(int64, int64),
            float32(float32, float32),
            float64(float64, float64),
            complex64(complex64, complex64)])
def f(x, y):
    """Element-wise addition ufunc with multiple registered signatures."""
    return x + y

a = np.arange(6)
f(a, a)               # array([ 0,  2,  4,  6,  8, 10])
a = np.linspace(0, 1, 6)
f(a, a)               # array([0. , 0.4, 0.8, 1.2, 1.6, 2. ])
a = np.linspace(0, 1 + 1j, 6)
f(a, a)               # error: complex128 input cannot safely cast to complex64
--------------------------------------------------------------------------------------------------------------
# Example 1.3: a @vectorize-built ufunc also supports reduce/accumulate.
# Fixed: the last entry was complex64(complex64[:]) — an array type with
# the wrong arity. @vectorize signatures must be scalar and match f's two
# parameters, as in example 1.2.
@vectorize([int32(int32, int32),
            int64(int64, int64),
            float32(float32, float32),
            float64(float64, float64),
            complex64(complex64, complex64)])
def f(x, y):
    """Element-wise addition ufunc; supports ufunc methods (reduce, ...)."""
    return x + y

a = np.linspace(0, 1 + 1j, 6)
f(a, a)               # error: complex128 input cannot safely cast to complex64
a = np.arange(12).reshape(3, 4)
f.reduce(a, axis=0)   # fold along axis 0 (column sums)
f.reduce(a, axis=1)   # fold along axis 1 (row sums)
f.accumulate(a)       # running accumulation along the default axis
--------------------------------------------------------------------------------------------------------------
2.3.@guvectorize装饰 #将纯Python函数编译为传统ufunc(输入数组)运行NumPy数组
# Generalized ufunc: takes an int64 array and an int64 scalar (the empty
# '()' in the layout string) and writes into the output array 'res';
# layout '(n),()->(n)' maps an n-vector plus a scalar to an n-vector.
@guvectorize([(int64[:], int64, int64[:])], '(n),()->(n)')
def g(x, y, res):
    """Add the scalar y to every element of x, storing results in res."""
    for i in range(x.shape[0]):
        res[i] = x[i] + y
--------------------------------------------------------------------------------------------------------------
-