# Example: element-wise map over two arrays via the proposed @map macro.
r = @map((x, y) -> sin(x) + cos(y), a, b)
# Intended expansion — a single fused loop with no temporaries:
r = #some code to create an array of proper size and type
for i = 1 : length(r)
@inbounds r[i] = sin(a[i]) + cos(b[i])
end
# Example: @map with a multi-output kernel — the lambda returns a tuple,
# so the macro is expected to produce one output array per tuple element.
(u, v) = @map((x, y) ->(s=sin(x); c=cos(y); (s+c, s-c)), a, b)
# Intended expansion (u and v presumably allocated by the macro before
# this loop — TODO confirm against the macro implementation):
for i = 1 : length(u)
@inbounds s = sin(a[i])
@inbounds c = cos(b[i])
@inbounds u[i] = s + c
@inbounds v[i] = s - c
end
# Example: @fold — a folding (reduction) macro. The first argument is the
# per-element update statement, the second names the initializer (`zero`),
# and the remaining arguments are the input arrays.
ssd = @fold(s += abs2(x - y), zero, a, b)
# Intended expansion:
ssd = begin
R = #infer result type
n = #infer common array length
s = zero(R)
for i = 1 : n
@inbounds s += abs2(a[i] - b[i])  # fixed typo: was `@inbound`
end
s
end
# Examples: reduction macros built on the same machinery.
s = @reduce(+, a)                                  # plain reduction
ssq = @mapreduce(abs2, +, a)                       # sum of squares
ssd = @mapreduce((x, y) -> abs2(x - y), +, a, b)   # sum of squared differences
s = @sum((x, y) -> abs2(x - y), a, b)              # fixed spacing: was `x -y`
m = @max(abs, a)                                   # maximum of |a[i]|
r = @mean(abs, a, 1) # along columns
#old syntax: @devec r = sin(a) + cos(b)
#new syntax:
r = @devec sin(a) + cos(b)
# @devec appears on the right-hand side, so it composes with a one-line
# function definition:
myfunc(a, b) = @devec sin(a) + cos(b)
# A begin/end block can be devectorized as a whole and return multiple
# results as a tuple:
u, v = @devec begin
s = sin(a)
c = cos(b)
(s + c, s - c)
end
# @devec! writes the results into pre-allocated outputs (here u and v)
# instead of allocating new arrays — note the `!` mutation convention.
@devec! u v begin
s = sin(a)
c = cos(b)
u = s + c
v = s - c
end
I really do believe that many of these concerns will get addressed as early as Julia 0.3.
I agree with Tim that many such things already belong in Base, and some of Dahua's work will land there as well, once the basic machinery for better vectorized computations is implemented.
-viral
# In-place devectorization of an expression mixing element-wise ops with a
# reduction (sum(z)); the result is written into `a`.
@devec!(a, x + y .* sum(z))
# Scalar subexpressions such as sin(2.0) + cos(2.0) should be hoisted out
# of the generated loop, as the hand expansion below shows:
@devec (x + y) * (sin(2.0) + cos(2.0))
r = #create an array of proper type and shape
# loop-invariant scalar part is computed once, before the loop
tmp1 = sin(2.0) + cos(2.0)
for i = 1 : length(r)
@inbounds r[i] = (x[i] + y[i]) * tmp1
end
# @map whose arguments are themselves vectorized expressions: the inputs
# (a.^2 + b and b.^3) are evaluated element-wise inside the one fused loop.
@map((x, y)-> sin(f(x) + g(y)), a.^2 + b, b.^3)
# Intended expansion (r presumably allocated by the macro — TODO confirm):
for i = 1 : length(r)
@inbounds xi = a[i]^2 + b[i]
@inbounds yi = b[i]^3
@inbounds r[i] = sin(f(xi) + g(yi))
end
Implementation of this plan is underway (see https://github.com/lindahua/NumericExtensions.jl/pull/16)