Hello Everyone,
I am trying to implement the following Hungarian Huber loss function:
function huberloss(y, y_hat; delta=1)
    absdiff = abs.(y .- y_hat)
    # 1 where the residual is in the linear regime (>= delta), 0 in the quadratic regime
    loss_activation = (absdiff .>= delta)   # was `.>= 1`; compare against delta
    loss = loss_activation .* delta .* (absdiff .- 0.5*delta) .+
           (loss_activation .* (-1) .+ 1) .* (0.5 .* absdiff.^2)
    return sum(loss) / length(y)   # mean over all elements
end
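For reference, this is meant to be the usual elementwise Huber loss, averaged over all entries: 0.5*d^2 when |d| < delta and delta*(|d| - 0.5*delta) otherwise. A quick sanity check on ordinary Float64 vectors (toy numbers I made up, not from my data):

# toy check of huberloss on plain vectors (values are made up)
y     = [0.0, 2.0]
y_hat = [0.5, 0.0]
# |diffs| = [0.5, 2.0] -> 0.5*0.5^2 = 0.125 (quadratic) and 1*(2.0 - 0.5) = 1.5 (linear)
# mean = (0.125 + 1.5) / 2 = 0.8125
huberloss(y, y_hat)   # expect 0.8125 with the default delta = 1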
function hungarian_huber_loss(labels, predictions)
    # labels and predictions are (num_objects, object_labels, batch_size) arrays,
    # consistent with the labels[j, :, b] indexing below
    num_objects, object_labels, batch_size = size(labels)
    batch_loss = 0
    for b in 1:batch_size
        actual_costs = 0
        # pairwise Huber cost between every prediction i and every label j
        pairwise_cost = zeros(num_objects, num_objects)
        for i in 1:num_objects
            for j in 1:num_objects
                pairwise_cost[i, j] = huberloss(labels[j, :, b], predictions[i, :, b])
            end
        end
        # greedy matching: repeatedly take the cheapest remaining (prediction, label)
        # pair and block its row and column (an approximation of the Hungarian assignment)
        copy_pairwise_cost = copy(pairwise_cost)
        indices = zeros(Int, num_objects)
        blocked = 30   # sentinel larger than any expected cost (avoids shadowing Base.max)
        for i in 1:num_objects
            indexmin = argmin(copy_pairwise_cost)
            r, c = indexmin[1], indexmin[2]
            indices[r] = c
            copy_pairwise_cost[r, :] .= blocked
            copy_pairwise_cost[:, c] .= blocked
        end
        # average matched cost for this batch element
        for i in 1:num_objects
            actual_costs += pairwise_cost[i, indices[i]]
        end
        batch_loss += actual_costs / num_objects
    end
    return batch_loss / batch_size
end
However, the bolded line is giving errors during backpropagation. I ran into a similar problem with my padding function, which I worked around by using the cat function, but here I don't know what to do. Here is the stacktrace, followed by a small sketch of the pattern I think is going wrong:
[1] setindex! at ./array.jl:849 [inlined]
[2] hungarian_huber_loss(::KnetArray{Float32,3}, ::AutoGrad.Result{KnetArray{Float32,3}}) at ./In[16]:17
[3] loss(::Chain, ::KnetArray{Float32,4}, ::KnetArray{Float32,3}) at ./In[18]:3
[4] (::var"#12#13")() at /kuacc/users/ashah20/.julia/packages/AutoGrad/TTpeo/src/core.jl:205
[5] differentiate(::Function; o::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}) at /kuacc/users/ashah20/.julia/packages/AutoGrad/TTpeo/src/core.jl:144
[6] differentiate(::Function) at /kuacc/users/ashah20/.julia/packages/AutoGrad/TTpeo/src/core.jl:135
[7] top-level scope at In[20]:1
[8] include_string(::Function, ::Module, ::String, ::String) at ./loading.jl:1091
[9] softscope_include_string(::Module, ::String, ::String) at /kuacc/users/ashah20/.julia/packages/SoftGlobalScope/u4UzH/src/SoftGlobalScope.jl:65
[10] execute_request(::ZMQ.Socket, ::IJulia.Msg) at /kuacc/users/ashah20/.julia/packages/IJulia/IDNmS/src/execute_request.jl:67
[11] #invokelatest#1 at ./essentials.jl:710 [inlined]
[12] invokelatest at ./essentials.jl:709 [inlined]
[13] eventloop(::ZMQ.Socket) at /kuacc/users/ashah20/.julia/packages/IJulia/IDNmS/src/eventloop.jl:8
[14] (::IJulia.var"#15#18")() at ./task.jl:356
MethodError: Cannot `convert` an object of type AutoGrad.Result{Float32} to an object of type Float64
Closest candidates are:
convert(::Type{T}, !Matched::Ratios.SimpleRatio{S}) where {T<:AbstractFloat, S} at /kuacc/users/ashah20/.julia/packages/Ratios/xLeZh/src/Ratios.jl:52
convert(::Type{T}, !Matched::Static.StaticInt{N}) where {T<:Number, N} at /kuacc/users/ashah20/.julia/packages/Static/A5kpr/src/int.jl:22
convert(::Type{T}, !Matched::Static.StaticFloat64{N}) where {N, T<:AbstractFloat} at /kuacc/users/ashah20/.julia/packages/Static/A5kpr/src/float.jl:26
...
Stacktrace:
[1] differentiate(::Function; o::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}) at /kuacc/users/ashah20/.julia/packages/AutoGrad/TTpeo/src/core.jl:148
[2] differentiate(::Function) at /kuacc/users/ashah20/.julia/packages/AutoGrad/TTpeo/src/core.jl:135
[3] top-level scope at In[20]:1
[4] include_string(::Function, ::Module, ::String, ::String) at ./loading.jl:1091
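For context, my reading of the stacktrace is that the failure comes from writing the AutoGrad.Result returned by huberloss into the pre-allocated Float64 array pairwise_cost: setindex! then tries (and fails) to convert the Result to a Float64, and the in-place write would not be tracked by AutoGrad anyway. Below is a minimal standalone sketch of the pattern I believe is failing, together with the mutation-free variant I am considering (made-up names, just to illustrate; I have not verified that this fixes my actual case):

using AutoGrad

w = Param(randn(3))

# pattern that (I believe) reproduces my error: writing tracked values into a Float64 buffer
function broken(w)
    out = zeros(3)              # plain Array{Float64}
    for i in 1:3
        out[i] = w[i]^2         # inside @diff this is an AutoGrad.Result -> setindex!/convert error
    end
    return sum(out)
end

# mutation-free variant: build the values with a comprehension instead of in-place writes
function mutation_free(w)
    out = [w[i]^2 for i in 1:3]
    return sum(out)
end

# J = @diff broken(w)          # errors for me, much like the stacktrace above
J = @diff mutation_free(w)     # differentiates fine, as far as I understand
grad(J, w)

In my real code that would mean building pairwise_cost from the huberloss values with a comprehension (or with cat, as I did for padding) instead of assigning into zeros(num_objects, num_objects), but I am not sure that is the right way to do it with Knet.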
Looking forward to any suggestions. Best regards,
Ahmed Imam Shah
MS. Computer Science and Engineering
Koç University, Istanbul, Turkey