
julia - Optimization: InexactError: Int64(0.01) when using IPNewton


I have this code (error.jl) that optimizes a function using the IPNewton method:

import Optim

"""
Generate a matrix of constants used in computation
"""
function get_const(x::Vector{Float64}, sigma::Vector{Float64})::Array{Float64, 2}
    exp.(-x'.^2 ./ (2 .* sigma.^2)) ./ (sigma .* sqrt(2 * π))
end

# Log likelihood for mixture model
log_likelihood(p, C::Array{Float64, 2}) = sum(log.(p' * C))

"""
Constraint: all probabilities (ps) must sum to 1
"""
function constraint!(c, ps)::typeof(c)
    c[1] = sum(ps)
    c
end

N = 100
x = range(-1, 1, length=1000) |> collect
sigma = range(0.001, 2, length=N) |> collect

C = get_const(x, sigma)

constraints = Optim.TwiceDifferentiableConstraints(
    constraint!,
    fill(0, N), fill(1, N), # 0 <= (each probability) <= 1
    fill(1, N), fill(1, N)  # 1 <= constraint(p) <= 1 (probabilities sum to 1)
)
p0 = fill(1, N) / N # initial guess == equal probabilities

res = Optim.optimize(
    ps -> -log_likelihood(ps, C), # want to MAXIMIZE, so negate
    constraints, p0,
    Optim.IPNewton()
)
Project.toml:
[deps]
Optim = "429524aa-4258-5aef-a3af-852621145aeb"
Julia version:
forcebru@thing ~/test> julia --version
julia version 1.5.3
Error message:
forcebru@thing ~/test> julia error.jl
ERROR: LoadError: InexactError: Int64(0.01)
Stacktrace:
[1] Int64 at ./float.jl:710 [inlined]
[2] convert at ./number.jl:7 [inlined]
[3] setindex! at ./array.jl:847 [inlined]
[4] _unsafe_copyto!(::Array{Int64,1}, ::Int64, ::Array{Float64,1}, ::Int64, ::Int64) at ./array.jl:257
[5] unsafe_copyto! at ./array.jl:311 [inlined]
[6] _copyto_impl! at ./array.jl:335 [inlined]
[7] copyto! at ./array.jl:321 [inlined]
[8] copyto! at ./array.jl:347 [inlined]
[9] finite_difference_jacobian!(::Array{Float64,2}, ::typeof(constraint!), ::Array{Float64,1}, ::FiniteDiff.JacobianCache{Array{Int64,1},Array{Int64,1},Array{Int64,1},UnitRange{Int64},Nothing,Val{:central}(),Int64}, ::Nothing; relstep::Float64, absstep::Float64, colorvec::UnitRange{Int64}, sparsity::Nothing, dir::Bool) at /Users/forcebru/.julia/packages/FiniteDiff/jLwWI/src/jacobians.jl:338
[10] finite_difference_jacobian!(::Array{Float64,2}, ::Function, ::Array{Float64,1}, ::FiniteDiff.JacobianCache{Array{Int64,1},Array{Int64,1},Array{Int64,1},UnitRange{Int64},Nothing,Val{:central}(),Int64}, ::Nothing) at /Users/forcebru/.julia/packages/FiniteDiff/jLwWI/src/jacobians.jl:334 (repeats 2 times)
[11] jac! at /Users/forcebru/.julia/packages/NLSolversBase/QPnui/src/objective_types/constraints.jl:298 [inlined]
[12] initial_state(::Optim.IPNewton{typeof(Optim.backtrack_constrained_grad),Symbol}, ::Optim.Options{Float64,Nothing}, ::NLSolversBase.TwiceDifferentiable{Float64,Array{Float64,1},Array{Float64,2},Array{Float64,1}}, ::NLSolversBase.TwiceDifferentiableConstraints{typeof(constraint!),NLSolversBase.var"#jac!#126"{typeof(constraint!),FiniteDiff.JacobianCache{Array{Int64,1},Array{Int64,1},Array{Int64,1},UnitRange{Int64},Nothing,Val{:central}(),Int64}},NLSolversBase.var"#con_hess!#130"{Int64,Array{Int64,2},Array{Int64,3},NLSolversBase.var"#jac_vec!#129"{Int64,Int64},FiniteDiff.JacobianCache{Array{Int64,1},Array{Int64,1},Array{Int64,1},UnitRange{Int64},Nothing,Val{:central}(),Int64}},Int64}, ::Array{Float64,1}) at /Users/forcebru/.julia/packages/Optim/D7azp/src/multivariate/solvers/constrained/ipnewton/ipnewton.jl:135
[13] optimize(::NLSolversBase.TwiceDifferentiable{Float64,Array{Float64,1},Array{Float64,2},Array{Float64,1}}, ::NLSolversBase.TwiceDifferentiableConstraints{typeof(constraint!),NLSolversBase.var"#jac!#126"{typeof(constraint!),FiniteDiff.JacobianCache{Array{Int64,1},Array{Int64,1},Array{Int64,1},UnitRange{Int64},Nothing,Val{:central}(),Int64}},NLSolversBase.var"#con_hess!#130"{Int64,Array{Int64,2},Array{Int64,3},NLSolversBase.var"#jac_vec!#129"{Int64,Int64},FiniteDiff.JacobianCache{Array{Int64,1},Array{Int64,1},Array{Int64,1},UnitRange{Int64},Nothing,Val{:central}(),Int64}},Int64}, ::Array{Float64,1}, ::Optim.IPNewton{typeof(Optim.backtrack_constrained_grad),Symbol}, ::Optim.Options{Float64,Nothing}) at /Users/forcebru/.julia/packages/Optim/D7azp/src/multivariate/solvers/constrained/ipnewton/interior.jl:228
[14] optimize(::Function, ::NLSolversBase.TwiceDifferentiableConstraints{typeof(constraint!),NLSolversBase.var"#jac!#126"{typeof(constraint!),FiniteDiff.JacobianCache{Array{Int64,1},Array{Int64,1},Array{Int64,1},UnitRange{Int64},Nothing,Val{:central}(),Int64}},NLSolversBase.var"#con_hess!#130"{Int64,Array{Int64,2},Array{Int64,3},NLSolversBase.var"#jac_vec!#129"{Int64,Int64},FiniteDiff.JacobianCache{Array{Int64,1},Array{Int64,1},Array{Int64,1},UnitRange{Int64},Nothing,Val{:central}(),Int64}},Int64}, ::Array{Float64,1}, ::Optim.IPNewton{typeof(Optim.backtrack_constrained_grad),Symbol}, ::Optim.Options{Float64,Nothing}; inplace::Bool, autodiff::Symbol) at /Users/forcebru/.julia/packages/Optim/D7azp/src/multivariate/optimize/interface.jl:148
[15] optimize(::Function, ::NLSolversBase.TwiceDifferentiableConstraints{typeof(constraint!),NLSolversBase.var"#jac!#126"{typeof(constraint!),FiniteDiff.JacobianCache{Array{Int64,1},Array{Int64,1},Array{Int64,1},UnitRange{Int64},Nothing,Val{:central}(),Int64}},NLSolversBase.var"#con_hess!#130"{Int64,Array{Int64,2},Array{Int64,3},NLSolversBase.var"#jac_vec!#129"{Int64,Int64},FiniteDiff.JacobianCache{Array{Int64,1},Array{Int64,1},Array{Int64,1},UnitRange{Int64},Nothing,Val{:central}(),Int64}},Int64}, ::Array{Float64,1}, ::Optim.IPNewton{typeof(Optim.backtrack_constrained_grad),Symbol}, ::Optim.Options{Float64,Nothing}) at /Users/forcebru/.julia/packages/Optim/D7azp/src/multivariate/optimize/interface.jl:147 (repeats 2 times)
[16] top-level scope at /Users/forcebru/test/error.jl:27
[17] include(::Function, ::Module, ::String) at ./Base.jl:380
[18] include(::Module, ::String) at ./Base.jl:368
[19] exec_options(::Base.JLOptions) at ./client.jl:296
[20] _start() at ./client.jl:506
in expression starting at /Users/forcebru/test/error.jl:27
forcebru@thing ~/test [1]>
So... InexactError: Int64(0.01)? And it seems to originate from within Optim, too?
I understand that the InexactError here means Julia can't convert 0.01 to an integer, which makes sense. But I have no idea where that 0.01 even comes from! How can I track down its origin? What's wrong with this code, and what can be done to fix it?

Edit: I noticed that the 0.01 must be an element of p0 = fill(1, N) / N, because if I set N = 50 the error becomes InexactError: Int64(0.02), where 0.02 == 1/N. But why is it trying to convert it to an integer in the first place?
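For reference, a quick sanity check (my own, not part of error.jl) confirms that p0 is already a Float64 vector, so the conversion to an integer must be happening somewhere else:

N = 100
p0 = fill(1, N) / N   # dividing a Vector{Int64} by an Int promotes to Vector{Float64}
eltype(p0)            # Float64
p0[1]                 # 0.01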

Best answer

After looking more closely at these parts of the error message:

 [8] copyto! at ./array.jl:347 [inlined]
[9] finite_difference_jacobian!(::Array{Float64,2}, ::typeof(constraint!), ::Array{Float64,1}, ::FiniteDiff.JacobianCache{Array{Int64,1},Array{Int64,1},Array{Int64,1},UnitRange{Int64},Nothing,Val{:central}(),Int64}, ::Nothing; relstep::Float64, absstep::Float64, colorvec::UnitRange{Int64}, sparsity::Nothing, dir::Bool) at /Users/forcebru/.julia/packages/FiniteDiff/jLwWI/src/jacobians.jl:338
...
[15] optimize(::Function, ::NLSolversBase.TwiceDifferentiableConstraints{typeof(constraint!),NLSolversBase.var"#jac!#126"{typeof(constraint!),FiniteDiff.JacobianCache{Array{Int64,1},Array{Int64,1},Array{Int64,1},UnitRange{Int64},Nothing,Val{:central}(),Int64}},NLSolversBase.var"#con_hess!#130"{Int64,Array{Int64,2},Array{Int64,3},NLSolversBase.var"#jac_vec!#129"{Int64,Int64},FiniteDiff.JacobianCache{Array{Int64,1},Array{Int64,1},Array{Int64,1},UnitRange{Int64},Nothing,Val{:central}(),Int64}},Int64}, ::Array{Float64,1}, ::Optim.IPNewton{typeof(Optim.backtrack_constrained_grad),Symbol}, ::Optim.Options{Float64,Nothing}) at /Users/forcebru/.julia/packages/Optim/D7azp/src/multivariate/optimize/interface.jl:147 (repeats 2 times)
[16] top-level scope at /Users/forcebru/test/error.jl:27
...I see that the FiniteDiff.JacobianCache for the constraints is inferred to be parameterized with Int64:
FiniteDiff.JacobianCache{
    Array{Int64,1},
    Array{Int64,1},
    Array{Int64,1},
    UnitRange{Int64},
    Nothing,
    Val{:central}(),
    Int64
}
...which is odd, because I clearly want to optimize over real numbers.
It turns out that in this part of the code:
constraints = Optim.TwiceDifferentiableConstraints(
    constraint!,
    fill(0, N), fill(1, N), # 0 <= (each probability) <= 1
    fill(1, N), fill(1, N)  # 1 <= constraint(p) <= 1 (probabilities sum to 1)
)
fill(0, N) and friends are all integer arrays, because the literal 0 is an integer. Apparently this is what leads to the attempted conversion from floating-point to integer further down the line.
I changed this code to:
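Here is a minimal sketch (not from the original post, just to illustrate the mechanism) of the same failure: copying Float64 values into an Int64 array raises exactly this InexactError, which mirrors what finite_difference_jacobian! ends up doing once its JacobianCache arrays have been built as Array{Int64,1} from the integer bounds:

N = 100
lower = fill(0, N)    # Vector{Int64}, because the literal 0 is an Int
p0 = fill(1, N) / N   # Vector{Float64}, every element equal to 0.01
copyto!(lower, p0)    # ERROR: InexactError: Int64(0.01)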
constraints = Optim.TwiceDifferentiableConstraints(
    constraint!,
    fill(0., N), fill(1., N), # 0 <= (each probability) <= 1
    fill(1., N), fill(1., N)  # 1 <= constraint(p) <= 1 (probabilities sum to 1)
)
...and now there is no error (the algorithm doesn't converge, but that's a separate problem).
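An equivalent way to build the Float64 bounds (a matter of style, not required for the fix) is to use zeros and ones instead of fill with float literals:

constraints = Optim.TwiceDifferentiableConstraints(
    constraint!,
    zeros(N), ones(N), # 0.0 <= (each probability) <= 1.0
    ones(N), ones(N)   # 1.0 <= constraint(p) <= 1.0 (probabilities sum to 1)
)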

Regarding "julia - Optimization: InexactError: Int64(0.01) when using IPNewton", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/65586071/
