Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions DifferentiationInterface/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

### Fixed

- Take `absstep` into account for FiniteDiff ([#812])
- Make basis work for `CuArray` ([#810])

## [0.7.0]
Expand Down Expand Up @@ -39,6 +40,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
[0.6.54]: https://github.com/JuliaDiff/DifferentiationInterface.jl/compare/DifferentiationInterface-v0.6.53...DifferentiationInterface-v0.6.54
[0.6.53]: https://github.com/JuliaDiff/DifferentiationInterface.jl/compare/DifferentiationInterface-v0.6.52...DifferentiationInterface-v0.6.53

[#812]: https://github.com/JuliaDiff/DifferentiationInterface.jl/pull/812
[#810]: https://github.com/JuliaDiff/DifferentiationInterface.jl/pull/810
[#799]: https://github.com/JuliaDiff/DifferentiationInterface.jl/pull/799
[#795]: https://github.com/JuliaDiff/DifferentiationInterface.jl/pull/795
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ function DI.prepare_pushforward_nokwarg(
absstep = if isnothing(backend.absstep)
relstep
else
backend.relstep
backend.absstep
end
dir = backend.dir
return FiniteDiffOneArgPushforwardPrep(_sig, cache, relstep, absstep, dir)
Expand Down Expand Up @@ -144,7 +144,7 @@ function DI.prepare_derivative_nokwarg(
absstep = if isnothing(backend.absstep)
relstep
else
backend.relstep
backend.absstep
end
dir = backend.dir
return FiniteDiffOneArgDerivativePrep(_sig, cache, relstep, absstep, dir)
Expand Down Expand Up @@ -269,7 +269,7 @@ function DI.prepare_gradient_nokwarg(
absstep = if isnothing(backend.absstep)
relstep
else
backend.relstep
backend.absstep
end
dir = backend.dir
return FiniteDiffGradientPrep(_sig, cache, relstep, absstep, dir)
Expand Down Expand Up @@ -359,7 +359,7 @@ function DI.prepare_jacobian_nokwarg(
absstep = if isnothing(backend.absstep)
relstep
else
backend.relstep
backend.absstep
end
dir = backend.dir
return FiniteDiffOneArgJacobianPrep(_sig, cache, relstep, absstep, dir)
Expand Down Expand Up @@ -465,8 +465,16 @@ function DI.prepare_hessian_nokwarg(
else
backend.relstep
end
absstep_g = isnothing(backend.absstep) ? relstep_g : backend.absstep
absstep_h = isnothing(backend.absstep) ? relstep_h : backend.absstep
absstep_g = if isnothing(backend.absstep)
relstep_g
else
backend.absstep
end
absstep_h = if isnothing(backend.absstep)
relstep_h
else
backend.absstep
end
return FiniteDiffHessianPrep(
_sig, gradient_cache, hessian_cache, relstep_g, absstep_g, relstep_h, absstep_h
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ function DI.prepare_pushforward_nokwarg(
absstep = if isnothing(backend.absstep)
relstep
else
backend.relstep
backend.absstep
end
dir = backend.dir
return FiniteDiffTwoArgPushforwardPrep(_sig, cache, relstep, absstep, dir)
Expand Down Expand Up @@ -175,7 +175,7 @@ function DI.prepare_derivative_nokwarg(
absstep = if isnothing(backend.absstep)
relstep
else
backend.relstep
backend.absstep
end
dir = backend.dir
return FiniteDiffTwoArgDerivativePrep(_sig, cache, relstep, absstep, dir)
Expand Down Expand Up @@ -295,7 +295,7 @@ function DI.prepare_jacobian_nokwarg(
absstep = if isnothing(backend.absstep)
relstep
else
backend.relstep
backend.absstep
end
dir = backend.dir
return FiniteDiffTwoArgJacobianPrep(_sig, cache, relstep, absstep, dir)
Expand Down
42 changes: 42 additions & 0 deletions DifferentiationInterface/test/Back/FiniteDiff/test.jl
Original file line number Diff line number Diff line change
Expand Up @@ -72,3 +72,45 @@ end
logging=LOGGING,
)
end;

@testset "Step size" begin # regression test for issue 811
    # With an explicit absolute step, every preparation object must record
    # both the relative and the absolute step exactly as configured.
    backend = AutoFiniteDiff(; absstep=1000, relstep=0.1)
    for prep in (
        prepare_pushforward(identity, backend, 1.0, (1.0,)),
        prepare_pushforward(copyto!, [0.0], backend, [1.0], ([1.0],)),
        prepare_derivative(identity, backend, 1.0),
        prepare_derivative((y, x) -> y .= x, [0.0], backend, 1.0),
        prepare_gradient(sum, backend, [1.0]),
        prepare_jacobian(identity, backend, [1.0]),
        prepare_jacobian(copyto!, [0.0], backend, [1.0]),
    )
        @test prep.relstep == 0.1
        @test prep.absstep == 1000
    end
    hess_prep = prepare_hessian(sum, backend, [1.0])
    @test hess_prep.relstep_g == 0.1
    @test hess_prep.relstep_h == 0.1
    @test hess_prep.absstep_g == 1000
    @test hess_prep.absstep_h == 1000

    # Without an explicit absolute step, the absolute step must default
    # to the relative step in every preparation object.
    backend = AutoFiniteDiff(; relstep=0.1)
    for prep in (
        prepare_pushforward(identity, backend, 1.0, (1.0,)),
        prepare_pushforward(copyto!, [0.0], backend, [1.0], ([1.0],)),
        prepare_derivative(identity, backend, 1.0),
        prepare_derivative((y, x) -> y .= x, [0.0], backend, 1.0),
        prepare_gradient(sum, backend, [1.0]),
        prepare_jacobian(identity, backend, [1.0]),
        prepare_jacobian(copyto!, [0.0], backend, [1.0]),
    )
        @test prep.relstep == 0.1
        @test prep.absstep == 0.1
    end
    hess_prep = prepare_hessian(sum, backend, [1.0])
    @test hess_prep.relstep_g == 0.1
    @test hess_prep.relstep_h == 0.1
    @test hess_prep.absstep_g == 0.1
    @test hess_prep.absstep_h == 0.1
end