Skip to content

Commit 07ff8be

Browse files
committed
switch to LBFGS
1 parent 0734554 commit 07ff8be

File tree

1 file changed: +13 additions, −13 deletions
Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,31 +1,31 @@
11
using YaoExtensions, Yao
22
using Test, Random
3-
using QuAlgorithmZoo: Adam, update!
3+
using Optim: LBFGS, optimize, Options
44

5+
# port the `Matrix` function to Yao's AD.
56
include("zygote_patch.jl")
67

78
# L1 distance between the target matrix `u` and the matrix realized
# by the parametrized circuit `ansatz`.
function loss(u, ansatz)
    realized = Matrix(ansatz)
    return sum(abs, u .- realized)
end
1112

"""
    learn_u4(u::AbstractMatrix; niter=100)

Learn a general U4 gate approximating the unitary `u`.

The circuit parameters are optimized with LBFGS for at most `niter`
iterations; returns the trained ansatz circuit.
"""
function learn_u4(u::AbstractMatrix; niter=100)
    # `general_U4()` starts with all parameters at 0; attach a global
    # phase gate so the learned matrix can match `u` exactly (not just
    # up to an overall phase).
    ansatz = general_U4() * put(2, 1=>phase(0.0))
    # In-place gradient for Optim: sync the circuit with `x`, then write
    # the parameter gradient into `G`.
    g!(G, x) = (dispatch!(ansatz, x); G .= gradient(ansatz->loss(u, ansatz), ansatz)[1])
    # NOTE: `Options` must be imported explicitly (`using Optim: LBFGS,
    # optimize` alone does not bind the module name `Optim`, so the old
    # `Optim.Options` reference was unresolved).
    optimize(x->(dispatch!(ansatz, x); loss(u, ansatz)), g!, parameters(ansatz),
        LBFGS(), Options(iterations=niter))
    println("final loss = $(loss(u,ansatz))")
    return ansatz
end
2327

# Demo: learn a random 4x4 unitary with a fixed seed for reproducibility.
using Random
Random.seed!(2)
u = rand_unitary(4)
c = learn_u4(u)

0 commit comments

Comments
 (0)