Commit

add two new constructors for the GramMatrix based on modified OP moments
MikaelSlevinsky committed Nov 26, 2024
1 parent bc0d4d4 commit 0af4a9e
Showing 3 changed files with 81 additions and 19 deletions.
1 change: 1 addition & 0 deletions docs/Project.toml
@@ -3,6 +3,7 @@ ApproxFun = "28f2ccd6-bb30-5033-b560-165f7b14dc2f"
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
FastTransforms = "057dd010-8810-581a-b7be-e3fc3b93f78c"
LaTeXStrings = "b964fa9f-0449-5b57-a5c2-d3ea65f4040f"
LazyArrays = "5078a376-72f3-5289-bfd5-ec5146d43c02"
Literate = "98b081ad-f1c9-55d3-8b20-4c87d4299306"
PlotlyJS = "f0f68f2c-4968-5e81-91da-67840de0976a"
Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
40 changes: 22 additions & 18 deletions examples/semiclassical.jl
@@ -4,9 +4,9 @@
# \langle f, g \rangle = \int_{-1}^1 f(x) g(x) w(x){\rm d} x,
# ```
# where $w(x) = w^{(\alpha,\beta,\gamma,\delta,\epsilon)}(x) = (1-x)^\alpha(1+x)^\beta(2+x)^\gamma(3+x)^\delta(5-x)^\epsilon$ is a modification of the Jacobi weight.
# We shall use results from [this paper](https://arxiv.org/abs/2302.08448) to consider these semi-classical orthogonal polynomials as modifications of the Jacobi polynomials $P_n^{(\alpha,\beta)}(x)$.
# We shall use results from [this paper](https://arxiv.org/abs/2302.08448) to consider these semi-classical orthogonal polynomials as modifications of the orthonormalized Jacobi polynomials $\tilde{P}_n^{(\alpha,\beta)}(x)$.

using ApproxFun, FastTransforms, LinearAlgebra, Plots, LaTeXStrings
using ApproxFun, FastTransforms, LazyArrays, LinearAlgebra, Plots, LaTeXStrings
const GENFIGS = joinpath(pkgdir(FastTransforms), "docs/src/generated")
!isdir(GENFIGS) && mkdir(GENFIGS)
plotlyjs()
@@ -30,7 +30,7 @@ P1 = plan_jac2cheb(Float64, n+1, α, β; normjac = true)
q = k -> lmul!(P1, lmul!(P, [zeros(k); 1.0; zeros(n-k)]))
qvals = k -> ichebyshevtransform(q(k))

# With the symmetric Jacobi matrix for $P_n^{(\alpha, \beta)}(x)$ and the modified plan, we may compute the modified Jacobi matrix and the corresponding roots (as eigenvalues):
# With the symmetric Jacobi matrix for $\tilde{P}_n^{(\alpha, \beta)}(x)$ and the modified plan, we may compute the modified Jacobi matrix and the corresponding roots (as eigenvalues):
x = Fun(x->x, NormalizedJacobi(β, α))
XP = SymTridiagonal(Symmetric(Multiplication(x, space(x))[1:n+1, 1:n+1]))
XQ = FastTransforms.modified_jacobi_matrix(P, XP)
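
# For instance, a minimal editorial sketch of that step (the elided lines of the original
# example may do this differently): assuming `XQ` is returned as a `SymTridiagonal`, the
# roots of the degree-(n+1) modified polynomial are its eigenvalues.
roots = eigvals(XQ)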
@@ -61,18 +61,21 @@ function threshold!(A::AbstractArray, ϵ)
    A
end
P′ = plan_modifiedjac2jac(Float64, n+1, α+1, β+1, v.coefficients)
DP = UpperTriangular(diagm(1=>[sqrt(n*(n+α+β+1)) for n in 1:n])) # The classical differentiation matrix representing 𝒟 P^{(α,β)}(y) = P^{(α+1,β+1)}(y) D_P.
DQ = UpperTriangular(threshold!(P′\(DP*(P*I)), 100eps())) # The semi-classical differentiation matrix representing 𝒟 Q(y) = Q̂(y) D_Q.
UpperTriangular(DQ[1:10,1:10])
DP = UpperTriangular(diagm(1=>[sqrt(n*(n+α+β+1)) for n in 1:n])) # The classical differentiation matrix representing 𝒟 P^{(α,β)}(x) = P^{(α+1,β+1)}(x) D_P.
DQ = UpperTriangular(threshold!(P′\(DP*(P*I)), 100eps())) # The semi-classical differentiation matrix representing 𝒟 Q(x) = Q̂(x) D_Q.
UpperTriangular(DQ[1:10, 1:10])

# A faster method now exists via the `GramMatrix` architecture and its associated displacement equation. We compute `U`:
U = Symmetric(Multiplication(u, space(u))[1:n+1, 1:n+1])

# Then we form a `GramMatrix` together with the Jacobi matrix:
G = GramMatrix(U, XP)
# A faster method now exists via the `GramMatrix` architecture and its associated displacement equation. Given the modified orthogonal polynomial moments implied by the normalized Jacobi series for $u(x)$, we pad this vector to the necessary size and construct the `GramMatrix` with these moments, the multiplication operator, and the constant $\tilde{P}_0^{(\alpha,\beta)}(x)$:
μ = PaddedVector(u.coefficients, 2n+1)
x = Fun(x->x, NormalizedJacobi(β, α))
XP2 = SymTridiagonal(Symmetric(Multiplication(x, space(x))[1:2n+1, 1:2n+1]))
p0 = Fun(NormalizedJacobi(β, α), [1])(0)
G = GramMatrix(μ, XP2, p0)
G[1:10, 1:10]
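
# As a quick sanity check (editorial, not part of the commit): the moment-based Gram matrix
# should agree, up to roundoff, with the one assembled from the truncated multiplication
# operator for $u(x)$, as in the earlier construction. Here `u` and `XP` are assumed to be
# in scope from above.
U = Symmetric(Multiplication(u, space(u))[1:n+1, 1:n+1])
norm(Matrix(G) - Matrix(GramMatrix(U, XP)))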

# And compute its Cholesky factorization. The upper-triangular Cholesky factor represents the connection between the original Jacobi polynomials and the semi-classical ones as ${\bf P}^{(\alpha,\beta)}(x) = {\bf Q}(x) R$.
R = cholesky(G).U
R[1:10, 1:10]
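
# A one-line editorial check: the factor reproduces the Gram matrix, $R^\top R \approx G$,
# up to roundoff.
norm(R'R - Matrix(G))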

# Everything else works almost as before, including evaluation on a Chebyshev grid:
q = k -> lmul!(P1, ldiv!(R, [zeros(k); 1.0; zeros(n-k)]))
@@ -99,11 +102,12 @@ savefig(joinpath(GENFIGS, "semiclassical1.html"))
###```

# And banded differentiation:
V = Symmetric(Multiplication(v, space(v))[1:n+1, 1:n+1])
x = Fun(x->x, NormalizedJacobi(β+1, α+1))
XP′ = SymTridiagonal(Symmetric(Multiplication(x, space(x))[1:n+1, 1:n+1]))
G′ = GramMatrix(V, XP′)
μ′ = PaddedVector(v.coefficients, 2n+1)
x′ = Fun(x->x, NormalizedJacobi(β+1, α+1))
XP′ = SymTridiagonal(Symmetric(Multiplication(x′, space(x′))[1:2n+1, 1:2n+1]))
p0′ = Fun(NormalizedJacobi(β+1, α+1), [1])(0)
G′ = GramMatrix(μ′, XP′, p0′)
R′ = cholesky(G′).U
DP = UpperTriangular(diagm(1=>[sqrt(n*(n+α+β+1)) for n in 1:n])) # The classical differentiation matrix representing 𝒟 P^{(α,β)}(y) = P^{(α+1,β+1)}(y) D_P.
DQ = UpperTriangular(threshold!(R′*(DP*(R\I)), 100eps())) # The semi-classical differentiation matrix representing 𝒟 Q(y) = Q̂(y) D_Q.
UpperTriangular(DQ[1:10,1:10])
DP = UpperTriangular(diagm(1=>[sqrt(n*(n+α+β+1)) for n in 1:n])) # The classical differentiation matrix representing 𝒟 P^{(α,β)}(x) = P^{(α+1,β+1)}(x) D_P.
DQ = UpperTriangular(threshold!(R′*(DP*(R\I)), 100eps())) # The semi-classical differentiation matrix representing 𝒟 Q(x) = Q̂(x) D_Q.
UpperTriangular(DQ[1:10, 1:10])
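
# A quick editorial check that the differentiation matrix is indeed banded: after
# thresholding, only a few superdiagonals survive. This reports the effective upper bandwidth.
maximum(I[2] - I[1] for I in findall(!iszero, Matrix(DQ)))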
59 changes: 58 additions & 1 deletion src/GramMatrix.jl
@@ -50,6 +50,64 @@ GramMatrix(W::WT, X::XT) where {T, WT <: AbstractMatrix{T}, XT <: AbstractMatrix
@inline bandwidths(G::GramMatrix) = bandwidths(G.W)
@inline MemoryLayout(G::GramMatrix) = MemoryLayout(G.W)

"""
    GramMatrix(μ::AbstractVector, X::AbstractMatrix)
Construct a GramMatrix from modified orthogonal polynomial moments and the multiplication operator.
In the standard (classical) normalization, ``p_0(x) = 1``, so that the moments
``\\mu_n = \\langle p_{n-1}, 1\\rangle`` are in fact the first column of the Gram matrix.
The recurrence is built from ``X^\\top W = WX``.
"""
GramMatrix(μ::AbstractVector{T}, X::XT) where {T, XT <: AbstractMatrix{T}} = GramMatrix(μ, X, one(T))
function GramMatrix(μ::AbstractVector{T}, X::XT, p0::T) where {T, XT <: AbstractMatrix{T}}
    N = length(μ)
    n = (N+1)÷2
    @assert N == size(X, 1) == size(X, 2)
    @assert bandwidths(X) == (1, 1)
    W = Matrix{T}(undef, N, N)
    if n > 0
        @inbounds for m in 1:N
            W[m, 1] = p0*μ[m]
        end
    end
    if n > 1
        @inbounds for m in 2:N-1
            W[m, 2] = (X[m-1, m]*W[m-1, 1] + (X[m, m]-X[1, 1])*W[m, 1] + X[m+1, m]*W[m+1, 1])/X[2, 1]
        end
    end
    @inbounds @simd for n in 3:n
        for m in n:N-n+1
            W[m, n] = (X[m-1, m]*W[m-1, n-1] + (X[m, m]-X[n-1, n-1])*W[m, n-1] + X[m+1, m]*W[m+1, n-1] - X[n-2, n-1]*W[m, n-2])/X[n, n-1]
        end
    end
    return GramMatrix(Symmetric(W[1:n, 1:n], :L), eval(XT.name.name)(view(X, 1:n, 1:n)))
end
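
# Editorial note (not in the committed source): the update in the inner loops follows from
# the displacement relation X'W = WX. Entrywise, with X symmetric tridiagonal,
#   X[m-1, m]*W[m-1, n] + X[m, m]*W[m, n] + X[m+1, m]*W[m+1, n]
#     = X[n-1, n]*W[m, n-1] + X[n, n]*W[m, n] + X[n+1, n]*W[m, n+1],
# and solving for W[m, n+1] (then shifting n ↦ n-1) gives exactly
#   W[m, n] = (X[m-1, m]*W[m-1, n-1] + (X[m, m]-X[n-1, n-1])*W[m, n-1]
#              + X[m+1, m]*W[m+1, n-1] - X[n-2, n-1]*W[m, n-2]) / X[n, n-1].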

function GramMatrix(μ::PaddedVector{T}, X::XT, p0::T) where {T, XT <: AbstractMatrix{T}}
    N = length(μ)
    b = length(μ.args[2])-1
    n = (N+1)÷2
    @assert N == size(X, 1) == size(X, 2)
    @assert bandwidths(X) == (1, 1)
    W = BandedMatrix{T}(undef, (N, N), (b, 0))
    if n > 0
        @inbounds for m in 1:min(N, b+1)
            W[m, 1] = p0*μ[m]
        end
    end
    if n > 1
        @inbounds for m in 2:min(N-1, b+2)
            W[m, 2] = (X[m-1, m]*W[m-1, 1] + (X[m, m]-X[1, 1])*W[m, 1] + X[m+1, m]*W[m+1, 1])/X[2, 1]
        end
    end
    @inbounds @simd for n in 3:n
        for m in n:min(N-n+1, b+n)
            W[m, n] = (X[m-1, m]*W[m-1, n-1] + (X[m, m]-X[n-1, n-1])*W[m, n-1] + X[m+1, m]*W[m+1, n-1] - X[n-2, n-1]*W[m, n-2])/X[n, n-1]
        end
    end
    return GramMatrix(Symmetric(W[1:n, 1:n], :L), eval(XT.name.name)(view(X, 1:n, 1:n)))
end
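
# Editorial usage sketch (mirrors examples/semiclassical.jl; assumes `u`, `n`, `α`, `β` are
# defined as in that example): the padded-moment constructor is driven by the coefficients
# of the modification u(x) expanded in the orthonormalized Jacobi basis.
#
#   μ  = PaddedVector(u.coefficients, 2n+1)
#   x  = Fun(x->x, NormalizedJacobi(β, α))
#   X  = SymTridiagonal(Symmetric(Multiplication(x, space(x))[1:2n+1, 1:2n+1]))
#   p0 = Fun(NormalizedJacobi(β, α), [1])(0)
#   G  = GramMatrix(μ, X, p0)   # lower part stored banded thanks to the padded moments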

#
# X'W-W*X = G*J*G'
# This returns G, where J = [0 1; -1 0], respecting the skew-symmetry of the right-hand side.
@@ -201,7 +259,6 @@ function compute_skew_generators(W::ChebyshevGramMatrix{T}) where T
    @inbounds @simd for j in 1:n-1
        G[j, 2] = -(μ[n+2-j] + μ[n+j])/2
    end
    G[n, 2] = -μ[2]/2
    G
end

