diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 7229ef4..8b73b4f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -29,11 +29,11 @@ jobs:
           - x64
     steps:
       - uses: actions/checkout@v2
-      - uses: julia-actions/setup-julia@v1
+      - uses: julia-actions/setup-julia@v2
        with:
          version: ${{ matrix.version }}
          arch: ${{ matrix.arch }}
-      - uses: actions/cache@v1
+      - uses: actions/cache@v4
        env:
          cache-name: cache-artifacts
        with:
@@ -46,6 +46,6 @@ jobs:
      - uses: julia-actions/julia-buildpkg@v1
      - uses: julia-actions/julia-runtest@v1
      - uses: julia-actions/julia-processcoverage@v1
-      - uses: codecov/codecov-action@v1
+      - uses: codecov/codecov-action@v4
        with:
          file: lcov.info
diff --git a/Project.toml b/Project.toml
index 175debf..326b8d2 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "SingularIntegrals"
 uuid = "d7440221-8b5e-42fc-909c-0567823f424a"
 authors = ["Sheehan Olver "]
-version = "0.3"
+version = "0.3.1"
 
 [deps]
 ArrayLayouts = "4c555306-a7a7-4459-81d9-ec55ddd5c99a"
@@ -22,7 +22,7 @@ SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b"
 ArrayLayouts = "1.4"
 BandedMatrices = "1"
 ClassicalOrthogonalPolynomials = "0.12"
-ContinuumArrays = "0.17"
+ContinuumArrays = "0.17.2"
 FastTransforms = "0.15, 0.16"
 FillArrays = "1"
 HypergeometricFunctions = "0.3.4"
diff --git a/src/logkernel.jl b/src/logkernel.jl
index aab959c..4cf266c 100644
--- a/src/logkernel.jl
+++ b/src/logkernel.jl
@@ -1,5 +1,7 @@
 const ComplexLogKernelPoint{T,C,W<:Number,V,D} = BroadcastQuasiMatrix{T,typeof(log),Tuple{ConvKernel{C,W,V,D}}}
+const ComplexLogKernelPoints{T,C,W<:AbstractVector{<:Number},V,D} = BroadcastQuasiMatrix{T,typeof(log),Tuple{ConvKernel{C,W,V,D}}}
 const LogKernelPoint{T<:Real,C,W<:Number,V,D} = BroadcastQuasiMatrix{T,typeof(log),Tuple{BroadcastQuasiMatrix{T,typeof(abs),Tuple{ConvKernel{C,W,V,D}}}}}
+const LogKernelPoints{T<:Real,C,W<:AbstractVector{<:Number},V,D} = BroadcastQuasiMatrix{T,typeof(log),Tuple{BroadcastQuasiMatrix{T,typeof(abs),Tuple{ConvKernel{C,W,V,D}}}}}
 const LogKernel{T,D1,D2} = BroadcastQuasiMatrix{T,typeof(log),Tuple{BroadcastQuasiMatrix{T,typeof(abs),Tuple{ConvKernel{T,Inclusion{T,D1},T,D2}}}}}
@@ -14,12 +16,25 @@ end
     logkernel(convert(AbstractQuasiArray{T}, P), z)
 end
 
+@simplify function *(L::LogKernelPoints, P::AbstractQuasiVecOrMat)
+    T = promote_type(eltype(L), eltype(P))
+    z, xc = L.args[1].args[1].args
+    logkernel(convert(AbstractQuasiArray{T}, P), z)
+end
+
+
 @simplify function *(L::ComplexLogKernelPoint, P::AbstractQuasiVecOrMat)
     z, xc = L.args[1].args
     T = promote_type(eltype(L), eltype(P))
     complexlogkernel(convert(AbstractQuasiArray{T}, P), z)
 end
 
+@simplify function *(L::ComplexLogKernelPoints, P::AbstractQuasiVecOrMat)
+    z, xc = L.args[1].args
+    T = promote_type(eltype(L), eltype(P))
+    complexlogkernel(convert(AbstractQuasiArray{T}, P), z)
+end
+
 ###
 # LogKernel
 ###
@@ -61,11 +76,11 @@ function logkernel_demap(wT, z)
     c = inv(kr.A)
     LP = logkernel(P, z̃)
     Σ = sum(P; dims=1)
-    transpose(c*transpose(LP) + c*log(c)*vec(Σ))
+    transpose(c*transpose(LP) .+ c*log(c)*vec(Σ))
 end
 
-logkernel_layout(::Union{MappedBasisLayouts, MappedOPLayouts}, wT, z::Number) = logkernel_demap(wT, z)
+logkernel_layout(::Union{MappedBasisLayouts, MappedOPLayouts}, wT, z...) = logkernel_demap(wT, z...)
 logkernel_layout(::WeightedOPLayout{MappedOPLayout}, wT, z::Real) = logkernel_demap(wT, z)
diff --git a/src/recurrence.jl b/src/recurrence.jl
index b718e48..977e1c8 100644
--- a/src/recurrence.jl
+++ b/src/recurrence.jl
@@ -21,6 +21,8 @@ end
 const RecurrenceVector{T, A<:AbstractVector, B<:AbstractVector, C<:AbstractVector} = RecurrenceArray{T, 1, T, A, B, C}
 const RecurrenceMatrix{T, Z<:AbstractVector, A<:AbstractVector, B<:AbstractVector, C<:AbstractVector} = RecurrenceArray{T, 2, Z, A, B, C}
 
+RecurrenceArray(z, A, B, C, data::Array{T,N}, datasize, p0, p1) where {T,N} = RecurrenceArray{T,N,typeof(z),typeof(A),typeof(B),typeof(C)}(z, A, B, C, data, datasize, p0, p1, T[])
+
 function RecurrenceArray(z::Number, (A,B,C), data::AbstractVector{T}) where T
     N = length(data)
     p0, p1 = initiateforwardrecurrence(N, A, B, C, z, one(z))
@@ -176,4 +178,30 @@ end
 ###
 # broadcasted
 ###
-broadcasted(::LazyArrayStyle, op, A::Transpose{<:Any,<:RecurrenceArray}) = transpose(op.(parent(A)))
\ No newline at end of file
+broadcasted(::LazyArrayStyle, op, A::Transpose{<:Any,<:RecurrenceArray}) = transpose(op.(parent(A)))
+
+broadcasted(::LazyArrayStyle, ::typeof(*), c::Number, A::RecurrenceArray) = RecurrenceArray(A.z, A.A, A.B, A.C, c .* A.data, A.datasize, c .* A.p0, c .* A.p1)
+function recurrence_broadcasted(op, A::RecurrenceMatrix, x::AbstractVector)
+    p = paddeddata(x)
+    n = size(p,1)
+    resizedata!(A, n, size(p,2))
+    data = copy(A.data)
+    data[1:n,:] .= op.(data[1:n,:], p)
+    RecurrenceArray(A.z, A.A, A.B, A.C, data, A.datasize, A.p0, A.p1)
+end
+
+function recurrence_broadcasted(op, A::RecurrenceVector, x::AbstractVector)
+    p = paddeddata(x)
+    n = size(p,1)
+    resizedata!(A, n)
+    data = copy(A.data)
+    data[1:n] .= op.(data[1:n], p)
+    RecurrenceArray(A.z, A.A, A.B, A.C, data, A.datasize, A.p0, A.p1)
+end
+
+for op in (:+, :-)
+    @eval begin
+        broadcasted(::LazyArrayStyle, ::typeof($op), A::RecurrenceArray, x::AbstractVector) = recurrence_broadcasted($op, A, x)
+        broadcasted(::LazyArrayStyle, ::typeof($op), A::RecurrenceVector, x::Vcat{<:Any,1}) = recurrence_broadcasted($op, A, x)
+    end
+end
\ No newline at end of file
diff --git a/test/test_logkernel.jl b/test/test_logkernel.jl
index 85a5861..e371185 100644
--- a/test/test_logkernel.jl
+++ b/test/test_logkernel.jl
@@ -13,6 +13,10 @@ using ClassicalOrthogonalPolynomials: affine
     for z in (5, 1+2im, -1+2im, 1-2im, -3+0.0im, -3-0.0im)
         @test (log.(z .- x') * P)[1:5] ≈ L.(z, 0:4)
     end
+
+    for z in ([2.1,3.], [2.1+im,-3-im])
+        @test (log.(z .- x') * P)[:,1:5] ≈ L.(z, (0:4)')
+    end
 
     for z in (-5,-1,0,0.1)
         @test_throws DomainError log.(z .- x') * P
@@ -74,6 +78,15 @@ end
     wU = Weighted(chebyshevu(1..2))
     f = wU / wU \ @.(sqrt(2-x)sqrt(x-1)exp(x))
     @test L*f ≈ 2.2374312398976586 # MAthematica
+
+    @testset "vector" begin
+        z = [3.1,4]
+        wU = Weighted(ChebyshevU())[affine(x, axes(ChebyshevU(),1)),:]
+        c = wU \ @.(sqrt(2-x)sqrt(x-1)exp(x))
+        x = axes(wU,1)
+        @test logkernel(wU, z)[:,1:1000] == (log.(abs.(z .- x')) * wU)[:,1:1000]
+        @test logkernel(wU, z) * c ≈ [logkernel(wU, 3.1)*c, logkernel(wU, 4)*c]
+    end
 end
 
 @testset "Legendre" begin
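
Usage sketch (not part of the patch): the new "vector" testset above exercises evaluating the log kernel at several points at once. The script below mirrors that test with explicit multiplication signs and illustrative variable names; it assumes the usual exports of ClassicalOrthogonalPolynomials and SingularIntegrals (ChebyshevU, Weighted, Inclusion, .., logkernel) plus the unexported affine helper already imported in the tests.

using ClassicalOrthogonalPolynomials, SingularIntegrals
using ClassicalOrthogonalPolynomials: affine

x = Inclusion(1..2)                                   # the mapped interval [1, 2]
wU = Weighted(ChebyshevU())[affine(x, axes(ChebyshevU(), 1)), :]
c = wU \ @.(sqrt(2 - x) * sqrt(x - 1) * exp(x))       # coefficients of the weighted expansion

z = [3.1, 4]                                          # several evaluation points at once
M = logkernel(wU, z)                                  # one row of log-kernel integrals per entry of z

M * c ≈ [logkernel(wU, 3.1) * c, logkernel(wU, 4) * c]    # each row agrees with the scalar-point API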