Skip to content

Commit

Permalink
Bugfix: handle duplicated embeddings when splitting clusters
Browse files Browse the repository at this point in the history
  • Loading branch information
KrainskiL committed Jul 29, 2020
1 parent a36ca2f commit 25c1519
Show file tree
Hide file tree
Showing 3 changed files with 11 additions and 7 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
Manifest.toml
example/.DS_Store
.DS_Store
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "CGE"
uuid = "f7ff1d1e-e254-4b26-babe-fc3421add060"
authors = ["KrainskiL <[email protected]>"]
version = "1.0.0"
version = "1.0.1"

[deps]
DelimitedFiles = "8bb1440f-4735-579b-a4ab-409b98df4dab"
Expand Down
15 changes: 9 additions & 6 deletions src/landmarks.jl
Original file line number Diff line number Diff line change
Expand Up @@ -157,11 +157,14 @@ function split_cluster_rss(m, w)
y = (m .- matrix_w_mean(m, w)) .* sqrt.(w)
yᵀwy = transpose(y) * y
z = y * (@view eigvecs(yᵀwy)[:, end])
l1 = Int[]
l2 = Int[]
gray = [axes(z, 1);]
rss_low = [WSSE(0.0, 0.0, 0.0) for i in axes(m, 2)]
rss_high = [WSSE(0.0, 0.0, 0.0) for i in axes(m, 2)]
l1 = Int[argmin(z)]
l2 = Int[argmax(z)]
if l1 == l2
throw(ErrorException("Trying to split homogenous cluster"))
end
gray = setdiff(axes(z, 1), l1, l2)
rss_low = [WSSE(m[l1[1],i]^2*w[l1[1]], m[l1[1],i]*w[l1[1]], w[l1[1]]) for i in axes(m, 2)]
rss_high = [WSSE(m[l2[1],i]^2*w[l2[1]], m[l2[1],i]*w[l2[1]], w[l2[1]]) for i in axes(m, 2)]
med = median(z)
t1 = Int[]
t2 = Int[]
Expand Down Expand Up @@ -417,7 +420,7 @@ function landmarks(edges::Array{Int,2}, weights::Vector{Float64}, vweights::Vect
landmark_edges = Array{Int}(undef, Int(N*(N+1)/2), 3)
for i in 1:N
for j in i:N
landmark_edges[CGE.idx(N,i,j),:] = [i j wedges[i,j]]
landmark_edges[idx(N,i,j),:] = [i j wedges[i,j]]
end
end
landmark_edges = landmark_edges[landmark_edges[:,3] .>0,:]
Expand Down

0 comments on commit 25c1519

Please sign in to comment.