"""
    Coreset()

Coreset algorithm implementation, based on "Lucic, Mario & Bachem,
Olivier & Krause, Andreas. (2015). Strong Coresets for Hard and Soft Bregman
Clustering with Applications to Exponential Family Mixtures."

`Coreset` supports the following arguments:
- `m`: subsample size (default: 100)
- `alg`: algorithm used to cluster the subsample (default: `Lloyd()`)

It can be used directly in the `kmeans` function:

```julia
X = rand(30, 100_000) # 100_000 random points in 30 dimensions

# 3 clusters, Coreset algorithm with the default Lloyd algorithm and a subsample of 100 points
kmeans(Coreset(), X, 3)

# 3 clusters, Coreset algorithm with the Hamerly algorithm and a subsample of 500 points
kmeans(Coreset(m = 500, alg = Hamerly()), X, 3)
kmeans(Coreset(500, Hamerly()), X, 3)

# alternatively, a short form can be used to set only the subsample size or only the algorithm
kmeans(Coreset(500), X, 3)       # subsample of size 500, Lloyd clustering algorithm
kmeans(Coreset(Hamerly()), X, 3) # subsample of size 100, Hamerly clustering algorithm
```
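
The result is returned as the usual `KmeansResult` (a sketch, assuming the
package's standard field names; `Coreset` leaves `costs`, `counts` and
`wcounts` empty):

```julia
res = kmeans(Coreset(), X, 3)
res.assignments # cluster index of every point in X
res.totalcost   # objective evaluated on the full dataset
res.converged   # whether the clustering run on the coreset converged
```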
| 28 | +""" |
struct Coreset{T <: AbstractKMeansAlg} <: AbstractKMeansAlg
    m::Int
    alg::T
end

Coreset(; m = 100, alg = Lloyd()) = Coreset(m, alg)
Coreset(m::Int) = Coreset(m, Lloyd())
Coreset(alg::AbstractKMeansAlg) = Coreset(100, alg)
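
# The keyword and positional convenience constructors above make these
# calls equivalent (illustrative only):
#     Coreset()          -> Coreset(100, Lloyd())
#     Coreset(500)       -> Coreset(500, Lloyd())
#     Coreset(Hamerly()) -> Coreset(100, Hamerly())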

function kmeans!(alg::Coreset, containers, X, k, weights;
                 n_threads = Threads.nthreads(),
                 k_init = "k-means++", max_iters = 300,
                 tol = eltype(X)(1e-6), verbose = false, init = nothing)
    nrow, ncol = size(X)
    centroids = isnothing(init) ? smart_init(X, k, n_threads, init=k_init).centroids : deepcopy(init)

    T = eltype(X)
    # Steps 2-4 of the paper's Algorithm 3:
    # distribute points over the centers and accumulate per-cluster counts and costs
    @parallelize n_threads ncol chunk_fit(alg, containers, centroids, X, weights)

    # merge the per-thread accumulators; after this step containers.centroids_const
    # holds the per-cluster part of the sensitivity formula
    collect_containers(alg, containers, n_threads)

    # step 7 of Algorithm 3
    @parallelize n_threads ncol chunk_update_sensitivity(alg, containers)

    # sample points proportionally to their sensitivities containers.s
    coreset_ids = wsample(1:ncol, containers.s, alg.m)
    coreset = X[:, coreset_ids]
    # create new weights as 1/s[i]
    coreset_weights = one(T) ./ @view containers.s[coreset_ids]

    # run the usual kmeans on the coreset with the new weights
    res = kmeans(alg.alg, coreset, k, coreset_weights, tol = tol, max_iters = max_iters,
                 verbose = verbose, init = centroids, n_threads = n_threads)

    # assign every point of the full dataset to the final centers and compute the total cost
    @parallelize n_threads ncol chunk_apply(alg, containers, res.centers, X, weights)

    totalcost = sum(containers.totalcost)

    return KmeansResult(res.centers, containers.labels, T[], Int[], T[], totalcost, res.iterations, res.converged)
end
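
# Why inverse-sensitivity weights: a point drawn with probability proportional
# to s(x) and reweighted by 1 / s(x) contributes, in expectation, the same to
# the weighted coreset cost as it does to the full cost, up to one global
# constant that does not affect the argmin. A minimal standalone sketch of
# just this sampling step (hypothetical helper, not part of the package API;
# `wsample` is the same StatsBase sampler used above):
function _coreset_sampling_sketch(s::AbstractVector{<:Real}, m::Int)
    ids = wsample(1:length(s), s, m) # draw m indices with probability ∝ s
    return ids, 1 ./ s[ids]          # inverse-probability weights
end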

function create_containers(alg::Coreset, X, k, nrow, ncol, n_threads)
    T = eltype(X)

    centroids_cnt = Vector{Vector{T}}(undef, n_threads)
    centroids_dist = Vector{Vector{T}}(undef, n_threads)

    for i in 1:n_threads
        centroids_cnt[i] = zeros(T, k)
        centroids_dist[i] = zeros(T, k)
    end

    labels = Vector{Int}(undef, ncol)

    # sensitivity of each point
    s = Vector{T}(undef, ncol)

    # per-thread total cost; $c_\phi$ in the paper is J / |\mathcal{X}|
    J = Vector{T}(undef, n_threads)

    # $\alpha$ as prescribed by the paper's Algorithm 3
    alpha = 16 * (log(k) + 2)

    centroids_const = Vector{T}(undef, k)

    # per-thread totals for the final cost calculation
    totalcost = Vector{T}(undef, n_threads)

    return (
        centroids_cnt = centroids_cnt,
        centroids_dist = centroids_dist,
        s = s,
        labels = labels,
        totalcost = totalcost,
        J = J,
        centroids_const = centroids_const,
        alpha = alpha
    )
end

function chunk_fit(alg::Coreset, containers, centroids, X, weights, r, idx)
    centroids_cnt = containers.centroids_cnt[idx]
    centroids_dist = containers.centroids_dist[idx]
    labels = containers.labels
    s = containers.s
    T = eltype(X)

    J = zero(T)
    for i in r
        dist = distance(X, centroids, i, 1)
        label = 1
        for j in 2:size(centroids, 2)
            new_dist = distance(X, centroids, i, j)

            # calculation of the closest center (steps 2-3 of the paper's Algorithm 3)
            label = new_dist < dist ? j : label
            dist = new_dist < dist ? new_dist : dist
        end
        labels[i] = label

        # calculation of $c_\phi$ (step 4)
        # Note: $d_A(x', B) = \min_{b \in B} d_A(x', b)$
        # Note: the interaction with user-supplied `weights` needs further
        # investigation; for the default `weights === nothing` it works as intended
        centroids_cnt[label] += isnothing(weights) ? one(T) : weights[i]
        centroids_dist[label] += isnothing(weights) ? dist : weights[i] * dist
        J += dist

        # for now we store the raw distance in the sensitivity; it is updated later
        s[i] = dist
    end

    containers.J[idx] = J
end

function collect_containers(::Coreset, containers, n_threads)
    # Here we transform the formula of step 6.
    # Multiplying both sides of the equation by $c_\phi / \alpha$ we obtain
    # $s(x) \leftarrow d_A(x, B) + \frac{2}{|B_i|} \sum_{x' \in B_i} d_A(x', B) + \frac{4 c_\phi |\mathcal{X}|}{\alpha |B_i|}$
    # Taking into account that $c_\phi = \frac{1}{|\mathcal{X}|} \sum_{x' \in \mathcal{X}} d_A(x', B) = J / |\mathcal{X}|$ we get
    # $s(x) \leftarrow d_A(x, B) + \frac{2}{|B_i|} \sum_{x' \in B_i} d_A(x', B) + \frac{4 J}{\alpha |B_i|}$
    # The rescaling is harmless: sensitivities are only used as relative sampling
    # weights, so a global constant factor cancels out.

    alpha = containers.alpha
    centroids_const = containers.centroids_const

    centroids_cnt = containers.centroids_cnt[1]
    centroids_dist = containers.centroids_dist[1]
    J = containers.J[1]

    @inbounds for i in 2:n_threads
        centroids_cnt .+= containers.centroids_cnt[i]
        centroids_dist .+= containers.centroids_dist[i]
        J += containers.J[i]
    end

    J = 4 * J / alpha

    for i in eachindex(centroids_dist)
        centroids_const[i] = 2 * centroids_dist[i] / centroids_cnt[i] +
            J / centroids_cnt[i]
    end
end
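
# Worked toy example of the rescaled rule above (hypothetical numbers): for
# k = 2 we have alpha = 16 * (log(2) + 2) ≈ 43.1. With total cost J = 10 and a
# cluster of |B_i| = 5 points whose distances to B sum to 4, a point of that
# cluster at distance 0.5 gets
#     s = 0.5 + 2 * 4 / 5 + 4 * 10 / (43.1 * 5) ≈ 0.5 + 1.6 + 0.19 ≈ 2.29
# so points in small or expensive clusters keep a non-negligible chance of
# being sampled even when they sit close to their center.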

function chunk_update_sensitivity(alg::Coreset, containers, r, idx)
    labels = containers.labels
    centroids_const = containers.centroids_const
    s = containers.s

    @inbounds for i in r
        s[i] += centroids_const[labels[i]]
    end
end

function chunk_apply(alg::Coreset, containers, centroids, X, weights, r, idx)
    centroids_cnt = containers.centroids_cnt[idx]
    centroids_dist = containers.centroids_dist[idx]
    labels = containers.labels
    T = eltype(X)

    J = zero(T)
    for i in r
        dist = distance(X, centroids, i, 1)
        label = 1
        for j in 2:size(centroids, 2)
            new_dist = distance(X, centroids, i, j)

            # assign the point to the closest of the final centers
            label = new_dist < dist ? j : label
            dist = new_dist < dist ? new_dist : dist
        end
        labels[i] = label
        J += isnothing(weights) ? dist : weights[i] * dist
    end

    containers.totalcost[idx] = J
end