diff --git a/lib/scholar/manifold/trimap.ex b/lib/scholar/manifold/trimap.ex
index abbaa425..d6ab48f2 100644
--- a/lib/scholar/manifold/trimap.ex
+++ b/lib/scholar/manifold/trimap.ex
@@ -133,10 +133,10 @@ defmodule Scholar.Manifold.Trimap do
   @opts_schema NimbleOptions.new!(opts_schema)
 
   defnp tempered_log(x, t) do
-    if Nx.abs(t - 1) < 1.0e-5 do
+    if Nx.abs(t - 1.0) < 1.0e-5 do
       Nx.log(x)
     else
-      (x ** (1 - t) - 1) * (1 / (1 - t))
+      1.0 / (1.0 - t) * (x ** (1.0 - t) - 1.0)
    end
  end
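For intuition, `tempered_log/2` computes log_t(x) = (x^(1 - t) - 1) / (1 - t), which approaches log(x) as t -> 1; that limit is why the branch guards |t - 1| < 1.0e-5. A minimal numerical check of this identity (a standalone sketch, not part of the patch):

```elixir
x = Nx.tensor([0.5, 1.0, 2.0, 4.0])
t = 0.999

# log_t(x) = (x^(1 - t) - 1) / (1 - t) is ~ Nx.log(x) for t near 1
tempered = Nx.multiply(1.0 / (1.0 - t), Nx.subtract(Nx.pow(x, 1.0 - t), 1.0))

Nx.subtract(tempered, Nx.log(x)) |> Nx.abs() |> Nx.reduce_max()
#=> a value close to 0.0
```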
@@ -195,7 +195,7 @@ defmodule Scholar.Manifold.Trimap do
     {samples, key, _, _, _} =
       while {samples, key, discard, rejects, i}, Nx.any(discard) do
         {new_samples, key} = Nx.Random.randint(key, 0, opts[:maxval], shape: {elem(shape, 1)})
-        discard = in1d(new_samples, rejects[i])
+        discard = in1d(new_samples, rejects[i]) or in1d(new_samples, samples)
         samples = Nx.select(discard, samples, new_samples)
         {samples, key, in1d(samples, rejects[i]), rejects, i}
       end
@@ -245,9 +245,9 @@ defmodule Scholar.Manifold.Trimap do
     sim = triplets[[.., 1]]
     out = triplets[[.., 2]]
 
-    p_sim = handle_dist(inputs[anc], inputs[sim], opts) / (sig[anc] * sig[sim])
+    p_sim = -(handle_dist(inputs[anc], inputs[sim], opts) ** 2) / (sig[anc] * sig[sim])
 
-    p_out = handle_dist(inputs[anc], inputs[out], opts) / (sig[anc] * sig[out])
+    p_out = -(handle_dist(inputs[anc], inputs[out], opts) ** 2) / (sig[anc] * sig[out])
 
     flip = p_sim < p_out
     weights = p_sim - p_out
@@ -269,7 +269,7 @@ defmodule Scholar.Manifold.Trimap do
     hits = Nx.flatten(neighbors)
 
     distances =
-      handle_dist(inputs[anchors], inputs[hits], opts) |> Nx.reshape({num_points, :auto})
+      (handle_dist(inputs[anchors], inputs[hits], opts) ** 2) |> Nx.reshape({num_points, :auto})
 
     sigmas = Nx.max(Nx.mean(Nx.sqrt(distances[[.., 3..5]]), axes: [1]), 1.0e-10)
 
@@ -282,27 +282,28 @@ defmodule Scholar.Manifold.Trimap do
   end
 
   defnp find_triplet_weights(inputs, triplets, neighbors, sigmas, distances, opts \\ []) do
-    {num_points, num_inliners} = Nx.shape(neighbors)
+    {num_points, num_inliers} = Nx.shape(neighbors)
     p_sim = -Nx.flatten(distances)
-    num_outliers = div(Nx.axis_size(triplets, 0), num_points * num_inliners)
+    num_outliers = div(Nx.axis_size(triplets, 0), num_points * num_inliers)
 
     p_sim =
-      Nx.tile(Nx.reshape(p_sim, {num_points, num_inliners}), [1, num_outliers]) |> Nx.flatten()
+      Nx.tile(Nx.reshape(p_sim, {num_points, num_inliers}), [1, num_outliers]) |> Nx.flatten()
 
-    out_distances = handle_dist(inputs[triplets[[.., 0]]], inputs[triplets[[.., 2]]], opts)
+    out_distances = handle_dist(inputs[triplets[[.., 0]]], inputs[triplets[[.., 2]]], opts) ** 2
 
     p_out = -out_distances / (sigmas[triplets[[.., 0]]] * sigmas[triplets[[.., 2]]])
     p_sim - p_out
   end
 
   defnp generate_triplets(key, inputs, opts \\ []) do
-    num_inliners = opts[:num_inliers]
+    num_inliers = opts[:num_inliers]
     num_random = opts[:num_random]
     weight_temp = opts[:weight_temp]
     num_points = Nx.axis_size(inputs, 0)
-    num_extra = min(num_inliners + 50, num_points)
+
+    num_extra = min(num_inliers + 50, num_points)
 
     neighbors =
       case opts[:knn_algorithm] do
@@ -364,13 +365,13 @@ defmodule Scholar.Manifold.Trimap do
 
     {knn_distances, neighbors, sigmas} = find_scaled_neighbors(inputs, neighbors, opts)
 
-    neighbors = neighbors[[.., 0..num_inliners]]
-    knn_distances = knn_distances[[.., 0..num_inliners]]
+    neighbors = neighbors[[.., 0..num_inliers]]
+    knn_distances = knn_distances[[.., 0..num_inliers]]
 
     {triplets, key} =
       sample_knn_triplets(key, neighbors,
         num_outliers: opts[:num_outliers],
-        num_inliers: num_inliners,
+        num_inliers: num_inliers,
         num_points: num_points
       )
 
@@ -378,15 +379,15 @@ defmodule Scholar.Manifold.Trimap do
       find_triplet_weights(
         inputs,
         triplets,
-        neighbors[[.., 1..num_inliners]],
+        neighbors[[.., 1..num_inliers]],
         sigmas,
-        knn_distances[[.., 1..num_inliners]],
+        knn_distances[[.., 1..num_inliers]],
         opts
       )
 
     flip = weights < 0
     anchors = triplets[[.., 0]] |> Nx.reshape({:auto, 1})
-    pairs = triplets[[.., 1..2]]
+    pairs = triplets[[.., 1..-1//1]]
 
     pairs =
       Nx.select(
@@ -446,7 +447,7 @@ defmodule Scholar.Manifold.Trimap do
     {loss, num_violated}
   end
 
-  defn trimap_loss(embedding, triplets, weights) do
+  defn trimap_loss({embedding, triplets, weights}) do
     {loss, _} = trimap_metrics(embedding, triplets, weights)
     loss
   end
 
@@ -499,21 +500,18 @@ defmodule Scholar.Manifold.Trimap do
     {triplets, weights, key, applied_pca?} =
       case triplets do
         {} ->
-          inputs =
+          {inputs, applied_pca} =
             if num_components > @dim_pca do
               inputs = inputs - Nx.mean(inputs, axes: [0])
               {u, s, vt} = Nx.LinAlg.SVD.svd(inputs, full_matrices: false)
               inputs = Nx.dot(u[[.., 0..@dim_pca]] * s[0..@dim_pca], vt[[0..@dim_pca, ..]])
-
-              inputs = inputs - Nx.reduce_min(inputs)
-              inputs = inputs / Nx.reduce_max(inputs)
-              inputs - Nx.mean(inputs, axes: [0])
+              {inputs, Nx.u8(1)}
             else
-              inputs
+              {inputs, Nx.u8(0)}
             end
 
           {triplets, weights, key} = generate_triplets(key, inputs, opts)
-          {triplets, weights, key, Nx.u8(1)}
+          {triplets, weights, key, applied_pca}
 
         _ ->
           {triplets, weights, key, Nx.u8(0)}
@@ -535,7 +533,10 @@ defmodule Scholar.Manifold.Trimap do
       opts[:init_embedding_type] == 1 ->
         {random_embedding, _key} =
-          Nx.Random.normal(key, shape: {num_points, opts[:num_components]})
+          Nx.Random.normal(key,
+            shape: {num_points, opts[:num_components]},
+            type: to_float_type(inputs)
+          )
 
         random_embedding * @init_scale
     end
 
@@ -553,10 +554,12 @@ defmodule Scholar.Manifold.Trimap do
     {embeddings, _} =
       while {embeddings, {vel, gain, lr, triplets, weights, i = Nx.s64(0)}},
             i < opts[:num_iters] do
-        gamma = if i < @switch_iter, do: @final_momentum, else: @init_momentum
-        grad = trimap_loss(embeddings + gamma * vel, triplets, weights)
+        gamma = if i < @switch_iter, do: @init_momentum, else: @final_momentum
+
+        gradient =
+          grad(embeddings + gamma * vel, fn x -> trimap_loss({x, triplets, weights}) end)
 
-        {embeddings, vel, gain} = update_embedding_dbd(embeddings, grad, vel, gain, lr, i)
+        {embeddings, vel, gain} = update_embedding_dbd(embeddings, gradient, vel, gain, lr, i)
 
         {embeddings, {vel, gain, lr, triplets, weights, i + 1}}
       end
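The loop above now differentiates the loss with `grad/2` at the momentum look-ahead point, instead of using the scalar loss value where a gradient was expected. A minimal sketch of the same look-ahead step (standalone, not part of the patch; `LookAheadSketch`, `gamma`, and `lr` are made-up names and values, and a toy quadratic loss stands in for `trimap_loss/1`):

```elixir
defmodule LookAheadSketch do
  import Nx.Defn

  # One momentum step: take the gradient at the look-ahead point
  # `embedding + gamma * vel`, then update velocity and position.
  defn step(embedding, vel, gamma, lr) do
    gradient = grad(embedding + gamma * vel, fn x -> Nx.sum(x * x) end)
    vel = gamma * vel - lr * gradient
    {embedding + vel, vel}
  end
end

embedding = Nx.tensor([[1.0, -2.0]])
vel = Nx.broadcast(0.0, {1, 2})
LookAheadSketch.step(embedding, vel, 0.5, 0.1)
```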
diff --git a/notebooks/files/mammoth.png b/notebooks/files/mammoth.png
new file mode 100644
index 00000000..7e262186
Binary files /dev/null and b/notebooks/files/mammoth.png differ
diff --git a/notebooks/manifold_learning.livemd b/notebooks/manifold_learning.livemd
new file mode 100644
index 00000000..4617b33d
--- /dev/null
+++ b/notebooks/manifold_learning.livemd
@@ -0,0 +1,149 @@
+# Manifold Learning
+
+```elixir
+Mix.install([
+  {:scholar, github: "elixir-nx/scholar"},
+  {:explorer, "~> 0.8.2", override: true},
+  {:exla, "~> 0.7.2"},
+  {:nx, "~> 0.7.2"},
+  {:req, "~> 0.4.14"},
+  {:kino_vega_lite, "~> 0.1.11"},
+  {:kino, "~> 0.12.3"},
+  {:kino_explorer, "~> 0.1.18"},
+  {:tucan, "~> 0.3.1"}
+])
+```
+
+## Setup
+
+We will use `Explorer` in this notebook, so let's define an alias for its main module, `DataFrame`:
+
+```elixir
+require Explorer.DataFrame, as: DF
+```
+
+And let's configure `EXLA` as our default backend (where our tensors are stored) and compiler (which compiles Scholar code) across the notebook and all branched sections:
+
+```elixir
+Nx.global_default_backend(EXLA.Backend)
+Nx.Defn.global_default_options(compiler: EXLA)
+```
+
+## Testing Manifold Learning Functionalities
+
+In this notebook we test how manifold learning algorithms work and what results we can get from them.
+
+First, let's fetch the dataset that we will experiment on. The data represents the 3D coordinates of a mammoth. Below we include a figure of the original dataset.
+
+![](files/mammoth.png)
+
+```elixir
+source = "https://raw.githubusercontent.com/MNoichl/UMAP-examples-mammoth-/master/mammoth_a.csv"
+
+data = Req.get!(source).body
+
+df = DF.load_csv!(data)
+```
+
+Now, we convert the dataframe into a tensor so that we can manipulate the data using `Scholar`:
+
+```elixir
+tensor_data = Nx.stack(df, axis: 1)
+```
+
+Since there are almost 1 million data points and they are sorted, we shuffle the dataset and then use only a part of it.
+
+## Trimap
+
+We start with Trimap, a manifold learning algorithm based on nearest neighbors. It preserves the global structure of the dataset well, but it does not handle the local structure properly. Let's see what Trimap produces on the mammoth dataset:
+
+```elixir
+{tensor_data, key} = Nx.Random.shuffle(Nx.Random.key(42), tensor_data)
+
+trimap_res =
+  Scholar.Manifold.Trimap.transform(tensor_data[[0..10000, ..]],
+    key: Nx.Random.key(55),
+    num_components: 2,
+    num_inliers: 12,
+    num_outliers: 4,
+    weight_temp: 0.5,
+    learning_rate: 0.1,
+    metric: :squared_euclidean
+  )
+```
+
+Now, let's plot the results of the Trimap algorithm:
+
+```elixir
+coords = [
+  x: trimap_res[[.., 0]] |> Nx.to_flat_list(),
+  y: trimap_res[[.., 1]] |> Nx.to_flat_list()
+]
+
+Tucan.layers([
+  Tucan.scatter(coords, "x", "y", point_size: 1)
+])
+|> Tucan.set_size(300, 300)
+|> Tucan.set_title(
+  "Mammoth dataset with reduced dimensionality using Trimap",
+  offset: 25
+)
+```
+
+We can clearly recognize the mammoth in this picture, so Trimap indeed preserved the global structure of the data. The result resembles a projection of the 3D mammoth onto the YZ plane. Let's plot that projection and compare the two:
+
+```elixir
+coords = [
+  x: tensor_data[[0..10000, 1]] |> Nx.to_flat_list(),
+  y: tensor_data[[0..10000, 2]] |> Nx.to_flat_list()
+]
+
+Tucan.layers([
+  Tucan.scatter(coords, "x", "y", point_size: 1)
+])
+|> Tucan.set_size(300, 300)
+|> Tucan.set_title(
+  "Mammoth dataset projected onto the YZ plane",
+  offset: 25
+)
+```
+
+The two plots are similar, but there are some important differences. Even though the second figure may seem "prettier", it is less informative than the Trimap result: in the first figure we can spot two tusks, while in the second they overlap and we see only one. Similarly, the legs overlap in the first plot, while in the second they are spread out and do not intersect each other.
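+
+To compare the two embeddings directly, we can also render them side by side. A small sketch, assuming `Tucan.hconcat/1` is available and reusing the tensors from the cells above:
+
+```elixir
+trimap_coords = [
+  x: trimap_res[[.., 0]] |> Nx.to_flat_list(),
+  y: trimap_res[[.., 1]] |> Nx.to_flat_list()
+]
+
+projection_coords = [
+  x: tensor_data[[0..10000, 1]] |> Nx.to_flat_list(),
+  y: tensor_data[[0..10000, 2]] |> Nx.to_flat_list()
+]
+
+# Assumes Tucan.hconcat/1 for horizontal concatenation of plots.
+Tucan.hconcat([
+  Tucan.scatter(trimap_coords, "x", "y", point_size: 1) |> Tucan.set_title("Trimap"),
+  Tucan.scatter(projection_coords, "x", "y", point_size: 1) |> Tucan.set_title("YZ projection")
+])
+```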
+
+## t-SNE
+
+Now, let's try a different algorithm: t-SNE.
+
+```elixir
+tsne_res =
+  Scholar.Manifold.TSNE.fit(tensor_data[[0..2000, ..]],
+    key: Nx.Random.key(55),
+    num_components: 2,
+    perplexity: 125,
+    exaggeration: 10.0,
+    learning_rate: 500,
+    metric: :squared_euclidean
+  )
+```
+
+```elixir
+coords = [
+  x: tsne_res[[.., 0]] |> Nx.to_flat_list(),
+  y: tsne_res[[.., 1]] |> Nx.to_flat_list()
+]
+
+Tucan.layers([
+  Tucan.scatter(coords, "x", "y", point_size: 1)
+])
+|> Tucan.set_size(300, 300)
+|> Tucan.set_title(
+  "Mammoth dataset with reduced dimensionality using t-SNE",
+  offset: 25
+)
+```
+
+As we can see, t-SNE gives completely different results than Trimap, because the two algorithms rest on completely different mathematical foundations. t-SNE is also slower, so it cannot be used on datasets as large as the ones Trimap handles. However, t-SNE preserves some features of the mammoth, such as the small tusks, the feet, and the body. You can experiment with the *perplexity* parameter, which can substantially change the output of the algorithm, as sketched below.
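+
+For example, here is a quick sweep (the perplexity values are arbitrary picks; everything else is kept as in the call above):
+
+```elixir
+# Same call as above, varying only perplexity.
+for perplexity <- [30, 75, 125] do
+  Scholar.Manifold.TSNE.fit(tensor_data[[0..2000, ..]],
+    key: Nx.Random.key(55),
+    num_components: 2,
+    perplexity: perplexity,
+    exaggeration: 10.0,
+    learning_rate: 500,
+    metric: :squared_euclidean
+  )
+end
+```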
diff --git a/test/scholar/manifold/trimap_test.exs b/test/scholar/manifold/trimap_test.exs
index cffe1b92..5cc42079 100644
--- a/test/scholar/manifold/trimap_test.exs
+++ b/test/scholar/manifold/trimap_test.exs
@@ -5,7 +5,7 @@ defmodule Scholar.Manifold.TrimapTest do
 
   describe "transform" do
     test "non default num_inliers and num_outliers" do
-      x = Nx.iota({5, 6})
+      x = Nx.iota({10, 6})
       key = Nx.Random.key(42)
 
       res =
@@ -18,18 +18,23 @@ defmodule Scholar.Manifold.TrimapTest do
 
       expected =
         Nx.tensor([
-          [3.3822429180145264, 3.392242908477783],
-          [3.4422430992126465, 3.4522430896759033],
-          [3.5022432804107666, 3.5122432708740234],
-          [3.5622432231903076, 3.5722432136535645],
-          [3.6222431659698486, 3.6322431564331055]
+          [113.99239349365234, 164.60028076171875],
+          [111.32695007324219, 164.60028076171875],
+          [107.72736358642578, 164.60028076171875],
+          [94.22712707519531, 164.60028076171875],
+          [77.70183563232422, 164.60028076171875],
+          [73.04618835449219, 164.60028076171875],
+          [71.35726165771484, 164.60028076171875],
+          [61.91230773925781, 164.60028076171875],
+          [58.640655517578125, 164.60028076171875],
+          [56.583343505859375, 164.60028076171875]
         ])
 
       assert_all_close(res, expected, atol: 1.0e-3, rtol: 1.0e-3)
     end
 
     test "non default num_random, weight_temp, and learning_rate" do
-      x = Nx.iota({5, 6})
+      x = Nx.iota({10, 6})
       key = Nx.Random.key(42)
 
       res =
@@ -45,18 +50,23 @@ defmodule Scholar.Manifold.TrimapTest do
 
       expected =
         Nx.tensor([
-          [3.352140426635742, 3.362140417098999],
-          [3.412140369415283, 3.42214035987854],
-          [3.472140312194824, 3.482140302658081],
-          [3.5321402549743652, 3.542140245437622],
-          [3.5921404361724854, 3.602140426635742]
+          [112.58768463134766, 164.60028076171875],
+          [108.42720794677734, 164.60028076171875],
+          [105.17445373535156, 164.60028076171875],
+          [94.98436737060547, 164.60028076171875],
+          [79.27961730957031, 164.60028076171875],
+          [70.53276824951172, 164.60028076171875],
+          [65.88448333740234, 164.60028076171875],
+          [55.379486083984375, 164.60028076171875],
+          [50.87002182006836, 164.60028076171875],
+          [49.01177215576172, 164.60028076171875]
        ])
 
       assert_all_close(res, expected, atol: 1.0e-3, rtol: 1.0e-3)
     end
 
     test "non default num_iters and init_embedding_type" do
-      x = Nx.iota({5, 6})
+      x = Nx.iota({10, 6})
       key = Nx.Random.key(42)
 
       res =
@@ -71,18 +81,23 @@ defmodule Scholar.Manifold.TrimapTest do
 
       expected =
         Nx.tensor([
-          [1.4574551582336426, 1.443753719329834],
-          [1.4331351518630981, 1.4537053108215332],
-          [1.4543260335922241, 1.4485278129577637],
-          [1.4427212476730347, 1.4643783569335938],
-          [1.449319839477539, 1.455613374710083]
+          [20.231670379638672, 20.449552536010742],
+          [19.281051635742188, 19.500879287719727],
+          [18.06662368774414, 18.24373435974121],
+          [13.929012298583984, 14.01369857788086],
+          [9.131621360778809, 9.092915534973145],
+          [7.396491050720215, 7.3155999183654785],
+          [6.82077169418335, 6.664179801940918],
+          [3.6580913066864014, 3.518498182296753],
+          [2.479952096939087, 2.3532018661499023],
+          [1.4492647647857666, 1.4677170515060425]
         ])
 
       assert_all_close(res, expected, atol: 1.0e-1, rtol: 1.0e-1)
     end
 
     test "passed precomputed triplets and weights" do
-      x = Nx.iota({5, 6})
+      x = Nx.iota({10, 6})
       key = Nx.Random.key(42)
 
       triplets = Nx.tensor([[0, 1, 2], [1, 2, 3], [2, 3, 4], [3, 4, 0], [4, 0, 1]])
       weights = Nx.tensor([1.0, 1.0, 1.0, 1.0, 1.0])
@@ -99,18 +114,23 @@ defmodule Scholar.Manifold.TrimapTest do
 
       expected =
         Nx.tensor([
-          [2.822676420211792, 3.116623878479004],
-          [2.9696502685546875, 3.116623878479004],
-          [3.116623878479004, 3.116623878479004],
-          [3.263594388961792, 3.116623878479004],
-          [3.4105637073516846, 3.116623878479004]
+          [93.04278564453125, 164.60028076171875],
+          [92.46737670898438, 164.60028076171875],
+          [82.51052856445312, 164.60028076171875],
+          [20.507057189941406, 164.60028076171875],
+          [3.474262237548828, 164.60028076171875],
+          [164.52679443359375, 164.60028076171875],
+          [164.37982177734375, 164.60028076171875],
+          [164.23284912109375, 164.60028076171875],
+          [164.0858917236328, 164.60028076171875],
+          [163.9389190673828, 164.60028076171875]
         ])
 
       assert_all_close(res, expected, atol: 1.0e-3, rtol: 1.0e-3)
     end
 
     test "passed initial_embedding" do
-      x = Nx.iota({5, 6})
+      x = Nx.iota({10, 6})
       key = Nx.Random.key(42)
 
       init_embeddings =
@@ -119,7 +139,12 @@ defmodule Scholar.Manifold.TrimapTest do
         Nx.tensor([
           [1.0, 1.0],
           [2.0, 2.0],
           [3.0, 3.0],
           [4.0, 4.0],
-          [5.0, 5.0]
+          [5.0, 5.0],
+          [6.0, 6.0],
+          [7.0, 7.0],
+          [8.0, 8.0],
+          [9.0, 9.0],
+          [10.0, 10.0]
         ])
 
       res =
@@ -133,18 +158,23 @@ defmodule Scholar.Manifold.TrimapTest do
 
       expected =
         Nx.tensor([
-          [4.396871089935303, 4.396871089935303],
-          [5.396846771240234, 5.396846771240234],
-          [6.396846771240234, 6.396846771240234],
-          [7.396847724914551, 7.396847724914551],
-          [8.39684772491455, 8.39684772491455]
+          [55.56947326660156, 55.56947326660156],
+          [58.321434020996094, 58.321434020996094],
+          [60.73122787475586, 60.73122787475586],
+          [73.6160888671875, 73.6160888671875],
+          [88.54448699951172, 88.54448699951172],
+          [92.61587524414062, 92.61587524414062],
+          [93.69548034667969, 93.69548034667969],
+          [103.44023132324219, 103.44023132324219],
+          [105.14888000488281, 105.14888000488281],
+          [107.56280517578125, 107.56280517578125]
         ])
 
       assert_all_close(res, expected, atol: 1.0e-3, rtol: 1.0e-3)
     end
 
-    test "metric set to manhattan" do
-      x = Nx.iota({5, 6})
+    test "metric set to euclidean" do
+      x = Nx.iota({10, 6})
       key = Nx.Random.key(42)
 
       res =
@@ -153,17 +183,21 @@ defmodule Scholar.Manifold.TrimapTest do
           key: key,
           num_inliers: 3,
           num_outliers: 1,
-          metric: :manhattan,
-          knn_algorithm: :nndescent
+          knn_algorithm: :brute
         )
 
       expected =
         Nx.tensor([
-          [3.3887996673583984, 3.3987996578216553],
-          [3.4487998485565186, 3.4587998390197754],
-          [3.5087997913360596, 3.5187997817993164],
-          [3.5687997341156006, 3.5787997245788574],
-          [3.6287999153137207, 3.6387999057769775]
+          [113.99239349365234, 164.60028076171875],
+          [111.32695007324219, 164.60028076171875],
+          [107.72736358642578, 164.60028076171875],
+          [94.22712707519531, 164.60028076171875],
+          [77.70183563232422, 164.60028076171875],
+          [73.04618835449219, 164.60028076171875],
+          [71.35726165771484, 164.60028076171875],
+          [61.91230773925781, 164.60028076171875],
+          [58.640655517578125, 164.60028076171875],
+          [56.583343505859375, 164.60028076171875]
        ])
 
       assert_all_close(res, expected, atol: 1.0e-3, rtol: 1.0e-3)