Skip to content

Commit

Permalink
Promote FITSNE from experimental (#4361)
Browse files Browse the repository at this point in the history
Closes #3805

Authors:
  - Micka (https://github.com/lowener)

Approvers:
  - Corey J. Nolet (https://github.com/cjnolet)

URL: #4361
  • Loading branch information
lowener authored Nov 18, 2021
1 parent 3cf9778 commit cd6fb7f
Showing 1 changed file with 3 additions and 8 deletions.
11 changes: 3 additions & 8 deletions python/cuml/manifold/t_sne.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,8 @@ class TSNE(Base,
0.8. (Barnes-Hut only.)
learning_rate_method : str 'adaptive', 'none' or None (default 'adaptive')
Either adaptive or None. 'adaptive' tunes the learning rate, early
exaggeration and perplexity automatically based on input size.
exaggeration, perplexity and n_neighbors automatically based on
input size.
n_neighbors : int (default 90)
The number of datapoints you want to use in the
attractive forces. Smaller values are better for preserving
Expand Down Expand Up @@ -482,11 +483,6 @@ class TSNE(Base,
if self.method == 'barnes_hut':
algo = TSNE_ALGORITHM.BARNES_HUT
elif self.method == 'fft':
warnings.warn("Method 'fft' is experimental and may be " +
"unstable. If you find this implementation is not" +
" behaving as intended, please consider using one" +
" of the other methods, such as 'barnes_hut' or" +
" 'exact'")
algo = TSNE_ALGORITHM.FFT
elif self.method == 'exact':
algo = TSNE_ALGORITHM.EXACT
Expand Down Expand Up @@ -529,8 +525,7 @@ class TSNE(Base,
free(params)

self._kl_divergence_ = kl_divergence
if self.verbose:
print("[t-SNE] KL divergence: {}".format(kl_divergence))
logger.debug("[t-SNE] KL divergence: {}".format(kl_divergence))
return self

@generate_docstring(convert_dtype_cast='np.float32',
Expand Down

0 comments on commit cd6fb7f

Please sign in to comment.