import pyarrow  # feather I/O backend used by pandas.read_feather
import altair as alt
# lift Altair's default 5,000-row limit so larger subreddit sets can be plotted
alt.data_transformers.disable_max_rows()
from sklearn.neighbors import NearestNeighbors
import pandas as pd
from numpy import random
import fire
import numpy as np

def base_plot(plot_data):
    """Base text-mark chart: subreddit names placed at their t-SNE coordinates."""
    base = alt.Chart(plot_data).mark_text().encode(
        alt.X('x',axis=alt.Axis(grid=False),scale=alt.Scale(domain=(-65,65))),
        alt.Y('y',axis=alt.Axis(grid=False),scale=alt.Scale(domain=(-65,65))),
        text='subreddit')

    return base

def zoom_plot(plot_data):
    """Single pannable/zoomable chart with labels colored by cluster."""
    chart = base_plot(plot_data)
    chart = chart.encode(alt.Color(field='color',type='nominal',scale=alt.Scale(scheme='category10')))
    chart = chart.interactive()
    chart = chart.properties(width=1275,height=1000)

    return chart

def viewport_plot(plot_data):
    """Linked overview-plus-detail layout: two small brushable charts drive the
    axis domains of a large labeled chart below them."""
    selector1 = alt.selection_interval(encodings=['x','y'],init={'x':(-65,65),'y':(-65,65)})
    selectorx2 = alt.selection_interval(encodings=['x'],init={'x':(30,40)})
    selectory2 = alt.selection_interval(encodings=['y'],init={'y':(-20,0)})

    base = base_plot(plot_data)

    # overview scatter used only for brushing, not for reading labels
    viewport = base.mark_point(fillOpacity=0.2,opacity=0.2).encode(
        alt.X('x',axis=alt.Axis(grid=False)),
        alt.Y('y',axis=alt.Axis(grid=False)),
    )

    viewport = viewport.properties(width=600,height=400)

    viewport1 = viewport.add_selection(selector1)

    # second overview, zoomed to the region brushed in the first
    viewport2 = viewport.encode(
        alt.X('x',axis=alt.Axis(grid=False),scale=alt.Scale(domain=selector1)),
        alt.Y('y',axis=alt.Axis(grid=False),scale=alt.Scale(domain=selector1))
    )

    viewport2 = viewport2.add_selection(selectorx2)
    viewport2 = viewport2.add_selection(selectory2)

    # detail view whose axes follow the x and y brushes on the second overview
    sr = base.encode(alt.X('x',axis=alt.Axis(grid=False),scale=alt.Scale(domain=selectorx2)),
                     alt.Y('y',axis=alt.Axis(grid=False),scale=alt.Scale(domain=selectory2))
    )

    sr = sr.encode(alt.Color(field='color',type='nominal',scale=alt.Scale(scheme='category10')))
    sr = sr.properties(width=1275,height=600)

    chart = (viewport1 | viewport2) & sr

    return chart

def assign_cluster_colors(tsne_data, clusters, n_colors, n_neighbors = 4):
    """Greedily assign one of n_colors colors to each cluster so that clusters
    whose members sit near each other in the t-SNE plane get different colors."""
    tsne_data = tsne_data.merge(clusters,on='subreddit')

    centroids = tsne_data.groupby('cluster').agg({'x':np.mean,'y':np.mean})

    color_ids = np.arange(n_colors)

    # Pairwise cluster "distance" matrix: entry [j, c] is the distance from
    # centroid c to the closest member of cluster j. Assumes cluster labels are
    # consecutive integers 0..k-1, so a label can double as a column index.
    distances = np.empty(shape=(centroids.shape[0],centroids.shape[0]))

    groups = tsne_data.groupby('cluster')
    for centroid in centroids.itertuples():
        c_dists = groups.apply(lambda r: min(np.sqrt(np.square(centroid.x - r.x) + np.square(centroid.y-r.y))))
        distances[:,centroid.Index] = c_dists

    # nbrs = NearestNeighbors(n_neighbors=n_neighbors).fit(centroids)
    # distances, indices = nbrs.kneighbors()

    nbrs = NearestNeighbors(n_neighbors=n_neighbors,metric='precomputed').fit(distances)
    distances, indices = nbrs.kneighbors()

    # greedy graph coloring over the k-nearest-neighbor graph of clusters
    color_assignments = np.repeat(-1,len(centroids))

    for i in range(len(centroids)):
        knn = indices[i]
        knn_colors = color_assignments[knn]
        available_colors = color_ids[list(set(color_ids) - set(knn_colors))]

        if(len(available_colors) > 0):
            color_assignments[i] = available_colors[0]
        else:
            raise Exception("Can't color this many neighbors with this many colors")

    centroids = centroids.reset_index()
    colors = centroids.loc[:,['cluster']]
    colors['color'] = color_assignments

    tsne_data = tsne_data.merge(colors,on='cluster')
    return(tsne_data)

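# Sketch of the inputs assign_cluster_colors expects (column names inferred
# from the merges and groupbys above; assumed, not documented in the repo):
#   tsne_data: one row per subreddit, columns ['subreddit', 'x', 'y']
#   clusters:  one row per subreddit, columns ['subreddit', 'cluster'],
#              with cluster labels as consecutive integers 0..k-1
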
def build_visualization(tsne_data, clusters, output):
    """Read t-SNE coordinates and cluster assignments from feather files, then
    save a zoomable plot to `output` and a linked-viewport plot alongside it."""
    tsne_data = pd.read_feather(tsne_data)
    clusters = pd.read_feather(clusters)

    tsne_data = assign_cluster_colors(tsne_data,clusters,10,8)

    term_zoom_plot = zoom_plot(tsne_data)

    term_zoom_plot.save(output)

    term_viewport_plot = viewport_plot(tsne_data)

    term_viewport_plot.save(output.replace(".html","_viewport.html"))

if __name__ == "__main__":
    fire.Fire(build_visualization)

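# Usage sketch (hypothetical file names; fire exposes the function's
# parameters as command-line flags):
#   python tsne_vis.py --tsne_data=tsne_subreddit_fit.feather \
#       --clusters=subreddit_clusters.feather --output=subreddit_tsne.html
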
# Example of building the same visualizations for the commenter-based embedding:
# commenter_data = pd.read_feather("tsne_author_fit.feather")
# clusters = pd.read_feather('author_3000_clusters.feather')
# commenter_data = assign_cluster_colors(commenter_data,clusters,10,8)
# commenter_zoom_plot = zoom_plot(commenter_data)
# commenter_viewport_plot = viewport_plot(commenter_data)
# commenter_zoom_plot.save("subreddit_commenters_tsne_3000.html")
# commenter_viewport_plot.save("subreddit_commenters_tsne_3000_viewport.html")

# chart = chart.properties(width=10000,height=10000)
# chart.save("test_tsne_whole.svg")
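# Note: saving to .svg/.png (as in the commented lines above) requires an extra
# rendering backend for Altair (e.g. altair_saver); the HTML outputs do not.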
