+ self.result = clustering_result(outpath=str(self.outpath.resolve()),
+ silhouette_score=self.score,
+ name=self.name,
+ n_clusters=self.n_clusters,
+ n_isolates=self.n_isolates,
+ silhouette_samples = str(self.silsampout.resolve())
+ )
+ return self.result
+
def silhouette(self):
    """Score the fitted clustering and persist per-subreddit silhouettes.

    The overall silhouette score is computed on the precomputed distance
    matrix with noise points (label -1) excluded; per-sample silhouettes
    are computed over the full matrix and written to a feather file at
    self.silsampout (set here as a side effect). Returns the overall score.
    """
    noise_mask = self.clustering.labels_ == -1
    keep = ~noise_mask
    # restrict both rows and columns of the distance matrix to kept points
    kept_mat = self.mat[keep][:, keep]
    overall = silhouette_score(kept_mat, self.clustering.labels_[keep], metric='precomputed')

    per_sample = silhouette_samples(self.mat, self.clustering.labels_, metric='precomputed')
    per_sample = pd.DataFrame({'subreddit': self.subreddits, 'score': per_sample})

    self.outpath.mkdir(parents=True, exist_ok=True)
    self.silsampout = self.outpath / ("silhouette_samples-" + self.name + ".feather")
    per_sample.to_feather(self.silsampout)
    return overall
+
def read_distance_mat(self, similarities, use_threads=True):
    """Load a feather similarity matrix and convert it to distances.

    Parameters
    ----------
    similarities : path to a feather file with a '_subreddit' id column and
        one similarity column per subreddit.
    use_threads : passed through to pandas.read_feather.

    Returns a tuple (subreddit ids, distance matrix) where distance = 1 - similarity.
    """
    df = pd.read_feather(similarities, use_threads=use_threads)
    # BUG FIX: drop('_subreddit', 1) relied on the positional `axis`
    # argument, which was removed in pandas 2.0 — use the columns= keyword.
    mat = np.array(df.drop(columns='_subreddit'))
    n = mat.shape[0]
    # force self-similarity to exactly 1 so the diagonal distance is 0
    mat[range(n), range(n)] = 1
    return (df._subreddit, 1 - mat)
+
def process_clustering(self, clustering, subreddits):
    """Summarize a fitted clustering and build the subreddit->cluster table.

    Parameters
    ----------
    clustering : fitted estimator exposing labels_ (and optionally n_iter_).
    subreddits : sequence of subreddit ids aligned with clustering.labels_.

    Side effects: sets self.n_clusters and self.n_isolates, and prints
    summary statistics. Returns a DataFrame with columns
    'subreddit' and 'cluster'.
    """
    if hasattr(clustering, 'n_iter_'):
        print(f"clustering took {clustering.n_iter_} iterations")

    clusters = clustering.labels_
    # NOTE(review): the noise label -1 counts as one cluster here — confirm intended.
    self.n_clusters = len(set(clusters))

    print(f"found {self.n_clusters} clusters")

    cluster_data = pd.DataFrame({'subreddit': subreddits, 'cluster': clustering.labels_})

    cluster_sizes = cluster_data.groupby("cluster").count().reset_index()
    print(f"the largest cluster has {cluster_sizes.loc[cluster_sizes.cluster!=-1].subreddit.max()} members")

    print(f"the median cluster has {cluster_sizes.subreddit.median()} members")

    # number of clusters containing exactly one member (includes -1 if it has one)
    n_isolates1 = (cluster_sizes.subreddit == 1).sum()

    print(f"{n_isolates1} clusters have 1 member")

    # BUG FIX: this used to be a one-row DataFrame slice
    # (cluster_sizes.loc[..., ['subreddit']]), so the print emitted a
    # DataFrame repr and self.n_isolates could be assigned a DataFrame.
    # Sum the member count of the noise cluster (-1) as an int; an empty
    # selection sums to 0.
    n_isolates2 = int(cluster_sizes.loc[cluster_sizes.cluster == -1, 'subreddit'].sum())

    print(f"{n_isolates2} subreddits are in cluster -1", flush=True)

    if n_isolates1 == 0:
        self.n_isolates = n_isolates2
    else:
        self.n_isolates = n_isolates1

    return cluster_data
+
+
class lsi_mixin():
    """Mixin recording the LSI dimensionality used by a clustering job."""

    def set_lsi_dims(self, lsi_dims):
        # Stash the dimensionality on the instance for later reporting.
        self.lsi_dims = lsi_dims