3 from pathlib import Path
 
   5 selection_data="/gscratch/comdata/output/reddit_clustering/subreddit_comment_authors-tf_10k_LSI/hdbscan/selection_data.csv"
 
   7 outpath = 'test_best.feather'
 
   8 min_clusters=50; max_isolates=5000; min_cluster_size=2
 
  10 # pick the best clustering according to silhouette score subject to contraints
 
  11 def pick_best_clustering(selection_data, output, min_clusters, max_isolates, min_cluster_size):
 
  12     df = pd.read_csv(selection_data,index_col=0)
 
  13     df = df.sort_values("silhouette_score",ascending=False)
 
  15     # not sure I fixed the bug underlying this fully or not.
 
  16     df['n_isolates_str'] = df.n_isolates.str.strip("[]")
 
  17     df['n_isolates_0'] = df['n_isolates_str'].apply(lambda l: len(l) == 0)
 
  18     df.loc[df.n_isolates_0,'n_isolates'] = 0
 
  19     df.loc[~df.n_isolates_0,'n_isolates'] = df.loc[~df.n_isolates_0].n_isolates_str.apply(lambda l: int(l))
 
  21     best_cluster = df[(df.n_isolates <= max_isolates)&(df.n_clusters >= min_clusters)&(df.min_cluster_size==min_cluster_size)].iloc[df.shape[1]]
 
  23     print(best_cluster.to_dict())
 
  24     best_path = Path(best_cluster.outpath) / (str(best_cluster['name']) + ".feather")
 
  25     shutil.copy(best_path,output)
 
# Script entry point: expose pick_best_clustering as a CLI via python-fire,
# which maps command-line flags onto the function's parameters.
if __name__ == "__main__":
    fire.Fire(pick_best_clustering)