diff --git a/README.md b/README.md index 874ed303..c39cdbee 100644 --- a/README.md +++ b/README.md @@ -4,13 +4,13 @@ [![pypi](https://img.shields.io/pypi/v/convokit.svg)](https://pypi.org/pypi/convokit/) -[![py\_versions](https://img.shields.io/badge/python-3.7%2B-blue)](https://pypi.org/pypi/convokit/) +[![py\_versions](https://img.shields.io/badge/python-3.8%2B-blue)](https://pypi.org/pypi/convokit/) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![license](https://img.shields.io/badge/license-MIT-green)](https://github.com/CornellNLP/ConvoKit/blob/master/LICENSE.md) [![Slack Community](https://img.shields.io/static/v1?logo=slack&style=flat&color=red&label=slack&message=community)](https://join.slack.com/t/convokit/shared_invite/zt-1axq34qrp-1hDXQrvSXClIbJOqw4S03Q) -This toolkit contains tools to extract conversational features and analyze social phenomena in conversations, using a [single unified interface](https://convokit.cornell.edu/documentation/architecture.html) inspired by (and compatible with) scikit-learn. Several large [conversational datasets](https://github.com/CornellNLP/ConvoKit#datasets) are included together with scripts exemplifying the use of the toolkit on these datasets. The latest version is [2.5.3](https://github.com/CornellNLP/ConvoKit/releases/tag/v2.5.2) (released 16 Jan 2022); follow the [project on GitHub](https://github.com/CornellNLP/ConvoKit) to keep track of updates. +This toolkit contains tools to extract conversational features and analyze social phenomena in conversations, using a [single unified interface](https://convokit.cornell.edu/documentation/architecture.html) inspired by (and compatible with) scikit-learn. Several large [conversational datasets](https://github.com/CornellNLP/ConvoKit#datasets) are included together with scripts exemplifying the use of the toolkit on these datasets. 
The latest version is [3.0.0](https://github.com/CornellNLP/ConvoKit/releases/tag/v3.0.0) (released July 17, 2023); follow the [project on GitHub](https://github.com/CornellNLP/ConvoKit) to keep track of updates. Read our [documentation](https://convokit.cornell.edu/documentation) or try ConvoKit in our [interactive tutorial](https://colab.research.google.com/github/CornellNLP/ConvoKit/blob/master/examples/Introduction_to_ConvoKit.ipynb). diff --git a/convokit/convokitConfig.py b/convokit/convokitConfig.py index 6d11d8dc..37f3ac37 100644 --- a/convokit/convokitConfig.py +++ b/convokit/convokitConfig.py @@ -4,13 +4,13 @@ DEFAULT_CONFIG_CONTENTS = ( - "# Default Storage Parameters\n" + "# Default Backend Parameters\n" "db_host: localhost:27017\n" "data_directory: ~/.convokit/saved-corpora\n" - "default_storage_mode: mem" + "default_backend: mem" ) -ENV_VARS = {"db_host": "CONVOKIT_DB_HOST", "default_storage_mode": "CONVOKIT_STORAGE_MODE"} +ENV_VARS = {"db_host": "CONVOKIT_DB_HOST", "default_backend": "CONVOKIT_BACKEND"} class ConvoKitConfig: @@ -52,5 +52,5 @@ def data_directory(self): return self.config_contents.get("data_directory", "~/.convokit/saved-corpora") @property - def default_storage_mode(self): - return self._get_config_from_env_or_file("default_storage_mode", "mem") + def default_backend(self): + return self._get_config_from_env_or_file("default_backend", "mem") diff --git a/convokit/coordination/coordination.py b/convokit/coordination/coordination.py index 1fb712b1..1d524479 100644 --- a/convokit/coordination/coordination.py +++ b/convokit/coordination/coordination.py @@ -1,5 +1,6 @@ from collections import defaultdict from typing import Callable, Tuple, List, Dict, Optional, Collection, Union +import copy import pkg_resources @@ -108,11 +109,22 @@ def transform(self, corpus: Corpus) -> Corpus: utterance_thresh_func=self.utterance_thresh_func, ) + # Keep record of all score update for all (speakers, target) pairs to avoid redundant operations + todo = {} 
+ for (speaker, target), score in pair_scores.items(): if self.coordination_attribute_name not in speaker.meta: speaker.meta[self.coordination_attribute_name] = {} - speaker.meta[self.coordination_attribute_name][target.id] = score - + key = (speaker, target.id) + todo.update({key: score}) + + for key, score in todo.items(): + speaker = key[0] + target = key[1] + # For avoiding mutability for the sake of DB corpus + temp_dict = copy.deepcopy(speaker.meta[self.coordination_attribute_name]) + temp_dict[target] = score + speaker.meta[self.coordination_attribute_name] = temp_dict assert isinstance(speaker, Speaker) return corpus diff --git a/convokit/expected_context_framework/demos/parliament_demo.ipynb b/convokit/expected_context_framework/demos/parliament_demo.ipynb index f7ee2390..23e31909 100644 --- a/convokit/expected_context_framework/demos/parliament_demo.ipynb +++ b/convokit/expected_context_framework/demos/parliament_demo.ipynb @@ -20,8 +20,7 @@ "outputs": [], "source": [ "import warnings\n", - "\n", - "warnings.filterwarnings(\"ignore\")" + "warnings.filterwarnings('ignore')" ] }, { @@ -74,7 +73,7 @@ }, "outputs": [], "source": [ - "# OPTION 1: DOWNLOAD CORPUS\n", + "# OPTION 1: DOWNLOAD CORPUS \n", "# UNCOMMENT THESE LINES TO DOWNLOAD CORPUS\n", "# DATA_DIR = ''\n", "# PARL_CORPUS_PATH = download('parliament-corpus', data_dir=DATA_DIR)\n", @@ -129,7 +128,7 @@ }, "outputs": [], "source": [ - "parl_corpus.load_info(\"utterance\", [\"arcs\", \"q_arcs\"])" + "parl_corpus.load_info('utterance',['arcs','q_arcs'])" ] }, { @@ -156,10 +155,7 @@ }, "outputs": [], "source": [ - "from convokit.expected_context_framework import (\n", - " ColNormedTfidfTransformer,\n", - " ExpectedContextModelTransformer,\n", - ")" + "from convokit.expected_context_framework import ColNormedTfidfTransformer, ExpectedContextModelTransformer" ] }, { @@ -187,12 +183,9 @@ } ], "source": [ - "q_tfidf_obj = ColNormedTfidfTransformer(\n", - " input_field=\"q_arcs\", 
output_field=\"q_arc_tfidf\", min_df=100, max_df=0.1, binary=False\n", - ")\n", - "q_tfidf_obj.fit(\n", - " parl_corpus, selector=lambda x: x.meta[\"is_question\"] and x.meta[\"pair_has_features\"]\n", - ")\n", + "q_tfidf_obj = ColNormedTfidfTransformer(input_field='q_arcs', output_field='q_arc_tfidf',\n", + " min_df=100, max_df=.1, binary=False)\n", + "q_tfidf_obj.fit(parl_corpus, selector=lambda x: x.meta['is_question'] and x.meta['pair_has_features'])\n", "print(len(q_tfidf_obj.get_vocabulary()))" ] }, @@ -210,10 +203,9 @@ } ], "source": [ - "a_tfidf_obj = ColNormedTfidfTransformer(\n", - " input_field=\"arcs\", output_field=\"arc_tfidf\", min_df=100, max_df=0.1, binary=False\n", - ")\n", - "a_tfidf_obj.fit(parl_corpus, selector=lambda x: x.meta[\"is_answer\"] and x.meta[\"pair_has_features\"])\n", + "a_tfidf_obj = ColNormedTfidfTransformer(input_field='arcs', output_field='arc_tfidf',\n", + " min_df=100, max_df=.1, binary=False)\n", + "a_tfidf_obj.fit(parl_corpus, selector=lambda x: x.meta['is_answer'] and x.meta['pair_has_features'])\n", "print(len(a_tfidf_obj.get_vocabulary()))" ] }, @@ -225,12 +217,8 @@ }, "outputs": [], "source": [ - "_ = q_tfidf_obj.transform(\n", - " parl_corpus, selector=lambda x: x.meta[\"is_question\"] and x.meta[\"pair_has_features\"]\n", - ")\n", - "_ = a_tfidf_obj.transform(\n", - " parl_corpus, selector=lambda x: x.meta[\"is_answer\"] and x.meta[\"pair_has_features\"]\n", - ")" + "_ = q_tfidf_obj.transform(parl_corpus, selector=lambda x: x.meta['is_question'] and x.meta['pair_has_features'])\n", + "_ = a_tfidf_obj.transform(parl_corpus, selector=lambda x: x.meta['is_answer'] and x.meta['pair_has_features'])" ] }, { @@ -262,15 +250,10 @@ "outputs": [], "source": [ "q_ec = ExpectedContextModelTransformer(\n", - " context_field=\"next_id\",\n", - " output_prefix=\"fw\",\n", - " vect_field=\"q_arc_tfidf\",\n", - " context_vect_field=\"arc_tfidf\",\n", - " n_svd_dims=25,\n", - " n_clusters=8,\n", - " random_state=1000,\n", - " 
cluster_random_state=1000,\n", - ")" + " context_field='next_id', output_prefix='fw', \n", + " vect_field='q_arc_tfidf', context_vect_field='arc_tfidf',\n", + " n_svd_dims=25, n_clusters=8,\n", + " random_state=1000, cluster_random_state=1000)" ] }, { @@ -288,11 +271,8 @@ }, "outputs": [], "source": [ - "q_ec.fit(\n", - " parl_corpus,\n", - " selector=lambda x: x.meta[\"is_question\"] and (x.meta.get(\"q_arc_tfidf__n_feats\", 0) > 0),\n", - " context_selector=lambda x: x.meta[\"is_answer\"] and (x.meta.get(\"arc_tfidf__n_feats\", 0) > 0),\n", - ")" + "q_ec.fit(parl_corpus, selector=lambda x: x.meta['is_question'] and (x.meta.get('q_arc_tfidf__n_feats',0)>0),\n", + " context_selector=lambda x: x.meta['is_answer'] and (x.meta.get('arc_tfidf__n_feats',0)>0))" ] }, { @@ -813,18 +793,8 @@ }, "outputs": [], "source": [ - "q_ec.set_cluster_names(\n", - " [\n", - " \"demand_account\",\n", - " \"shared_concern\",\n", - " \"agreement\",\n", - " \"issue_update\",\n", - " \"question_premises\",\n", - " \"request_assurance\",\n", - " \"prompt_comment\",\n", - " \"accept_propose\",\n", - " ]\n", - ")" + "q_ec.set_cluster_names(['demand_account', 'shared_concern', 'agreement', 'issue_update',\n", + " 'question_premises', 'request_assurance', 'prompt_comment', 'accept_propose'])" ] }, { @@ -956,16 +926,10 @@ }, "outputs": [], "source": [ - "_ = q_ec.transform(\n", - " parl_corpus,\n", - " selector=lambda x: x.meta[\"is_question\"] and (x.meta.get(\"q_arc_tfidf__n_feats\", 0) > 0),\n", - ")\n", + "_ = q_ec.transform(parl_corpus, selector=lambda x: x.meta['is_question'] and (x.meta.get('q_arc_tfidf__n_feats',0)>0))\n", "\n", "# this call derives representations of answers, following our method.\n", - "_ = q_ec.transform_context_utts(\n", - " parl_corpus,\n", - " selector=lambda x: x.meta[\"is_answer\"] and (x.meta.get(\"arc_tfidf__n_feats\", 0) > 0),\n", - ")" + "_ = q_ec.transform_context_utts(parl_corpus, selector=lambda x: x.meta['is_answer'] and 
(x.meta.get('arc_tfidf__n_feats',0)>0)) " ] }, { @@ -989,7 +953,7 @@ } ], "source": [ - "ut_eg_id = \"2010-03-25c.364.5\"\n", + "ut_eg_id = '2010-03-25c.364.5'\n", "eg_ut = parl_corpus.get_utterance(ut_eg_id)\n", "print(eg_ut.text)" ] @@ -1018,7 +982,7 @@ } ], "source": [ - "eg_ut.meta[\"fw_clustering.cluster\"], eg_ut.meta[\"fw_clustering.cluster_dist\"]" + "eg_ut.meta['fw_clustering.cluster'], eg_ut.meta['fw_clustering.cluster_dist']" ] }, { @@ -1049,7 +1013,7 @@ } ], "source": [ - "parl_corpus.get_vectors(\"fw_repr\", [ut_eg_id])\n", + "parl_corpus.get_vectors('fw_repr',[ut_eg_id])\n", "# technical note: for an explanation of why there are only 24 dimensions, instead of 25, see the `snip_first_dim` parameter in the documentation" ] }, @@ -1077,7 +1041,7 @@ } ], "source": [ - "eg_ut.meta[\"fw_range\"]" + "eg_ut.meta['fw_range']" ] }, { @@ -1109,23 +1073,10 @@ "outputs": [], "source": [ "for ut in parl_corpus.iter_utterances():\n", - " ut.meta[\"speaker\"] = ut.speaker.id\n", - "utt_meta_df = parl_corpus.get_attribute_table(\n", - " \"utterance\",\n", - " [\n", - " \"fw_clustering.cluster\",\n", - " \"govt\",\n", - " \"govt_coarse\",\n", - " \"is_question\",\n", - " \"is_answer\",\n", - " \"is_incumbent\",\n", - " \"is_oppn\",\n", - " \"speaker\",\n", - " \"party\",\n", - " \"tenure\",\n", - " \"next_id\",\n", - " ],\n", - ")" + " ut.meta['speaker'] = ut.speaker.id\n", + "utt_meta_df = parl_corpus.get_attribute_table('utterance',\n", + " ['fw_clustering.cluster','govt', 'govt_coarse','is_question','is_answer',\n", + " 'is_incumbent','is_oppn','speaker','party', 'tenure','next_id'])" ] }, { @@ -1143,13 +1094,10 @@ }, "outputs": [], "source": [ - "utt_meta_sub = utt_meta_df[\n", - " ((utt_meta_df.is_incumbent == True) | (utt_meta_df.is_oppn == True))\n", - " & (utt_meta_df.speaker != \"\")\n", - " & (utt_meta_df.party.notnull())\n", - " & (utt_meta_df.govt_coarse != \"thatcher+major\")\n", - " & (utt_meta_df[\"fw_clustering.cluster\"].notnull())\n", - "].copy()" + 
"utt_meta_sub = utt_meta_df[((utt_meta_df.is_incumbent == True) | (utt_meta_df.is_oppn == True))\n", + " & (utt_meta_df.speaker != '') & (utt_meta_df.party.notnull())\n", + " & (utt_meta_df.govt_coarse != 'thatcher+major')\n", + " & (utt_meta_df['fw_clustering.cluster'].notnull())].copy()" ] }, { @@ -1178,9 +1126,7 @@ " val_false = sum((col == val) & ~bool_col)\n", " nval_true = sum((col != val) & bool_col)\n", " nval_false = sum((col != val) & ~bool_col)\n", - " log_odds_entries.append(\n", - " {\"val\": val, \"log_odds\": np.log((val_true / val_false) / (nval_true / nval_false))}\n", - " )\n", + " log_odds_entries.append({'val': val, 'log_odds': np.log((val_true/val_false)/(nval_true/nval_false))})\n", " return log_odds_entries" ] }, @@ -1192,13 +1138,12 @@ }, "outputs": [], "source": [ - "log_odds_party = []\n", + "log_odds_party = []\n", "for cname in q_ec.get_cluster_names():\n", - " entry = compute_log_odds(\n", - " utt_meta_sub[\"fw_clustering.cluster\"], utt_meta_sub[\"is_incumbent\"], val_subset=[cname]\n", - " )\n", + " entry = compute_log_odds(utt_meta_sub['fw_clustering.cluster'],utt_meta_sub['is_incumbent'],\n", + " val_subset=[cname])\n", " log_odds_party += entry\n", - "log_odds_party_df = pd.DataFrame(log_odds_party).set_index(\"val\")" + "log_odds_party_df = pd.DataFrame(log_odds_party).set_index('val')" ] }, { @@ -1209,7 +1154,7 @@ }, "outputs": [], "source": [ - "type_order = log_odds_party_df.sort_values(\"log_odds\").index" + "type_order = log_odds_party_df.sort_values('log_odds').index\n" ] }, { @@ -1220,16 +1165,9 @@ }, "outputs": [], "source": [ - "display_names = [\n", - " \"Demand for account\",\n", - " \"Questioning premises\",\n", - " \"Prompt for comment\",\n", - " \"Accept and propose\",\n", - " \"Req. for assurance\",\n", - " \"Issue update\",\n", - " \"Shared concerns\",\n", - " \"Agreement\",\n", - "]" + "display_names = ['Demand for account', 'Questioning premises', 'Prompt for comment',\n", + " 'Accept and propose', 'Req. 
for assurance', 'Issue update', \n", + " 'Shared concerns', 'Agreement']" ] }, { @@ -1241,7 +1179,6 @@ "outputs": [], "source": [ "from matplotlib import pyplot as plt\n", - "\n", "%matplotlib inline" ] }, @@ -1262,21 +1199,21 @@ } ], "source": [ - "fig, ax = plt.subplots(figsize=(4, 6))\n", + "fig, ax = plt.subplots(figsize=(4,6))\n", "ax.set_xlim(-1.5, 1.5)\n", - "ax.set_ylim(-0.5, 7.5)\n", - "for i, cname in enumerate(type_order):\n", + "ax.set_ylim(-.5,7.5)\n", + "for i,cname in enumerate(type_order):\n", " log_odds = log_odds_party_df.loc[cname].log_odds\n", - " ax.scatter([log_odds], [i], color=\"black\", s=49)\n", - " ax.plot([-1.25, 1.25], [i, i], \"--\", color=\"grey\", linewidth=0.5)\n", - "ax.plot([0, 0], [-2, 8], color=\"grey\", linewidth=1)\n", + " ax.scatter([log_odds], [i], color='black',s=49)\n", + " ax.plot([-1.25,1.25],[i,i],'--', color='grey', linewidth=.5)\n", + "ax.plot([0,0],[-2,8], color='grey', linewidth=1)\n", "ax.invert_yaxis()\n", "ax.set_yticks(np.arange(len(type_order)))\n", "ax.set_yticklabels(display_names, fontsize=14)\n", - "ax.set_xticklabels([-1.5, -1, -0.5, 0, 0.5, 1, 1.5], fontsize=14)\n", - "plt.rc(\"xtick\", labelsize=12)\n", - "plt.rc(\"ytick\", labelsize=12)\n", - "ax.set_xlabel(\"log odds ratio\", fontsize=16)\n", + "ax.set_xticklabels([-1.5,-1,-.5,0,.5,1,1.5], fontsize=14)\n", + "plt.rc('xtick',labelsize=12)\n", + "plt.rc('ytick',labelsize=12)\n", + "ax.set_xlabel('log odds ratio', fontsize=16)\n", "None" ] }, @@ -1311,19 +1248,8 @@ }, "outputs": [], "source": [ - "med_tenures = pd.concat(\n", - " [\n", - " utt_meta_sub[utt_meta_sub.is_incumbent]\n", - " .groupby(\"fw_clustering.cluster\")\n", - " .tenure.median()\n", - " .rename(\"govt\"),\n", - " utt_meta_sub[~utt_meta_sub.is_incumbent]\n", - " .groupby(\"fw_clustering.cluster\")\n", - " .tenure.median()\n", - " .rename(\"oppn\"),\n", - " ],\n", - " axis=1,\n", - ")\n", + "med_tenures = 
pd.concat([utt_meta_sub[utt_meta_sub.is_incumbent].groupby('fw_clustering.cluster').tenure.median().rename('govt'),\n", + "utt_meta_sub[~utt_meta_sub.is_incumbent].groupby('fw_clustering.cluster').tenure.median().rename('oppn')], axis=1)\n", "med_in_tenure = utt_meta_sub[utt_meta_sub.is_incumbent].tenure.median()\n", "med_op_tenure = utt_meta_sub[~utt_meta_sub.is_incumbent].tenure.median()" ] @@ -1447,25 +1373,23 @@ } ], "source": [ - "fig, ax = plt.subplots(figsize=(4, 6))\n", + "fig, ax = plt.subplots(figsize=(4,6))\n", "ax.set_xlim(2, 13)\n", - "ax.set_ylim(-0.5, 7.5)\n", - "for i, cname in enumerate(type_order):\n", - " ax.scatter([med_tenures.loc[cname].govt], [i - 0.05], s=49, color=\"blue\")\n", - " ax.scatter(\n", - " [med_tenures.loc[cname].oppn], [i + 0.05], s=49, color=\"red\", facecolor=\"white\", marker=\"s\"\n", - " )\n", - " ax.plot([0.5, 14.5], [i, i], \"--\", color=\"grey\", linewidth=0.5)\n", - "ax.plot([med_in_tenure, med_in_tenure], [-2, 8], color=\"blue\", linewidth=1)\n", - "ax.plot([med_op_tenure, med_op_tenure], [-2, 8], \"--\", color=\"red\", linewidth=1)\n", + "ax.set_ylim(-.5,7.5)\n", + "for i,cname in enumerate(type_order):\n", + " ax.scatter([med_tenures.loc[cname].govt],[i-.05], s=49, color='blue')\n", + " ax.scatter([med_tenures.loc[cname].oppn],[i+.05], s=49, color='red', facecolor='white',marker='s')\n", + " ax.plot([.5,14.5],[i,i],'--', color='grey', linewidth=.5)\n", + "ax.plot([med_in_tenure, med_in_tenure],[-2,8], color='blue',linewidth=1)\n", + "ax.plot([med_op_tenure, med_op_tenure],[-2,8], '--', color='red', linewidth=1)\n", "ax.invert_yaxis()\n", - "ax.set_xticks([5, 10])\n", - "ax.set_xticklabels([5, 10], fontsize=14)\n", + "ax.set_xticks([5,10])\n", + "ax.set_xticklabels([5,10], fontsize=14)\n", "ax.set_yticks(np.arange(8))\n", "ax.set_yticklabels(display_names, fontsize=14)\n", - "ax.set_xlabel(\"median tenure\", fontsize=16)\n", - "plt.rc(\"xtick\", labelsize=12)\n", - "plt.rc(\"ytick\", labelsize=12)" + 
"ax.set_xlabel('median tenure', fontsize=16)\n", + "plt.rc('xtick',labelsize=12)\n", + "plt.rc('ytick',labelsize=12)" ] }, { @@ -1509,15 +1433,10 @@ "outputs": [], "source": [ "a_ec = ExpectedContextModelTransformer(\n", - " context_field=\"reply_to\",\n", - " output_prefix=\"bk\",\n", - " vect_field=\"arc_tfidf\",\n", - " context_vect_field=\"q_arc_tfidf\",\n", - " n_svd_dims=15,\n", - " n_clusters=5,\n", - " random_state=1000,\n", - " cluster_random_state=1000,\n", - ")" + " context_field='reply_to', output_prefix='bk', \n", + " vect_field='arc_tfidf', context_vect_field='q_arc_tfidf',\n", + " n_svd_dims=15, n_clusters=5,\n", + " random_state=1000, cluster_random_state=1000)" ] }, { @@ -1528,12 +1447,8 @@ }, "outputs": [], "source": [ - "a_ec.fit(\n", - " parl_corpus,\n", - " selector=lambda x: x.meta[\"is_answer\"] and (x.meta.get(\"arc_tfidf__n_feats\", 0) > 0),\n", - " context_selector=lambda x: x.meta[\"is_question\"]\n", - " and (x.meta.get(\"q_arc_tfidf__n_feats\", 0) > 0),\n", - ")" + "a_ec.fit(parl_corpus, selector=lambda x: x.meta['is_answer'] and (x.meta.get('arc_tfidf__n_feats',0)>0),\n", + " context_selector=lambda x: x.meta['is_question'] and (x.meta.get('q_arc_tfidf__n_feats',0)>0))" ] }, { @@ -1858,7 +1773,7 @@ }, "outputs": [], "source": [ - "a_ec.set_cluster_names([\"progress_report\", \"statement\", \"endorsement\", \"comment\", \"commitment\"])" + "a_ec.set_cluster_names(['progress_report', 'statement', 'endorsement', 'comment', 'commitment'])" ] }, { @@ -1966,10 +1881,7 @@ }, "outputs": [], "source": [ - "_ = a_ec.transform(\n", - " parl_corpus,\n", - " selector=lambda x: x.meta[\"is_answer\"] and (x.meta.get(\"arc_tfidf__n_feats\", 0) > 0),\n", - ")" + "_ = a_ec.transform(parl_corpus, selector=lambda x: x.meta['is_answer'] and (x.meta.get('arc_tfidf__n_feats',0)>0))" ] }, { @@ -1981,17 +1893,12 @@ }, "outputs": [], "source": [ - "a_utt_meta_df = parl_corpus.get_attribute_table(\"utterance\", [\"bk_clustering.cluster\"])\n", - "a_utt_meta_sub 
= a_utt_meta_df.join(\n", - " utt_meta_df[\n", - " ((utt_meta_df.is_incumbent == True) | (utt_meta_df.is_oppn == True))\n", - " & (utt_meta_df.speaker != \"\")\n", - " & (utt_meta_df.party.notnull())\n", - " & (utt_meta_df.govt_coarse != \"thatcher+major\")\n", - " ].set_index(\"next_id\"),\n", - " how=\"inner\",\n", - ")\n", - "a_utt_meta_sub = a_utt_meta_sub[a_utt_meta_sub[\"bk_clustering.cluster\"].notnull()]" + "a_utt_meta_df = parl_corpus.get_attribute_table('utterance',\n", + " ['bk_clustering.cluster'])\n", + "a_utt_meta_sub = a_utt_meta_df.join(utt_meta_df[((utt_meta_df.is_incumbent == True) | (utt_meta_df.is_oppn == True))\n", + " & (utt_meta_df.speaker != '') & (utt_meta_df.party.notnull())\n", + " & (utt_meta_df.govt_coarse != 'thatcher+major')].set_index('next_id'), how='inner')\n", + "a_utt_meta_sub = a_utt_meta_sub[a_utt_meta_sub['bk_clustering.cluster'].notnull()]" ] }, { @@ -2022,13 +1929,12 @@ }, "outputs": [], "source": [ - "log_odds_party_answer = []\n", + "log_odds_party_answer = []\n", "for cname in a_ec.get_cluster_names():\n", - " entry = compute_log_odds(\n", - " a_utt_meta_sub[\"bk_clustering.cluster\"], a_utt_meta_sub[\"is_incumbent\"], val_subset=[cname]\n", - " )\n", + " entry = compute_log_odds(a_utt_meta_sub['bk_clustering.cluster'],a_utt_meta_sub['is_incumbent'],\n", + " val_subset=[cname])\n", " log_odds_party_answer += entry\n", - "log_odds_party_answer_df = pd.DataFrame(log_odds_party_answer).set_index(\"val\")" + "log_odds_party_answer_df = pd.DataFrame(log_odds_party_answer).set_index('val')" ] }, { @@ -2039,7 +1945,7 @@ }, "outputs": [], "source": [ - "a_type_order = log_odds_party_answer_df.sort_values(\"log_odds\").index" + "a_type_order = log_odds_party_answer_df.sort_values('log_odds').index" ] }, { @@ -2050,7 +1956,7 @@ }, "outputs": [], "source": [ - "a_display_names = [\"Statement\", \"Comment\", \"Progress report\", \"Commitment\", \"Endorsement\"]" + "a_display_names = ['Statement', 'Comment', 'Progress report', 
'Commitment', 'Endorsement']" ] }, { @@ -2070,20 +1976,20 @@ } ], "source": [ - "fig, ax = plt.subplots(figsize=(3, 4))\n", + "fig, ax = plt.subplots(figsize=(3,4))\n", "ax.set_xlim(-1.5, 1.5)\n", - "ax.set_ylim(-0.5, 4.5)\n", - "for i, cname in enumerate(a_type_order):\n", + "ax.set_ylim(-.5,4.5)\n", + "for i,cname in enumerate(a_type_order):\n", " log_odds = log_odds_party_answer_df.loc[cname].log_odds\n", - " ax.scatter([log_odds], [i], color=\"black\", s=49)\n", - " ax.plot([-1.25, 1.25], [i, i], \"--\", color=\"grey\", linewidth=0.5)\n", - "ax.plot([0, 0], [-2, 5], color=\"grey\", linewidth=1)\n", + " ax.scatter([log_odds], [i], color='black',s=49) \n", + " ax.plot([-1.25,1.25],[i,i],'--', color='grey', linewidth=.5)\n", + "ax.plot([0,0],[-2,5], color='grey', linewidth=1)\n", "ax.invert_yaxis()\n", "ax.set_yticks(np.arange(len(a_type_order)))\n", "ax.set_yticklabels(a_display_names, fontsize=14)\n", - "ax.set_xlabel(\"log odds ratio\", fontsize=16)\n", - "ax.set_xticks([-1, 0, 1])\n", - "ax.set_xticklabels([-1, 0, 1], fontsize=14)\n", + "ax.set_xlabel('log odds ratio', fontsize=16)\n", + "ax.set_xticks([-1,0,1])\n", + "ax.set_xticklabels([-1,0,1], fontsize=14)\n", "None" ] }, @@ -2113,7 +2019,8 @@ }, "outputs": [], "source": [ - "utt_range_df = parl_corpus.get_attribute_table(\"utterance\", [\"fw_clustering.cluster\", \"fw_range\"])\n", + "utt_range_df = parl_corpus.get_attribute_table('utterance',\n", + " ['fw_clustering.cluster','fw_range'])\n", "utt_range_df = utt_range_df[utt_range_df.fw_range.notnull()].copy()" ] }, @@ -2125,9 +2032,7 @@ }, "outputs": [], "source": [ - "fw_range_distrs = (\n", - " utt_range_df.groupby(\"fw_clustering.cluster\").fw_range.describe().sort_values(\"50%\")\n", - ")\n", + "fw_range_distrs = utt_range_df.groupby('fw_clustering.cluster').fw_range.describe().sort_values('50%')\n", "med_range_full = utt_range_df.fw_range.median()" ] }, @@ -2148,23 +2053,23 @@ } ], "source": [ - "fig, ax = plt.subplots(figsize=(4, 6))\n", - 
"ax.set_xlim(0.7, 0.9)\n", - "ax.set_ylim(-0.5, 7.5)\n", - "for i, cname in enumerate(type_order):\n", - " med_range = fw_range_distrs.loc[cname][\"50%\"]\n", - " left = fw_range_distrs.loc[cname][\"25%\"]\n", - " right = fw_range_distrs.loc[cname][\"75%\"]\n", - " ax.scatter([med_range], [i], color=\"black\", s=49)\n", - " ax.plot([left, right], [i, i], color=\"black\")\n", - " ax.plot([-1.25, 1.25], [i, i], \"--\", color=\"grey\", linewidth=0.5)\n", - "ax.plot([med_range_full, med_range_full], [-2, 8], \"--\", color=\"grey\", linewidth=1)\n", + "fig, ax = plt.subplots(figsize=(4,6))\n", + "ax.set_xlim(.7, .9)\n", + "ax.set_ylim(-.5,7.5)\n", + "for i,cname in enumerate(type_order):\n", + " med_range = fw_range_distrs.loc[cname]['50%']\n", + " left = fw_range_distrs.loc[cname]['25%']\n", + " right = fw_range_distrs.loc[cname]['75%']\n", + " ax.scatter([med_range], [i], color='black',s=49)\n", + " ax.plot([left,right],[i,i], color='black')\n", + " ax.plot([-1.25,1.25],[i,i],'--', color='grey', linewidth=.5)\n", + "ax.plot([med_range_full,med_range_full],[-2,8], '--', color='grey', linewidth=1)\n", "ax.invert_yaxis()\n", "ax.set_yticks(np.arange(len(type_order)))\n", "ax.set_yticklabels(display_names, fontsize=14)\n", - "ax.set_xlabel(\"$\\overrightarrow{\\Sigma}$\", fontsize=16)\n", - "plt.rc(\"xtick\", labelsize=14)\n", - "plt.rc(\"ytick\", labelsize=14)" + "ax.set_xlabel('$\\overrightarrow{\\Sigma}$', fontsize=16)\n", + "plt.rc('xtick',labelsize=14)\n", + "plt.rc('ytick',labelsize=14)\n" ] }, { @@ -2242,21 +2147,13 @@ }, "outputs": [], "source": [ - "q_pipe = ExpectedContextModelPipeline(\n", - " context_field=\"next_id\",\n", - " output_prefix=\"fw\",\n", - " text_field=\"q_arcs\",\n", - " context_text_field=\"arcs\",\n", - " share_tfidf_models=False,\n", - " text_pipe=parliament_arc_pipeline(),\n", - " tfidf_params={\"binary\": False, \"min_df\": 100, \"max_df\": 0.1},\n", - " min_terms=1,\n", - " n_svd_dims=25,\n", - " n_clusters=8,\n", - " 
cluster_on=\"utts\",\n", - " random_state=1000,\n", - " cluster_random_state=1000,\n", - ")" + "q_pipe = ExpectedContextModelPipeline(context_field='next_id', output_prefix='fw',\n", + " text_field='q_arcs', context_text_field='arcs', share_tfidf_models=False,\n", + " text_pipe=parliament_arc_pipeline(), \n", + " tfidf_params={'binary': False, 'min_df': 100, 'max_df': .1}, \n", + " min_terms=1,\n", + " n_svd_dims=25, n_clusters=8, cluster_on='utts',\n", + " random_state=1000, cluster_random_state=1000)" ] }, { @@ -2267,11 +2164,9 @@ }, "outputs": [], "source": [ - "q_pipe.fit(\n", - " parl_corpus,\n", - " selector=lambda x: x.meta[\"is_question\"] and x.meta[\"pair_has_features\"],\n", - " context_selector=lambda x: x.meta[\"is_answer\"] and x.meta[\"pair_has_features\"],\n", - ")" + "q_pipe.fit(parl_corpus,\n", + " selector=lambda x: x.meta['is_question'] and x.meta['pair_has_features'],\n", + " context_selector=lambda x: x.meta['is_answer'] and x.meta['pair_has_features'])" ] }, { @@ -2554,18 +2449,8 @@ }, "outputs": [], "source": [ - "q_pipe.set_cluster_names(\n", - " [\n", - " \"demand_account\",\n", - " \"shared_concern\",\n", - " \"agreement\",\n", - " \"issue_update\",\n", - " \"question_premises\",\n", - " \"request_assurance\",\n", - " \"prompt_comment\",\n", - " \"accept_propose\",\n", - " ]\n", - ")" + "q_pipe.set_cluster_names(['demand_account', 'shared_concern', 'agreement', 'issue_update',\n", + " 'question_premises', 'request_assurance', 'prompt_comment', 'accept_propose'])" ] }, { @@ -2584,8 +2469,7 @@ "outputs": [], "source": [ "new_ut = q_pipe.transform_utterance(\n", - " \"Can the Minister please explain why the reopening was delayed?\"\n", - ")" + " 'Can the Minister please explain why the reopening was delayed?')" ] }, { @@ -2602,7 +2486,7 @@ } ], "source": [ - "print(\"question type:\", new_ut.meta[\"fw_clustering.cluster\"])" + "print('question type:', new_ut.meta['fw_clustering.cluster'])" ] }, { @@ -2654,7 +2538,7 @@ "source": [ "# note 
that different versions of SpaCy may produce different outputs, since the\n", "# dependency parses may change from version to version\n", - "new_ut.meta[\"fw_repr\"]" + "new_ut.meta['fw_repr']" ] }, { diff --git a/convokit/expected_context_framework/demos/scotus_orientation_demo.ipynb b/convokit/expected_context_framework/demos/scotus_orientation_demo.ipynb index cb047f4c..0de52a3b 100644 --- a/convokit/expected_context_framework/demos/scotus_orientation_demo.ipynb +++ b/convokit/expected_context_framework/demos/scotus_orientation_demo.ipynb @@ -21,8 +21,7 @@ "outputs": [], "source": [ "import warnings\n", - "\n", - "warnings.filterwarnings(\"ignore\")" + "warnings.filterwarnings('ignore')" ] }, { @@ -75,7 +74,7 @@ }, "outputs": [], "source": [ - "# OPTION 1: DOWNLOAD CORPUS\n", + "# OPTION 1: DOWNLOAD CORPUS \n", "# UNCOMMENT THESE LINES TO DOWNLOAD CORPUS\n", "# DATA_DIR = ''\n", "# SCOTUS_CORPUS_PATH = download('supreme-corpus', data_dir=DATA_DIR)\n", @@ -130,7 +129,7 @@ }, "outputs": [], "source": [ - "scotus_corpus.load_info(\"utterance\", [\"arcs\", \"tokens\"])" + "scotus_corpus.load_info('utterance',['arcs','tokens'])" ] }, { @@ -149,10 +148,8 @@ "outputs": [], "source": [ "from convokit.text_processing import TextProcessor\n", - "\n", - "wordcounter = TextProcessor(\n", - " input_field=\"tokens\", output_field=\"wordcount\", proc_fn=lambda x: len(x.split())\n", - ")\n", + "wordcounter = TextProcessor(input_field='tokens', output_field='wordcount',\n", + " proc_fn=lambda x: len(x.split()))\n", "scotus_corpus = wordcounter.transform(scotus_corpus)" ] }, @@ -172,7 +169,7 @@ "outputs": [], "source": [ "for ut in scotus_corpus.iter_utterances(selector=lambda x: x.reply_to is not None):\n", - " scotus_corpus.get_utterance(ut.reply_to).meta[\"next_id\"] = ut.id" + " scotus_corpus.get_utterance(ut.reply_to).meta['next_id'] = ut.id" ] }, { @@ -220,24 +217,20 @@ "outputs": [], "source": [ "for ut in scotus_corpus.iter_utterances():\n", - " 
ut.meta[\"is_valid_context\"] = (\n", - " (ut.meta[\"speaker_type\"] == \"A\")\n", - " and (ut.meta[\"arcs\"] != \"\")\n", - " and (ut.meta[\"wordcount\"] >= min_wc_context)\n", - " and (ut.meta[\"wordcount\"] <= max_wc_context)\n", - " )\n", + " ut.meta['is_valid_context'] = (ut.meta['speaker_type'] == 'A')\\\n", + " and (ut.meta['arcs'] != '')\\\n", + " and (ut.meta['wordcount'] >= min_wc_context)\\\n", + " and (ut.meta['wordcount'] <= max_wc_context) \n", "for ut in scotus_corpus.iter_utterances():\n", - " if (\"next_id\" not in ut.meta) or (ut.reply_to is None):\n", - " ut.meta[\"is_valid_utt\"] = False\n", + " if ('next_id' not in ut.meta) or (ut.reply_to is None): \n", + " ut.meta['is_valid_utt'] = False\n", " else:\n", - " ut.meta[\"is_valid_utt\"] = (\n", - " (ut.meta[\"speaker_type\"] == \"J\")\n", - " and (ut.meta[\"arcs\"] != \"\")\n", - " and (ut.meta[\"wordcount\"] >= min_wc)\n", - " and (ut.meta[\"wordcount\"] <= max_wc)\n", - " and scotus_corpus.get_utterance(ut.meta[\"next_id\"]).meta[\"is_valid_context\"]\n", - " and scotus_corpus.get_utterance(ut.reply_to).meta[\"is_valid_context\"]\n", - " )" + " ut.meta['is_valid_utt'] = (ut.meta['speaker_type'] == 'J')\\\n", + " and (ut.meta['arcs'] != '')\\\n", + " and (ut.meta['wordcount'] >= min_wc)\\\n", + " and (ut.meta['wordcount'] <= max_wc)\\\n", + " and scotus_corpus.get_utterance(ut.meta['next_id']).meta['is_valid_context']\\\n", + " and scotus_corpus.get_utterance(ut.reply_to).meta['is_valid_context']" ] }, { @@ -264,7 +257,7 @@ } ], "source": [ - "sum(ut.meta[\"is_valid_utt\"] for ut in scotus_corpus.iter_utterances())" + "sum(ut.meta['is_valid_utt'] for ut in scotus_corpus.iter_utterances())" ] }, { @@ -284,7 +277,7 @@ } ], "source": [ - "sum(ut.meta[\"is_valid_context\"] for ut in scotus_corpus.iter_utterances())" + "sum(ut.meta['is_valid_context'] for ut in scotus_corpus.iter_utterances())" ] }, { @@ -331,16 +324,10 @@ }, "outputs": [], "source": [ - "j_tfidf_obj = ColNormedTfidfTransformer(\n", 
- " input_field=\"arcs\",\n", - " output_field=\"j_tfidf\",\n", - " binary=True,\n", - " min_df=250,\n", - " max_df=1.0,\n", - " max_features=2000,\n", - ")\n", - "_ = j_tfidf_obj.fit(scotus_corpus, selector=lambda x: x.meta[\"is_valid_utt\"])\n", - "_ = j_tfidf_obj.transform(scotus_corpus, selector=lambda x: x.meta[\"is_valid_utt\"])" + "j_tfidf_obj = ColNormedTfidfTransformer(input_field='arcs', output_field='j_tfidf', binary=True, \n", + " min_df=250, max_df=1., max_features=2000)\n", + "_ = j_tfidf_obj.fit(scotus_corpus, selector=lambda x: x.meta['is_valid_utt'])\n", + "_ = j_tfidf_obj.transform(scotus_corpus, selector=lambda x: x.meta['is_valid_utt'])" ] }, { @@ -351,16 +338,10 @@ }, "outputs": [], "source": [ - "a_tfidf_obj = ColNormedTfidfTransformer(\n", - " input_field=\"arcs\",\n", - " output_field=\"a_tfidf\",\n", - " binary=True,\n", - " min_df=250,\n", - " max_df=1.0,\n", - " max_features=2000,\n", - ")\n", - "_ = a_tfidf_obj.fit(scotus_corpus, selector=lambda x: x.meta[\"is_valid_context\"])\n", - "_ = a_tfidf_obj.transform(scotus_corpus, selector=lambda x: x.meta[\"is_valid_context\"])" + "a_tfidf_obj = ColNormedTfidfTransformer(input_field='arcs', output_field='a_tfidf', binary=True, \n", + " min_df=250, max_df=1., max_features=2000)\n", + "_ = a_tfidf_obj.fit(scotus_corpus, selector=lambda x: x.meta['is_valid_context'])\n", + "_ = a_tfidf_obj.transform(scotus_corpus, selector=lambda x: x.meta['is_valid_context'])" ] }, { @@ -380,14 +361,10 @@ }, "outputs": [], "source": [ - "dual_context_model = DualContextWrapper(\n", - " context_fields=[\"reply_to\", \"next_id\"],\n", - " output_prefixes=[\"bk\", \"fw\"],\n", - " vect_field=\"j_tfidf\",\n", - " context_vect_field=\"a_tfidf\",\n", - " n_svd_dims=15,\n", - " random_state=1000,\n", - ")" + "dual_context_model = DualContextWrapper(context_fields=['reply_to','next_id'], output_prefixes=['bk','fw'],\n", + " vect_field='j_tfidf', context_vect_field='a_tfidf', \n", + " n_svd_dims=15,\n", + " 
random_state=1000)" ] }, { @@ -398,11 +375,8 @@ }, "outputs": [], "source": [ - "dual_context_model.fit(\n", - " scotus_corpus,\n", - " selector=lambda x: x.meta[\"is_valid_utt\"],\n", - " context_selector=lambda x: x.meta[\"is_valid_context\"],\n", - ")" + "dual_context_model.fit(scotus_corpus, selector=lambda x: x.meta['is_valid_utt'],\n", + " context_selector=lambda x: x.meta['is_valid_context'])" ] }, { @@ -774,10 +748,10 @@ } ], "source": [ - "print(\"\\nhigh orientation\")\n", - "display(term_df.sort_values(\"orn\")[[\"orn\"]].tail(20))\n", - "print(\"low orientation\")\n", - "display(term_df.sort_values(\"orn\")[[\"orn\"]].head(20))" + "print('\\nhigh orientation')\n", + "display(term_df.sort_values('orn')[['orn']].tail(20))\n", + "print('low orientation')\n", + "display(term_df.sort_values('orn')[['orn']].head(20))" ] }, { @@ -818,24 +792,19 @@ "outputs": [], "source": [ "sentence_utts = []\n", - "for ut in scotus_corpus.iter_utterances(selector=lambda x: x.meta[\"is_valid_utt\"]):\n", - " sents = ut.meta[\"arcs\"].split(\"\\n\")\n", - " tok_sents = ut.meta[\"tokens\"].split(\"\\n\")\n", + "for ut in scotus_corpus.iter_utterances(selector=lambda x: x.meta['is_valid_utt']):\n", + " sents = ut.meta['arcs'].split('\\n')\n", + " tok_sents = ut.meta['tokens'].split('\\n')\n", " for i, (sent, tok_sent) in enumerate(zip(sents, tok_sents)):\n", - " utt_id = ut.id + \"_\" + \"%03d\" % i\n", + " utt_id = ut.id + '_' + '%03d' % i\n", " speaker = ut.speaker\n", " text = tok_sent\n", - " meta = {\"arcs\": sent, \"utt_id\": ut.id, \"speaker\": ut.speaker.id}\n", - " sentence_utts.append(\n", - " Utterance(\n", - " id=utt_id,\n", - " speaker=speaker,\n", - " text=text,\n", - " reply_to=ut.reply_to,\n", - " conversation_id=ut.conversation_id,\n", - " meta=meta,\n", - " )\n", - " )" + " meta = {'arcs': sent, 'utt_id': ut.id, 'speaker': ut.speaker.id}\n", + " sentence_utts.append(Utterance(\n", + " id=utt_id, speaker=speaker, text=text,\n", + " reply_to=ut.reply_to, 
conversation_id=ut.conversation_id,\n", + " meta=meta\n", + " ))" ] }, { @@ -884,9 +853,7 @@ "outputs": [], "source": [ "_ = j_tfidf_obj.transform(sentence_corpus)\n", - "_ = dual_context_model.transform(\n", - " sentence_corpus, selector=lambda x: x.meta[\"j_tfidf__n_feats\"] >= 1\n", - ")" + "_ = dual_context_model.transform(sentence_corpus, selector=lambda x: x.meta['j_tfidf__n_feats'] >= 1)\n" ] }, { @@ -903,9 +870,9 @@ } ], "source": [ - "ut_eg_id = \"20030__1_029_000\"\n", + "ut_eg_id = '20030__1_029_000'\n", "eg_ut = sentence_corpus.get_utterance(ut_eg_id)\n", - "print(eg_ut.speaker.meta[\"name\"], \":\", eg_ut.text)" + "print(eg_ut.speaker.meta['name'], ':',eg_ut.text)" ] }, { @@ -925,7 +892,7 @@ } ], "source": [ - "eg_ut.meta[\"orn\"]" + "eg_ut.meta['orn']" ] }, { @@ -952,13 +919,10 @@ }, "outputs": [], "source": [ - "sent_df = sentence_corpus.get_attribute_table(\"utterance\", [\"orn\", \"j_tfidf__n_feats\"])\n", - "text_df = pd.DataFrame(\n", - " [\n", - " {\"id\": ut._id, \"text\": ut.text, \"speaker\": ut.speaker.meta[\"name\"]}\n", - " for ut in sentence_corpus.iter_utterances()\n", - " ]\n", - ").set_index(\"id\")\n", + "sent_df = sentence_corpus.get_attribute_table('utterance',['orn','j_tfidf__n_feats'])\n", + "text_df = pd.DataFrame([{'id': ut._id, 'text': ut.text, 'speaker': ut.speaker.meta['name']}\n", + " for ut in sentence_corpus.iter_utterances()\n", + "]).set_index('id')\n", "sent_df = sent_df.join(text_df)" ] }, @@ -999,12 +963,10 @@ }, "outputs": [], "source": [ - "low_subset = sent_df[\n", - " (sent_df.j_tfidf__n_feats >= 30) & (sent_df.orn < sent_df.orn.quantile(0.1))\n", - "].sample(10, random_state=9)\n", - "high_subset = sent_df[\n", - " (sent_df.j_tfidf__n_feats >= 30) & (sent_df.orn > sent_df.orn.quantile(0.9))\n", - "].sample(10, random_state=9)" + "low_subset = sent_df[(sent_df.j_tfidf__n_feats >= 30)\n", + " & (sent_df.orn < sent_df.orn.quantile(.1))].sample(10,random_state=9)\n", + "high_subset = sent_df[(sent_df.j_tfidf__n_feats 
>= 30)\n", + " & (sent_df.orn > sent_df.orn.quantile(.9))].sample(10,random_state=9)" ] }, { @@ -1057,9 +1019,9 @@ } ], "source": [ - "for id, row in high_subset.sort_values(\"orn\", ascending=False).iterrows():\n", - " print(id, row.speaker, \"orientation:\", row.orn)\n", - " print(\">\", row.text)\n", + "for id, row in high_subset.sort_values('orn', ascending=False).iterrows():\n", + " print(id,row.speaker, 'orientation:',row.orn)\n", + " print('>', row.text)\n", " print()" ] }, @@ -1106,9 +1068,9 @@ } ], "source": [ - "for id, row in low_subset.sort_values(\"orn\").iterrows():\n", - " print(id, row.speaker, \"orientation:\", row.orn)\n", - " print(\">\", row.text)\n", + "for id, row in low_subset.sort_values('orn').iterrows():\n", + " print(id,row.speaker, 'orientation:',row.orn)\n", + " print('>', row.text)\n", " print()" ] }, @@ -1178,16 +1140,11 @@ }, "outputs": [], "source": [ - "dual_pipe = DualContextPipeline(\n", - " context_fields=[\"reply_to\", \"next_id\"],\n", - " output_prefixes=[\"bk\", \"fw\"],\n", - " share_tfidf_models=False,\n", - " text_field=\"arcs\",\n", - " text_pipe=scotus_arc_pipeline(),\n", - " tfidf_params={\"binary\": True, \"min_df\": 250, \"max_features\": 2000},\n", - " n_svd_dims=15,\n", - " random_state=1000,\n", - ")" + "dual_pipe = DualContextPipeline(context_fields=['reply_to','next_id'], \n", + " output_prefixes=['bk','fw'], share_tfidf_models=False,\n", + " text_field='arcs', text_pipe=scotus_arc_pipeline(), \n", + " tfidf_params={'binary': True, 'min_df': 250, 'max_features': 2000}, \n", + " n_svd_dims=15, random_state=1000)" ] }, { @@ -1198,11 +1155,9 @@ }, "outputs": [], "source": [ - "dual_pipe.fit(\n", - " scotus_corpus,\n", - " selector=lambda x: x.meta[\"is_valid_utt\"],\n", - " context_selector=lambda x: x.meta[\"is_valid_context\"],\n", - ")" + "dual_pipe.fit(scotus_corpus,\n", + " selector=lambda x: x.meta['is_valid_utt'],\n", + " context_selector=lambda x: x.meta['is_valid_context'])" ] }, { @@ -1527,10 +1482,10 @@ 
} ], "source": [ - "print(\"\\nhigh orientation\")\n", - "display(term_df_new.sort_values(\"orn\")[[\"orn\"]].tail(20))\n", - "print(\"low orientation\")\n", - "display(term_df_new.sort_values(\"orn\")[[\"orn\"]].head(20))" + "print('\\nhigh orientation')\n", + "display(term_df_new.sort_values('orn')[['orn']].tail(20))\n", + "print('low orientation')\n", + "display(term_df_new.sort_values('orn')[['orn']].head(20))" ] }, { @@ -1548,7 +1503,7 @@ }, "outputs": [], "source": [ - "eg_ut_new = dual_pipe.transform_utterance(\"What is the difference between these statutes?\")" + "eg_ut_new = dual_pipe.transform_utterance('What is the difference between these statutes?')" ] }, { @@ -1565,7 +1520,7 @@ } ], "source": [ - "print(\"orientation:\", eg_ut_new.meta[\"orn\"])" + "print('orientation:', eg_ut_new.meta['orn'])" ] }, { diff --git a/convokit/expected_context_framework/demos/switchboard_exploration_demo.ipynb b/convokit/expected_context_framework/demos/switchboard_exploration_demo.ipynb index c233d41a..d330b462 100644 --- a/convokit/expected_context_framework/demos/switchboard_exploration_demo.ipynb +++ b/convokit/expected_context_framework/demos/switchboard_exploration_demo.ipynb @@ -22,8 +22,7 @@ "outputs": [], "source": [ "import warnings\n", - "\n", - "warnings.filterwarnings(\"ignore\")" + "warnings.filterwarnings('ignore')" ] }, { @@ -80,7 +79,7 @@ }, "outputs": [], "source": [ - "# OPTION 1: DOWNLOAD CORPUS\n", + "# OPTION 1: DOWNLOAD CORPUS \n", "# UNCOMMENT THESE LINES TO DOWNLOAD CORPUS\n", "# DATA_DIR = ''\n", "# SW_CORPUS_PATH = download('switchboard-processed-corpus', data_dir=DATA_DIR)\n", @@ -128,7 +127,7 @@ }, "outputs": [], "source": [ - "utt_eg_id = \"3496-79\"" + "utt_eg_id = '3496-79'" ] }, { @@ -155,7 +154,7 @@ } ], "source": [ - "sw_corpus.get_utterance(utt_eg_id).meta[\"alpha_text\"]" + "sw_corpus.get_utterance(utt_eg_id).meta['alpha_text']" ] }, { @@ -195,19 +194,19 @@ "source": [ "topic_counts = defaultdict(set)\n", "for ut in 
sw_corpus.iter_utterances():\n", - " topic = sw_corpus.get_conversation(ut.conversation_id).meta[\"topic\"]\n", - " for x in set(ut.meta[\"alpha_text\"].lower().split()):\n", + " topic = sw_corpus.get_conversation(ut.conversation_id).meta['topic']\n", + " for x in set(ut.meta['alpha_text'].lower().split()):\n", " topic_counts[x].add(topic)\n", "topic_counts = {x: len(y) for x, y in topic_counts.items()}\n", "\n", "word_convo_counts = defaultdict(set)\n", "for ut in sw_corpus.iter_utterances():\n", - " for x in set(ut.meta[\"alpha_text\"].lower().split()):\n", + " for x in set(ut.meta['alpha_text'].lower().split()):\n", " word_convo_counts[x].add(ut.conversation_id)\n", - "word_convo_counts = {x: len(y) for x, y in word_convo_counts.items()}\n", + "word_convo_counts = {x: len(y) for x, y in word_convo_counts.items()}\n", "\n", - "min_topic_words = set(x for x, y in topic_counts.items() if y >= 33)\n", - "min_convo_words = set(x for x, y in word_convo_counts.items() if y >= 200)\n", + "min_topic_words = set(x for x,y in topic_counts.items() if y >= 33)\n", + "min_convo_words = set(x for x,y in word_convo_counts.items() if y >= 200)\n", "vocab = sorted(min_topic_words.intersection(min_convo_words))" ] }, @@ -248,10 +247,7 @@ }, "outputs": [], "source": [ - "from convokit.expected_context_framework import (\n", - " ColNormedTfidfTransformer,\n", - " ExpectedContextModelTransformer,\n", - ")" + "from convokit.expected_context_framework import ColNormedTfidfTransformer, ExpectedContextModelTransformer" ] }, { @@ -271,9 +267,7 @@ }, "outputs": [], "source": [ - "tfidf_obj = ColNormedTfidfTransformer(\n", - " input_field=\"alpha_text\", output_field=\"col_normed_tfidf\", binary=True, vocabulary=vocab\n", - ")\n", + "tfidf_obj = ColNormedTfidfTransformer(input_field='alpha_text', output_field='col_normed_tfidf', binary=True, vocabulary=vocab)\n", "_ = tfidf_obj.fit(sw_corpus)\n", "_ = tfidf_obj.transform(sw_corpus)" ] @@ -302,16 +296,10 @@ }, "outputs": [], "source": [ - 
"ec_fw = ExpectedContextModelTransformer(\n", - " context_field=\"next_id\",\n", - " output_prefix=\"fw\",\n", - " vect_field=\"col_normed_tfidf\",\n", - " context_vect_field=\"col_normed_tfidf\",\n", - " n_svd_dims=15,\n", - " n_clusters=2,\n", - " random_state=1000,\n", - " cluster_random_state=1000,\n", - ")" + "ec_fw = ExpectedContextModelTransformer(context_field='next_id', output_prefix='fw', \n", + " vect_field='col_normed_tfidf', context_vect_field='col_normed_tfidf', \n", + " n_svd_dims=15, n_clusters=2,\n", + " random_state=1000, cluster_random_state=1000)" ] }, { @@ -329,11 +317,8 @@ }, "outputs": [], "source": [ - "ec_fw.fit(\n", - " sw_corpus,\n", - " selector=lambda x: x.meta.get(\"col_normed_tfidf__n_feats\", 0) >= 5,\n", - " context_selector=lambda x: x.meta.get(\"col_normed_tfidf__n_feats\", 0) >= 5,\n", - ")" + "ec_fw.fit(sw_corpus, selector=lambda x: x.meta.get('col_normed_tfidf__n_feats',0)>=5, \n", + " context_selector=lambda x: x.meta.get('col_normed_tfidf__n_feats',0)>= 5)" ] }, { @@ -353,22 +338,13 @@ }, "outputs": [], "source": [ - "ec_bk = ExpectedContextModelTransformer(\n", - " context_field=\"reply_to\",\n", - " output_prefix=\"bk\",\n", - " vect_field=\"col_normed_tfidf\",\n", - " context_vect_field=\"col_normed_tfidf\",\n", - " n_svd_dims=15,\n", - " n_clusters=2,\n", - " random_state=1000,\n", - " cluster_random_state=1000,\n", - " model=ec_fw,\n", - ")\n", - "ec_bk.fit(\n", - " sw_corpus,\n", - " selector=lambda x: x.meta.get(\"col_normed_tfidf__n_feats\", 0) >= 5,\n", - " context_selector=lambda x: x.meta.get(\"col_normed_tfidf__n_feats\", 0) >= 5,\n", - ")" + "ec_bk = ExpectedContextModelTransformer(context_field='reply_to', output_prefix='bk', \n", + " vect_field='col_normed_tfidf', context_vect_field='col_normed_tfidf', \n", + " n_svd_dims=15, n_clusters=2,\n", + " random_state=1000, cluster_random_state=1000,\n", + " model=ec_fw)\n", + "ec_bk.fit(sw_corpus, selector=lambda x: x.meta.get('col_normed_tfidf__n_feats',0)>=5, \n", + 
" context_selector=lambda x: x.meta.get('col_normed_tfidf__n_feats',0)>= 5)" ] }, { @@ -689,8 +665,8 @@ }, "outputs": [], "source": [ - "ec_fw.set_cluster_names([\"commentary\", \"personal\"])\n", - "ec_bk.set_cluster_names([\"personal\", \"commentary\"])" + "ec_fw.set_cluster_names(['commentary','personal'])\n", + "ec_bk.set_cluster_names(['personal', 'commentary'])" ] }, { @@ -781,13 +757,9 @@ }, "outputs": [], "source": [ - "term_df = pd.DataFrame(\n", - " {\n", - " \"index\": ec_fw.get_terms(),\n", - " \"fw_range\": ec_fw.get_term_ranges(),\n", - " \"bk_range\": ec_bk.get_term_ranges(),\n", - " }\n", - ").set_index(\"index\")" + "term_df = pd.DataFrame({'index': ec_fw.get_terms(),\n", + " 'fw_range': ec_fw.get_term_ranges(),\n", + " 'bk_range': ec_bk.get_term_ranges()}).set_index('index')" ] }, { @@ -893,8 +865,10 @@ }, "outputs": [], "source": [ - "term_df[\"orn\"] = term_df.bk_range - term_df.fw_range\n", - "term_df[\"shift\"] = paired_distances(ec_fw.ec_model.term_reprs, ec_bk.ec_model.term_reprs)" + "term_df['orn'] = term_df.bk_range - term_df.fw_range\n", + "term_df['shift'] = paired_distances(\n", + " ec_fw.ec_model.term_reprs, ec_bk.ec_model.term_reprs\n", + " )" ] }, { @@ -1297,15 +1271,15 @@ } ], "source": [ - "k = 10\n", - "print(\"low orientation\")\n", - "display(term_df.sort_values(\"orn\").head(k)[[\"orn\"]])\n", - "print(\"high orientation\")\n", - "display(term_df.sort_values(\"orn\").tail(k)[[\"orn\"]])\n", - "print(\"\\nlow shift\")\n", - "display(term_df.sort_values(\"shift\").head(k)[[\"shift\"]])\n", - "print(\"high shift\")\n", - "display(term_df.sort_values(\"shift\").tail(k)[[\"shift\"]])" + "k=10\n", + "print('low orientation')\n", + "display(term_df.sort_values('orn').head(k)[['orn']])\n", + "print('high orientation')\n", + "display(term_df.sort_values('orn').tail(k)[['orn']])\n", + "print('\\nlow shift')\n", + "display(term_df.sort_values('shift').head(k)[['shift']])\n", + "print('high shift')\n", + 
"display(term_df.sort_values('shift').tail(k)[['shift']])" ] }, { @@ -1334,8 +1308,8 @@ }, "outputs": [], "source": [ - "_ = ec_fw.transform(sw_corpus, selector=lambda x: x.meta.get(\"col_normed_tfidf__n_feats\", 0) >= 5)\n", - "_ = ec_bk.transform(sw_corpus, selector=lambda x: x.meta.get(\"col_normed_tfidf__n_feats\", 0) >= 5)" + "_ = ec_fw.transform(sw_corpus, selector=lambda x: x.meta.get('col_normed_tfidf__n_feats',0)>=5)\n", + "_ = ec_bk.transform(sw_corpus, selector=lambda x: x.meta.get('col_normed_tfidf__n_feats',0)>=5)" ] }, { @@ -1390,8 +1364,8 @@ ], "source": [ "eg_ut = sw_corpus.get_utterance(utt_eg_id)\n", - "print(\"Forwards range:\", eg_ut.meta[\"fw_range\"])\n", - "print(\"Backwards range:\", eg_ut.meta[\"bk_range\"])" + "print('Forwards range:', eg_ut.meta['fw_range'])\n", + "print('Backwards range:', eg_ut.meta['bk_range'])" ] }, { @@ -1416,8 +1390,8 @@ } ], "source": [ - "print(\"Forwards cluster:\", eg_ut.meta[\"fw_clustering.cluster\"])\n", - "print(\"Backwards cluster:\", eg_ut.meta[\"bk_clustering.cluster\"])" + "print('Forwards cluster:', eg_ut.meta['fw_clustering.cluster'])\n", + "print('Backwards cluster:', eg_ut.meta['bk_clustering.cluster'])" ] }, { @@ -1435,10 +1409,8 @@ }, "outputs": [], "source": [ - "for ut in sw_corpus.iter_utterances(\n", - " selector=lambda x: x.meta.get(\"col_normed_tfidf__n_feats\", 0) >= 5\n", - "):\n", - " ut.meta[\"orn\"] = ut.meta[\"bk_range\"] - ut.meta[\"fw_range\"]" + "for ut in sw_corpus.iter_utterances(selector=lambda x: x.meta.get('col_normed_tfidf__n_feats',0)>=5):\n", + " ut.meta['orn'] = ut.meta['bk_range'] - ut.meta['fw_range']" ] }, { @@ -1456,9 +1428,9 @@ }, "outputs": [], "source": [ - "utt_shifts = paired_distances(sw_corpus.get_vectors(\"fw_repr\"), sw_corpus.get_vectors(\"bk_repr\"))\n", - "for id, shift in zip(sw_corpus.get_vector_matrix(\"fw_repr\").ids, utt_shifts):\n", - " sw_corpus.get_utterance(id).meta[\"shift\"] = shift" + "utt_shifts = 
paired_distances(sw_corpus.get_vectors('fw_repr'), sw_corpus.get_vectors('bk_repr'))\n", + "for id, shift in zip(sw_corpus.get_vector_matrix('fw_repr').ids, utt_shifts):\n", + " sw_corpus.get_utterance(id).meta['shift'] = shift" ] }, { @@ -1476,8 +1448,8 @@ } ], "source": [ - "print(\"shift:\", eg_ut.meta[\"shift\"])\n", - "print(\"orientation:\", eg_ut.meta[\"orn\"])" + "print('shift:', eg_ut.meta['shift'])\n", + "print('orientation:', eg_ut.meta['orn'])" ] }, { @@ -1522,10 +1494,10 @@ }, "outputs": [], "source": [ - "df = sw_corpus.get_attribute_table(\n", - " \"utterance\", [\"bk_clustering.cluster\", \"fw_clustering.cluster\", \"orn\", \"shift\", \"tags\"]\n", - ")\n", - "df = df[df[\"bk_clustering.cluster\"].notnull()]" + "df = sw_corpus.get_attribute_table('utterance',\n", + " ['bk_clustering.cluster', 'fw_clustering.cluster',\n", + " 'orn', 'shift', 'tags'])\n", + "df = df[df['bk_clustering.cluster'].notnull()]" ] }, { @@ -1543,9 +1515,9 @@ }, "outputs": [], "source": [ - "tag_subset = [\"aa\", \"b\", \"ba\", \"h\", \"ny\", \"qw\", \"qy\", \"sd\", \"sv\"]\n", + "tag_subset = ['aa', 'b', 'ba', 'h', 'ny', 'qw', 'qy', 'sd', 'sv'] \n", "for tag in tag_subset:\n", - " df[\"has_\" + tag] = df.tags.apply(lambda x: tag in x.split())" + " df['has_' + tag] = df.tags.apply(lambda x: tag in x.split())" ] }, { @@ -1574,9 +1546,7 @@ " val_false = sum((col == val) & ~bool_col)\n", " nval_true = sum((col != val) & bool_col)\n", " nval_false = sum((col != val) & ~bool_col)\n", - " log_odds_entries.append(\n", - " {\"val\": val, \"log_odds\": np.log((val_true / val_false) / (nval_true / nval_false))}\n", - " )\n", + " log_odds_entries.append({'val': val, 'log_odds': np.log((val_true/val_false)/(nval_true/nval_false))})\n", " return log_odds_entries" ] }, @@ -1590,10 +1560,10 @@ "source": [ "bk_log_odds = []\n", "for tag in tag_subset:\n", - " entry = compute_log_odds(df[\"bk_clustering.cluster\"], df[\"has_\" + tag], [\"commentary\"])[0]\n", - " entry[\"tag\"] = tag\n", + " 
entry = compute_log_odds(df['bk_clustering.cluster'],df['has_' + tag], ['commentary'])[0]\n", + " entry['tag'] = tag\n", " bk_log_odds.append(entry)\n", - "bk_log_odds_df = pd.DataFrame(bk_log_odds).set_index(\"tag\").sort_values(\"log_odds\")[[\"log_odds\"]]" + "bk_log_odds_df = pd.DataFrame(bk_log_odds).set_index('tag').sort_values('log_odds')[['log_odds']]" ] }, { @@ -1606,10 +1576,10 @@ "source": [ "fw_log_odds = []\n", "for tag in tag_subset:\n", - " entry = compute_log_odds(df[\"fw_clustering.cluster\"], df[\"has_\" + tag], [\"commentary\"])[0]\n", - " entry[\"tag\"] = tag\n", + " entry = compute_log_odds(df['fw_clustering.cluster'],df['has_' + tag], ['commentary'])[0]\n", + " entry['tag'] = tag\n", " fw_log_odds.append(entry)\n", - "fw_log_odds_df = pd.DataFrame(fw_log_odds).set_index(\"tag\").sort_values(\"log_odds\")[[\"log_odds\"]]" + "fw_log_odds_df = pd.DataFrame(fw_log_odds).set_index('tag').sort_values('log_odds')[['log_odds']]" ] }, { @@ -1754,10 +1724,10 @@ } ], "source": [ - "print(\"forwards types vs labels\")\n", + "print('forwards types vs labels')\n", "display(fw_log_odds_df.T)\n", - "print(\"--------------------------\")\n", - "print(\"backwards types vs labels\")\n", + "print('--------------------------')\n", + "print('backwards types vs labels')\n", "display(bk_log_odds_df.T)" ] }, @@ -1809,17 +1779,14 @@ " s = np.sqrt(((n1 - 1) * s1 + (n2 - 1) * s2) / (n1 + n2 - 2))\n", " u1, u2 = np.mean(d1), np.mean(d2)\n", " return (u1 - u2) / s\n", - "\n", - "\n", "def get_pstars(p):\n", - " if p < 0.001:\n", - " return \"***\"\n", + " if p < 0.001:\n", + " return '***'\n", " elif p < 0.01:\n", - " return \"**\"\n", + " return '**'\n", " elif p < 0.05:\n", - " return \"*\"\n", - " else:\n", - " return \"\"" + " return '*'\n", + " else: return ''" ] }, { @@ -1830,16 +1797,17 @@ }, "outputs": [], "source": [ - "stat_col = \"orn\"\n", + "stat_col = 'orn'\n", "entries = []\n", "for tag in tag_subset:\n", - " has = df[df[\"has_\" + tag]][stat_col]\n", - " 
hasnt = df[~df[\"has_\" + tag]][stat_col]\n", - " entry = {\"tag\": tag, \"pval\": stats.mannwhitneyu(has, hasnt)[1], \"cd\": cohend(has, hasnt)}\n", - " entry[\"ps\"] = get_pstars(entry[\"pval\"] * len(tag_subset))\n", + " has = df[df['has_' + tag]][stat_col]\n", + " hasnt = df[~df['has_' + tag]][stat_col]\n", + " entry = {'tag': tag, 'pval': stats.mannwhitneyu(has, hasnt)[1],\n", + " 'cd': cohend(has, hasnt)}\n", + " entry['ps'] = get_pstars(entry['pval'] * len(tag_subset))\n", " entries.append(entry)\n", - "orn_stat_df = pd.DataFrame(entries).set_index(\"tag\").sort_values(\"cd\")\n", - "orn_stat_df = orn_stat_df[np.abs(orn_stat_df.cd) >= 0.1]" + "orn_stat_df = pd.DataFrame(entries).set_index('tag').sort_values('cd')\n", + "orn_stat_df = orn_stat_df[np.abs(orn_stat_df.cd) >= .1]" ] }, { @@ -1850,16 +1818,17 @@ }, "outputs": [], "source": [ - "stat_col = \"shift\"\n", + "stat_col = 'shift'\n", "entries = []\n", "for tag in tag_subset:\n", - " has = df[df[\"has_\" + tag]][stat_col]\n", - " hasnt = df[~df[\"has_\" + tag]][stat_col]\n", - " entry = {\"tag\": tag, \"pval\": stats.mannwhitneyu(has, hasnt)[1], \"cd\": cohend(has, hasnt)}\n", - " entry[\"ps\"] = get_pstars(entry[\"pval\"] * len(tag_subset))\n", + " has = df[df['has_' + tag]][stat_col]\n", + " hasnt = df[~df['has_' + tag]][stat_col]\n", + " entry = {'tag': tag, 'pval': stats.mannwhitneyu(has, hasnt)[1],\n", + " 'cd': cohend(has, hasnt)}\n", + " entry['ps'] = get_pstars(entry['pval'] * len(tag_subset))\n", " entries.append(entry)\n", - "shift_stat_df = pd.DataFrame(entries).set_index(\"tag\").sort_values(\"cd\")\n", - "shift_stat_df = shift_stat_df[np.abs(shift_stat_df.cd) >= 0.1]" + "shift_stat_df = pd.DataFrame(entries).set_index('tag').sort_values('cd')\n", + "shift_stat_df = shift_stat_df[np.abs(shift_stat_df.cd) >= .1]" ] }, { @@ -2030,10 +1999,10 @@ } ], "source": [ - "print(\"orientation vs labels\")\n", + "print('orientation vs labels')\n", "display(orn_stat_df.T)\n", - 
"print(\"--------------------------\")\n", - "print(\"shift vs labels\")\n", + "print('--------------------------')\n", + "print('shift vs labels')\n", "display(shift_stat_df.T)" ] }, @@ -2075,7 +2044,7 @@ }, "outputs": [], "source": [ - "FW_MODEL_PATH = os.path.join(SW_CORPUS_PATH, \"fw\")" + "FW_MODEL_PATH = os.path.join(SW_CORPUS_PATH, 'fw')" ] }, { @@ -2133,16 +2102,9 @@ }, "outputs": [], "source": [ - "ec_fw_new = ExpectedContextModelTransformer(\n", - " \"next_id\",\n", - " \"fw_new\",\n", - " \"col_normed_tfidf\",\n", - " \"col_normed_tfidf\",\n", - " n_svd_dims=15,\n", - " n_clusters=2,\n", - " random_state=1000,\n", - " cluster_random_state=1000,\n", - ")" + "ec_fw_new = ExpectedContextModelTransformer('next_id', 'fw_new', 'col_normed_tfidf', 'col_normed_tfidf', \n", + " n_svd_dims=15, n_clusters=2,\n", + " random_state=1000, cluster_random_state=1000)" ] }, { @@ -2171,9 +2133,7 @@ }, "outputs": [], "source": [ - "_ = ec_fw_new.transform(\n", - " sw_corpus, selector=lambda x: x.meta.get(\"col_normed_tfidf__n_feats\", 0) >= 5\n", - ")" + "_ = ec_fw_new.transform(sw_corpus, selector=lambda x: x.meta.get('col_normed_tfidf__n_feats',0)>=5)" ] }, { @@ -2193,7 +2153,7 @@ } ], "source": [ - "np.allclose(sw_corpus.get_vectors(\"fw_repr\"), sw_corpus.get_vectors(\"fw_new_repr\"))" + "np.allclose(sw_corpus.get_vectors('fw_repr'), sw_corpus.get_vectors('fw_new_repr'))" ] }, { @@ -2238,7 +2198,7 @@ "source": [ "# see `demo_text_pipelines.py` in this demo's directory for details\n", "# in short, this pipeline will either output the `alpha_text` metadata field\n", - "# of an utterance, or write the utterance's `text` attribute into the `alpha_text`\n", + "# of an utterance, or write the utterance's `text` attribute into the `alpha_text` \n", "# metadata field\n", "from demo_text_pipelines import switchboard_text_pipeline" ] @@ -2264,19 +2224,13 @@ }, "outputs": [], "source": [ - "fw_pipe = ExpectedContextModelPipeline(\n", - " context_field=\"next_id\",\n", - " 
output_prefix=\"fw\",\n", - " text_field=\"alpha_text\",\n", - " text_pipe=switchboard_text_pipeline(),\n", - " tfidf_params={\"binary\": True, \"vocabulary\": vocab},\n", - " min_terms=5,\n", - " n_svd_dims=15,\n", - " n_clusters=2,\n", - " cluster_on=\"utts\",\n", - " random_state=1000,\n", - " cluster_random_state=1000,\n", - ")" + "fw_pipe = ExpectedContextModelPipeline(context_field='next_id', output_prefix='fw',\n", + " text_field='alpha_text',\n", + " text_pipe=switchboard_text_pipeline(), \n", + " tfidf_params={'binary': True, 'vocabulary': vocab}, \n", + " min_terms=5,\n", + " n_svd_dims=15, n_clusters=2, cluster_on='utts',\n", + " random_state=1000, cluster_random_state=1000)" ] }, { @@ -2307,20 +2261,14 @@ }, "outputs": [], "source": [ - "bk_pipe = ExpectedContextModelPipeline(\n", - " context_field=\"reply_to\",\n", - " output_prefix=\"bk\",\n", - " text_field=\"alpha_text\",\n", - " text_pipe=switchboard_text_pipeline(),\n", - " tfidf_params={\"binary\": True, \"vocabulary\": vocab},\n", - " min_terms=5,\n", - " ec_model=fw_pipe,\n", - " n_svd_dims=15,\n", - " n_clusters=2,\n", - " cluster_on=\"utts\",\n", - " random_state=1000,\n", - " cluster_random_state=1000,\n", - ")" + "bk_pipe = ExpectedContextModelPipeline(context_field='reply_to', output_prefix='bk',\n", + " text_field='alpha_text',\n", + " text_pipe=switchboard_text_pipeline(), \n", + " tfidf_params={'binary': True, 'vocabulary': vocab}, \n", + " min_terms=5,\n", + " ec_model=fw_pipe,\n", + " n_svd_dims=15, n_clusters=2, cluster_on='utts',\n", + " random_state=1000, cluster_random_state=1000)" ] }, { @@ -2349,8 +2297,8 @@ }, "outputs": [], "source": [ - "fw_pipe.set_cluster_names([\"commentary\", \"personal\"])\n", - "bk_pipe.set_cluster_names([\"personal\", \"commentary\"])" + "fw_pipe.set_cluster_names(['commentary','personal'])\n", + "bk_pipe.set_cluster_names(['personal', 'commentary'])" ] }, { @@ -2368,7 +2316,7 @@ }, "outputs": [], "source": [ - "eg_ut_new = 
fw_pipe.transform_utterance(\"How old were you when you left ?\")\n", + "eg_ut_new = fw_pipe.transform_utterance('How old were you when you left ?')\n", "eg_ut_new = bk_pipe.transform_utterance(eg_ut_new)" ] }, @@ -2409,7 +2357,7 @@ } ], "source": [ - "eg_ut_new.meta[\"fw_repr\"]" + "eg_ut_new.meta['fw_repr']" ] }, { @@ -2430,10 +2378,10 @@ ], "source": [ "# note these attributes have the exact same values as those of eg_ut, computed above\n", - "print(\"Forwards range:\", eg_ut_new.meta[\"fw_range\"])\n", - "print(\"Backwards range:\", eg_ut_new.meta[\"bk_range\"])\n", - "print(\"Forwards cluster:\", eg_ut_new.meta[\"fw_clustering.cluster\"])\n", - "print(\"Backwards cluster:\", eg_ut_new.meta[\"bk_clustering.cluster\"])" + "print('Forwards range:', eg_ut_new.meta['fw_range'])\n", + "print('Backwards range:', eg_ut_new.meta['bk_range'])\n", + "print('Forwards cluster:', eg_ut_new.meta['fw_clustering.cluster'])\n", + "print('Backwards cluster:', eg_ut_new.meta['bk_clustering.cluster'])" ] } ], diff --git a/convokit/expected_context_framework/demos/switchboard_exploration_dual_demo.ipynb b/convokit/expected_context_framework/demos/switchboard_exploration_dual_demo.ipynb index cd7da4c2..7e7ed112 100644 --- a/convokit/expected_context_framework/demos/switchboard_exploration_dual_demo.ipynb +++ b/convokit/expected_context_framework/demos/switchboard_exploration_dual_demo.ipynb @@ -23,8 +23,7 @@ "outputs": [], "source": [ "import warnings\n", - "\n", - "warnings.filterwarnings(\"ignore\")" + "warnings.filterwarnings('ignore')" ] }, { @@ -81,7 +80,7 @@ }, "outputs": [], "source": [ - "# OPTION 1: DOWNLOAD CORPUS\n", + "# OPTION 1: DOWNLOAD CORPUS \n", "# UNCOMMENT THESE LINES TO DOWNLOAD CORPUS\n", "# DATA_DIR = ''\n", "# SW_CORPUS_PATH = download('switchboard-processed-corpus', data_dir=DATA_DIR)\n", @@ -129,7 +128,7 @@ }, "outputs": [], "source": [ - "utt_eg_id = \"3496-79\"" + "utt_eg_id = '3496-79'" ] }, { @@ -156,7 +155,7 @@ } ], "source": [ - 
"sw_corpus.get_utterance(utt_eg_id).meta[\"alpha_text\"]" + "sw_corpus.get_utterance(utt_eg_id).meta['alpha_text']" ] }, { @@ -196,19 +195,19 @@ "source": [ "topic_counts = defaultdict(set)\n", "for ut in sw_corpus.iter_utterances():\n", - " topic = sw_corpus.get_conversation(ut.conversation_id).meta[\"topic\"]\n", - " for x in set(ut.meta[\"alpha_text\"].lower().split()):\n", + " topic = sw_corpus.get_conversation(ut.conversation_id).meta['topic']\n", + " for x in set(ut.meta['alpha_text'].lower().split()):\n", " topic_counts[x].add(topic)\n", "topic_counts = {x: len(y) for x, y in topic_counts.items()}\n", "\n", "word_convo_counts = defaultdict(set)\n", "for ut in sw_corpus.iter_utterances():\n", - " for x in set(ut.meta[\"alpha_text\"].lower().split()):\n", + " for x in set(ut.meta['alpha_text'].lower().split()):\n", " word_convo_counts[x].add(ut.conversation_id)\n", - "word_convo_counts = {x: len(y) for x, y in word_convo_counts.items()}\n", + "word_convo_counts = {x: len(y) for x, y in word_convo_counts.items()}\n", "\n", - "min_topic_words = set(x for x, y in topic_counts.items() if y >= 33)\n", - "min_convo_words = set(x for x, y in word_convo_counts.items() if y >= 200)\n", + "min_topic_words = set(x for x,y in topic_counts.items() if y >= 33)\n", + "min_convo_words = set(x for x,y in word_convo_counts.items() if y >= 200)\n", "vocab = sorted(min_topic_words.intersection(min_convo_words))" ] }, @@ -269,9 +268,7 @@ }, "outputs": [], "source": [ - "tfidf_obj = ColNormedTfidfTransformer(\n", - " input_field=\"alpha_text\", output_field=\"col_normed_tfidf\", binary=True, vocabulary=vocab\n", - ")\n", + "tfidf_obj = ColNormedTfidfTransformer(input_field='alpha_text', output_field='col_normed_tfidf', binary=True, vocabulary=vocab)\n", "_ = tfidf_obj.fit(sw_corpus)\n", "_ = tfidf_obj.transform(sw_corpus)" ] @@ -299,16 +296,10 @@ }, "outputs": [], "source": [ - "dual_context_model = DualContextWrapper(\n", - " context_fields=[\"reply_to\", \"next_id\"],\n", - " 
output_prefixes=[\"bk\", \"fw\"],\n", - " vect_field=\"col_normed_tfidf\",\n", - " context_vect_field=\"col_normed_tfidf\",\n", - " n_svd_dims=15,\n", - " n_clusters=2,\n", - " random_state=1000,\n", - " cluster_random_state=1000,\n", - ")" + "dual_context_model = DualContextWrapper(context_fields=['reply_to','next_id'], output_prefixes=['bk','fw'],\n", + " vect_field='col_normed_tfidf', context_vect_field='col_normed_tfidf', \n", + " n_svd_dims=15, n_clusters=2,\n", + " random_state=1000, cluster_random_state=1000)" ] }, { @@ -326,11 +317,8 @@ }, "outputs": [], "source": [ - "dual_context_model.fit(\n", - " sw_corpus,\n", - " selector=lambda x: x.meta.get(\"col_normed_tfidf__n_feats\", 0) >= 5,\n", - " context_selector=lambda x: x.meta.get(\"col_normed_tfidf__n_feats\", 0) >= 5,\n", - ")" + "dual_context_model.fit(sw_corpus,selector=lambda x: x.meta.get('col_normed_tfidf__n_feats',0)>=5, \n", + " context_selector=lambda x: x.meta.get('col_normed_tfidf__n_feats',0)>= 5)" ] }, { @@ -637,8 +625,8 @@ }, "outputs": [], "source": [ - "dual_context_model.ec_models[0].set_cluster_names([\"personal\", \"commentary\"])\n", - "dual_context_model.ec_models[1].set_cluster_names([\"commentary\", \"personal\"])" + "dual_context_model.ec_models[0].set_cluster_names(['personal', 'commentary'])\n", + "dual_context_model.ec_models[1].set_cluster_names(['commentary', 'personal'])" ] }, { @@ -1171,15 +1159,15 @@ } ], "source": [ - "k = 10\n", - "print(\"low orientation\")\n", - "display(term_df.sort_values(\"orn\").head(k)[[\"orn\"]])\n", - "print(\"high orientation\")\n", - "display(term_df.sort_values(\"orn\").tail(k)[[\"orn\"]])\n", - "print(\"\\nlow shift\")\n", - "display(term_df.sort_values(\"shift\").head(k)[[\"shift\"]])\n", - "print(\"high shift\")\n", - "display(term_df.sort_values(\"shift\").tail(k)[[\"shift\"]])" + "k=10\n", + "print('low orientation')\n", + "display(term_df.sort_values('orn').head(k)[['orn']])\n", + "print('high orientation')\n", + 
"display(term_df.sort_values('orn').tail(k)[['orn']])\n", + "print('\\nlow shift')\n", + "display(term_df.sort_values('shift').head(k)[['shift']])\n", + "print('high shift')\n", + "display(term_df.sort_values('shift').tail(k)[['shift']])" ] }, { @@ -1208,9 +1196,7 @@ }, "outputs": [], "source": [ - "_ = dual_context_model.transform(\n", - " sw_corpus, selector=lambda x: x.meta.get(\"col_normed_tfidf__n_feats\", 0) >= 5\n", - ")" + "_ = dual_context_model.transform(sw_corpus, selector=lambda x: x.meta.get('col_normed_tfidf__n_feats',0)>=5)" ] }, { @@ -1265,8 +1251,8 @@ ], "source": [ "eg_ut = sw_corpus.get_utterance(utt_eg_id)\n", - "print(\"Forwards range:\", eg_ut.meta[\"fw_range\"])\n", - "print(\"Backwards range:\", eg_ut.meta[\"bk_range\"])" + "print('Forwards range:', eg_ut.meta['fw_range'])\n", + "print('Backwards range:', eg_ut.meta['bk_range'])" ] }, { @@ -1291,8 +1277,8 @@ } ], "source": [ - "print(\"Forwards cluster:\", eg_ut.meta[\"fw_clustering.cluster\"])\n", - "print(\"Backwards cluster:\", eg_ut.meta[\"bk_clustering.cluster\"])" + "print('Forwards cluster:', eg_ut.meta['fw_clustering.cluster'])\n", + "print('Backwards cluster:', eg_ut.meta['bk_clustering.cluster'])" ] }, { @@ -1317,8 +1303,8 @@ } ], "source": [ - "print(\"shift:\", eg_ut.meta[\"shift\"])\n", - "print(\"orientation:\", eg_ut.meta[\"orn\"])" + "print('shift:', eg_ut.meta['shift'])\n", + "print('orientation:', eg_ut.meta['orn'])" ] }, { @@ -1349,10 +1335,10 @@ }, "outputs": [], "source": [ - "df = sw_corpus.get_attribute_table(\n", - " \"utterance\", [\"bk_clustering.cluster\", \"fw_clustering.cluster\", \"orn\", \"shift\", \"tags\"]\n", - ")\n", - "df = df[df[\"bk_clustering.cluster\"].notnull()]" + "df = sw_corpus.get_attribute_table('utterance',\n", + " ['bk_clustering.cluster', 'fw_clustering.cluster',\n", + " 'orn', 'shift', 'tags'])\n", + "df = df[df['bk_clustering.cluster'].notnull()]" ] }, { @@ -1370,9 +1356,9 @@ }, "outputs": [], "source": [ - "tag_subset = [\"aa\", \"b\", 
\"ba\", \"h\", \"ny\", \"qw\", \"qy\", \"sd\", \"sv\"]\n", + "tag_subset = ['aa', 'b', 'ba', 'h', 'ny', 'qw', 'qy', 'sd', 'sv'] \n", "for tag in tag_subset:\n", - " df[\"has_\" + tag] = df.tags.apply(lambda x: tag in x.split())" + " df['has_' + tag] = df.tags.apply(lambda x: tag in x.split())" ] }, { @@ -1401,9 +1387,7 @@ " val_false = sum((col == val) & ~bool_col)\n", " nval_true = sum((col != val) & bool_col)\n", " nval_false = sum((col != val) & ~bool_col)\n", - " log_odds_entries.append(\n", - " {\"val\": val, \"log_odds\": np.log((val_true / val_false) / (nval_true / nval_false))}\n", - " )\n", + " log_odds_entries.append({'val': val, 'log_odds': np.log((val_true/val_false)/(nval_true/nval_false))})\n", " return log_odds_entries" ] }, @@ -1417,10 +1401,10 @@ "source": [ "bk_log_odds = []\n", "for tag in tag_subset:\n", - " entry = compute_log_odds(df[\"bk_clustering.cluster\"], df[\"has_\" + tag], [\"commentary\"])[0]\n", - " entry[\"tag\"] = tag\n", + " entry = compute_log_odds(df['bk_clustering.cluster'],df['has_' + tag], ['commentary'])[0]\n", + " entry['tag'] = tag\n", " bk_log_odds.append(entry)\n", - "bk_log_odds_df = pd.DataFrame(bk_log_odds).set_index(\"tag\").sort_values(\"log_odds\")[[\"log_odds\"]]" + "bk_log_odds_df = pd.DataFrame(bk_log_odds).set_index('tag').sort_values('log_odds')[['log_odds']]" ] }, { @@ -1433,10 +1417,10 @@ "source": [ "fw_log_odds = []\n", "for tag in tag_subset:\n", - " entry = compute_log_odds(df[\"fw_clustering.cluster\"], df[\"has_\" + tag], [\"commentary\"])[0]\n", - " entry[\"tag\"] = tag\n", + " entry = compute_log_odds(df['fw_clustering.cluster'],df['has_' + tag], ['commentary'])[0]\n", + " entry['tag'] = tag\n", " fw_log_odds.append(entry)\n", - "fw_log_odds_df = pd.DataFrame(fw_log_odds).set_index(\"tag\").sort_values(\"log_odds\")[[\"log_odds\"]]" + "fw_log_odds_df = pd.DataFrame(fw_log_odds).set_index('tag').sort_values('log_odds')[['log_odds']]" ] }, { @@ -1581,10 +1565,10 @@ } ], "source": [ - "print(\"forwards 
types vs labels\")\n", + "print('forwards types vs labels')\n", "display(fw_log_odds_df.T)\n", - "print(\"--------------------------\")\n", - "print(\"backwards types vs labels\")\n", + "print('--------------------------')\n", + "print('backwards types vs labels')\n", "display(bk_log_odds_df.T)" ] }, @@ -1636,17 +1620,14 @@ " s = np.sqrt(((n1 - 1) * s1 + (n2 - 1) * s2) / (n1 + n2 - 2))\n", " u1, u2 = np.mean(d1), np.mean(d2)\n", " return (u1 - u2) / s\n", - "\n", - "\n", "def get_pstars(p):\n", - " if p < 0.001:\n", - " return \"***\"\n", + " if p < 0.001:\n", + " return '***'\n", " elif p < 0.01:\n", - " return \"**\"\n", + " return '**'\n", " elif p < 0.05:\n", - " return \"*\"\n", - " else:\n", - " return \"\"" + " return '*'\n", + " else: return ''" ] }, { @@ -1657,16 +1638,17 @@ }, "outputs": [], "source": [ - "stat_col = \"orn\"\n", + "stat_col = 'orn'\n", "entries = []\n", "for tag in tag_subset:\n", - " has = df[df[\"has_\" + tag]][stat_col]\n", - " hasnt = df[~df[\"has_\" + tag]][stat_col]\n", - " entry = {\"tag\": tag, \"pval\": stats.mannwhitneyu(has, hasnt)[1], \"cd\": cohend(has, hasnt)}\n", - " entry[\"ps\"] = get_pstars(entry[\"pval\"] * len(tag_subset))\n", + " has = df[df['has_' + tag]][stat_col]\n", + " hasnt = df[~df['has_' + tag]][stat_col]\n", + " entry = {'tag': tag, 'pval': stats.mannwhitneyu(has, hasnt)[1],\n", + " 'cd': cohend(has, hasnt)}\n", + " entry['ps'] = get_pstars(entry['pval'] * len(tag_subset))\n", " entries.append(entry)\n", - "orn_stat_df = pd.DataFrame(entries).set_index(\"tag\").sort_values(\"cd\")\n", - "orn_stat_df = orn_stat_df[np.abs(orn_stat_df.cd) >= 0.1]" + "orn_stat_df = pd.DataFrame(entries).set_index('tag').sort_values('cd')\n", + "orn_stat_df = orn_stat_df[np.abs(orn_stat_df.cd) >= .1]" ] }, { @@ -1677,16 +1659,17 @@ }, "outputs": [], "source": [ - "stat_col = \"shift\"\n", + "stat_col = 'shift'\n", "entries = []\n", "for tag in tag_subset:\n", - " has = df[df[\"has_\" + tag]][stat_col]\n", - " hasnt = 
df[~df[\"has_\" + tag]][stat_col]\n", - " entry = {\"tag\": tag, \"pval\": stats.mannwhitneyu(has, hasnt)[1], \"cd\": cohend(has, hasnt)}\n", - " entry[\"ps\"] = get_pstars(entry[\"pval\"] * len(tag_subset))\n", + " has = df[df['has_' + tag]][stat_col]\n", + " hasnt = df[~df['has_' + tag]][stat_col]\n", + " entry = {'tag': tag, 'pval': stats.mannwhitneyu(has, hasnt)[1],\n", + " 'cd': cohend(has, hasnt)}\n", + " entry['ps'] = get_pstars(entry['pval'] * len(tag_subset))\n", " entries.append(entry)\n", - "shift_stat_df = pd.DataFrame(entries).set_index(\"tag\").sort_values(\"cd\")\n", - "shift_stat_df = shift_stat_df[np.abs(shift_stat_df.cd) >= 0.1]" + "shift_stat_df = pd.DataFrame(entries).set_index('tag').sort_values('cd')\n", + "shift_stat_df = shift_stat_df[np.abs(shift_stat_df.cd) >= .1]" ] }, { @@ -1857,10 +1840,10 @@ } ], "source": [ - "print(\"orientation vs labels\")\n", + "print('orientation vs labels')\n", "display(orn_stat_df.T)\n", - "print(\"--------------------------\")\n", - "print(\"shift vs labels\")\n", + "print('--------------------------')\n", + "print('shift vs labels')\n", "display(shift_stat_df.T)" ] }, @@ -1906,7 +1889,7 @@ }, "outputs": [], "source": [ - "DUAL_MODEL_PATH = os.path.join(SW_CORPUS_PATH, \"dual_model\")" + "DUAL_MODEL_PATH = os.path.join(SW_CORPUS_PATH, 'dual_model')" ] }, { @@ -1959,17 +1942,11 @@ }, "outputs": [], "source": [ - "dual_model_new = DualContextWrapper(\n", - " context_fields=[\"reply_to\", \"next_id\"],\n", - " output_prefixes=[\"bk_new\", \"fw_new\"],\n", - " vect_field=\"col_normed_tfidf\",\n", - " context_vect_field=\"col_normed_tfidf\",\n", - " wrapper_output_prefix=\"new\",\n", - " n_svd_dims=15,\n", - " n_clusters=2,\n", - " random_state=1000,\n", - " cluster_random_state=1000,\n", - ")" + "dual_model_new = DualContextWrapper(context_fields=['reply_to','next_id'], output_prefixes=['bk_new','fw_new'],\n", + " vect_field='col_normed_tfidf', context_vect_field='col_normed_tfidf', \n", + " 
wrapper_output_prefix='new',\n", + " n_svd_dims=15, n_clusters=2,\n", + " random_state=1000, cluster_random_state=1000)" ] }, { @@ -1980,7 +1957,7 @@ }, "outputs": [], "source": [ - "dual_model_new.load(DUAL_MODEL_PATH, model_dirs=[\"bk\", \"fw\"])" + "dual_model_new.load(DUAL_MODEL_PATH, model_dirs=['bk','fw'])" ] }, { @@ -1998,9 +1975,7 @@ }, "outputs": [], "source": [ - "_ = dual_model_new.transform(\n", - " sw_corpus, selector=lambda x: x.meta.get(\"col_normed_tfidf__n_feats\", 0) >= 5\n", - ")" + "_ = dual_model_new.transform(sw_corpus, selector=lambda x: x.meta.get('col_normed_tfidf__n_feats',0)>=5)" ] }, { @@ -2040,7 +2015,7 @@ } ], "source": [ - "np.allclose(sw_corpus.get_vectors(\"bk_new_repr\"), sw_corpus.get_vectors(\"bk_repr\"))" + "np.allclose(sw_corpus.get_vectors('bk_new_repr'), sw_corpus.get_vectors('bk_repr'))" ] }, { @@ -2060,7 +2035,7 @@ } ], "source": [ - "np.allclose(sw_corpus.get_vectors(\"fw_new_repr\"), sw_corpus.get_vectors(\"fw_repr\"))" + "np.allclose(sw_corpus.get_vectors('fw_new_repr'), sw_corpus.get_vectors('fw_repr'))" ] }, { @@ -2071,11 +2046,9 @@ }, "outputs": [], "source": [ - "for ut in sw_corpus.iter_utterances(\n", - " selector=lambda x: x.meta.get(\"col_normed_tfidf__n_feats\", 0) >= 5\n", - "):\n", - " assert ut.meta[\"orn\"] == ut.meta[\"new_orn\"]\n", - " assert ut.meta[\"shift\"] == ut.meta[\"new_shift\"]" + "for ut in sw_corpus.iter_utterances(selector=lambda x: x.meta.get('col_normed_tfidf__n_feats',0)>=5):\n", + " assert ut.meta['orn'] == ut.meta['new_orn']\n", + " assert ut.meta['shift'] == ut.meta['new_shift']" ] }, { @@ -2120,7 +2093,7 @@ "source": [ "# see `demo_text_pipelines.py` in this demo's directory for details\n", "# in short, this pipeline will either output the `alpha_text` metadata field\n", - "# of an utterance, or write the utterance's `text` attribute into the `alpha_text`\n", + "# of an utterance, or write the utterance's `text` attribute into the `alpha_text` \n", "# metadata field\n", "from 
demo_text_pipelines import switchboard_text_pipeline" ] @@ -2146,18 +2119,13 @@ }, "outputs": [], "source": [ - "pipe_obj = DualContextPipeline(\n", - " context_fields=[\"reply_to\", \"next_id\"],\n", - " output_prefixes=[\"bk\", \"fw\"],\n", - " text_field=\"alpha_text\",\n", - " text_pipe=switchboard_text_pipeline(),\n", - " tfidf_params={\"binary\": True, \"vocabulary\": vocab},\n", - " min_terms=5,\n", - " n_svd_dims=15,\n", - " n_clusters=2,\n", - " random_state=1000,\n", - " cluster_random_state=1000,\n", - ")" + "pipe_obj = DualContextPipeline(context_fields=['reply_to','next_id'], \n", + " output_prefixes=['bk','fw'],\n", + " text_field='alpha_text', text_pipe=switchboard_text_pipeline(), \n", + " tfidf_params={'binary': True, 'vocabulary': vocab}, \n", + " min_terms=5,\n", + " n_svd_dims=15, n_clusters=2,\n", + " random_state=1000, cluster_random_state=1000)" ] }, { @@ -2194,7 +2162,7 @@ }, "outputs": [], "source": [ - "eg_ut_new = pipe_obj.transform_utterance(\"How old were you when you left ?\")" + "eg_ut_new = pipe_obj.transform_utterance('How old were you when you left ?')" ] }, { @@ -2215,8 +2183,8 @@ ], "source": [ "# note these attributes have the exact same values as those of eg_ut, computed above\n", - "print(\"shift:\", eg_ut_new.meta[\"shift\"])\n", - "print(\"orientation:\", eg_ut_new.meta[\"orn\"])" + "print('shift:', eg_ut_new.meta['shift'])\n", + "print('orientation:', eg_ut_new.meta['orn'])" ] }, { diff --git a/convokit/expected_context_framework/demos/wiki_awry_demo.ipynb b/convokit/expected_context_framework/demos/wiki_awry_demo.ipynb index e0ffecb1..1f06b867 100644 --- a/convokit/expected_context_framework/demos/wiki_awry_demo.ipynb +++ b/convokit/expected_context_framework/demos/wiki_awry_demo.ipynb @@ -32,8 +32,7 @@ "outputs": [], "source": [ "import warnings\n", - "\n", - "warnings.filterwarnings(\"ignore\")" + "warnings.filterwarnings('ignore')" ] }, { @@ -86,7 +85,7 @@ }, "outputs": [], "source": [ - "# OPTION 1: DOWNLOAD CORPUS\n", 
+ "# OPTION 1: DOWNLOAD CORPUS \n", "# UNCOMMENT THESE LINES TO DOWNLOAD CORPUS\n", "# DATA_DIR = ''\n", "# WIKI_CORPUS_PATH = download('wiki-corpus', data_dir=DATA_DIR)\n", @@ -141,7 +140,7 @@ }, "outputs": [], "source": [ - "wiki_corpus.load_info(\"utterance\", [\"arcs_censored\"])" + "wiki_corpus.load_info('utterance',['arcs_censored'])" ] }, { @@ -160,10 +159,8 @@ "outputs": [], "source": [ "from convokit.text_processing import TextProcessor\n", - "\n", - "join_arcs = TextProcessor(\n", - " input_field=\"arcs_censored\", output_field=\"arcs\", proc_fn=lambda sents: \"\\n\".join(sents)\n", - ")\n", + "join_arcs = TextProcessor(input_field='arcs_censored', output_field='arcs',\n", + " proc_fn=lambda sents: '\\n'.join(sents))\n", "wiki_corpus = join_arcs.transform(wiki_corpus)" ] }, @@ -183,7 +180,7 @@ "outputs": [], "source": [ "for ut in wiki_corpus.iter_utterances(selector=lambda x: x.reply_to is not None):\n", - " wiki_corpus.get_utterance(ut.reply_to).meta[\"next_id\"] = ut.id" + " wiki_corpus.get_utterance(ut.reply_to).meta['next_id'] = ut.id" ] }, { @@ -210,10 +207,7 @@ }, "outputs": [], "source": [ - "from convokit.expected_context_framework import (\n", - " ColNormedTfidfTransformer,\n", - " ExpectedContextModelTransformer,\n", - ")" + "from convokit.expected_context_framework import ColNormedTfidfTransformer, ExpectedContextModelTransformer" ] }, { @@ -235,15 +229,11 @@ }, "outputs": [], "source": [ - "first_tfidf_obj = ColNormedTfidfTransformer(\n", - " input_field=\"arcs\", output_field=\"first_tfidf\", binary=True, min_df=50\n", - ")\n", - "_ = first_tfidf_obj.fit(wiki_corpus, selector=lambda x: x.meta.get(\"next_id\", None) is not None)\n", + "first_tfidf_obj = ColNormedTfidfTransformer(input_field='arcs', output_field='first_tfidf', binary=True, min_df=50)\n", + "_ = first_tfidf_obj.fit(wiki_corpus, selector=lambda x: x.meta.get('next_id',None) is not None)\n", "_ = first_tfidf_obj.transform(wiki_corpus)\n", "\n", - "second_tfidf_obj = 
ColNormedTfidfTransformer(\n", - " input_field=\"arcs\", output_field=\"second_tfidf\", binary=True, min_df=50\n", - ")\n", + "second_tfidf_obj = ColNormedTfidfTransformer(input_field='arcs', output_field='second_tfidf', binary=True, min_df=50)\n", "_ = second_tfidf_obj.fit(wiki_corpus, selector=lambda x: x.reply_to is not None)\n", "_ = second_tfidf_obj.transform(wiki_corpus)" ] @@ -268,16 +258,10 @@ "outputs": [], "source": [ "ec_fw = ExpectedContextModelTransformer(\n", - " context_field=\"next_id\",\n", - " output_prefix=\"fw\",\n", - " vect_field=\"first_tfidf\",\n", - " context_vect_field=\"second_tfidf\",\n", - " n_svd_dims=25,\n", - " n_clusters=6,\n", - " cluster_on=\"terms\",\n", - " random_state=1000,\n", - " cluster_random_state=1000,\n", - ")" + " context_field='next_id', output_prefix='fw', \n", + " vect_field='first_tfidf', context_vect_field='second_tfidf', \n", + " n_svd_dims=25, n_clusters=6, cluster_on='terms',\n", + " random_state=1000, cluster_random_state=1000)" ] }, { @@ -295,13 +279,10 @@ }, "outputs": [], "source": [ - "ec_fw.fit(\n", - " wiki_corpus,\n", - " selector=lambda x: (x.meta.get(\"first_tfidf__n_feats\", 0) >= 1)\n", - " and (x.meta.get(\"next_id\", None) is not None),\n", - " context_selector=lambda x: (x.meta.get(\"second_tfidf__n_feats\", 0) >= 1)\n", - " and (x.reply_to is not None),\n", - ")" + "ec_fw.fit(wiki_corpus, selector=lambda x: (x.meta.get('first_tfidf__n_feats',0)>=1)\n", + " and (x.meta.get('next_id',None) is not None), \n", + " context_selector=lambda x: (x.meta.get('second_tfidf__n_feats',0)>= 1)\n", + " and (x.reply_to is not None))" ] }, { @@ -683,7 +664,7 @@ } ], "source": [ - "ec_fw.print_clusters(k=10, corpus=wiki_corpus, max_chars=200)" + "ec_fw.print_clusters(k=10,corpus=wiki_corpus,max_chars=200)" ] }, { @@ -708,9 +689,9 @@ }, "outputs": [], "source": [ - "ec_fw.set_cluster_names(\n", - " [\"casual\", \"coordination\", \"procedures\", \"contention\", \"editing\", \"moderation\"]\n", - ")" + 
"ec_fw.set_cluster_names(['casual', 'coordination', \n", + " 'procedures', 'contention',\n", + " 'editing', 'moderation'])" ] }, { @@ -837,7 +818,7 @@ }, "outputs": [], "source": [ - "# OPTION 1: DOWNLOAD CORPUS\n", + "# OPTION 1: DOWNLOAD CORPUS \n", "# UNCOMMENT THESE LINES TO DOWNLOAD CORPUS\n", "# DATA_DIR = ''\n", "# AWRY_CORPUS_PATH = download('conversations-gone-awry-corpus', data_dir=DATA_DIR)\n", @@ -866,9 +847,7 @@ }, "outputs": [], "source": [ - "awry_corpus = awry_corpus.filter_conversations_by(\n", - " lambda convo: convo.meta[\"annotation_year\"] == \"2018\"\n", - ")\n", + "awry_corpus = awry_corpus.filter_conversations_by(lambda convo: convo.meta['annotation_year'] == '2018')\n", "# here we filter to consider only the conversations from the original paper" ] }, @@ -906,7 +885,7 @@ }, "outputs": [], "source": [ - "awry_corpus.load_info(\"utterance\", [\"parsed\"])" + "awry_corpus.load_info('utterance',['parsed'])" ] }, { @@ -918,7 +897,6 @@ "outputs": [], "source": [ "from demo_text_pipelines import wiki_arc_pipeline\n", - "\n", "# see `demo_text_pipelines.py` in this demo's directory for details\n", "# in short, this pipeline will compute the dependency-parse arcs we use as input features,\n", "# but will skip over utterances for which these attributes already exist\n", @@ -977,15 +955,10 @@ }, "outputs": [], "source": [ - "cluster_assign_df = awry_corpus.get_attribute_table(\"utterance\", [\"fw_clustering.cluster_id_\"])\n", + "cluster_assign_df = awry_corpus.get_attribute_table('utterance',['fw_clustering.cluster_id_'])\n", "type_assignments = np.zeros((len(cluster_assign_df), 6))\n", - "type_assignments[\n", - " np.arange(len(cluster_assign_df)),\n", - " cluster_assign_df[\"fw_clustering.cluster_id_\"].values.astype(int),\n", - "] = 1\n", - "cluster_assign_df = pd.DataFrame(\n", - " columns=np.arange(6), index=cluster_assign_df.index, data=type_assignments\n", - ")\n", + 
"type_assignments[np.arange(len(cluster_assign_df)),cluster_assign_df['fw_clustering.cluster_id_'].values.astype(int)] = 1\n", + "cluster_assign_df = pd.DataFrame(columns=np.arange(6), index=cluster_assign_df.index, data=type_assignments)\n", "cluster_assign_df.columns = ec_fw.get_cluster_names()" ] }, @@ -1149,9 +1122,7 @@ " convo_ids.append(comment.root)\n", " timestamps.append(comment.timestamp)\n", " page_ids.append(conversation.meta[\"page_id\"])\n", - "comment_df = pd.DataFrame(\n", - " {\"conversation_id\": convo_ids, \"timestamp\": timestamps, \"page_id\": page_ids}, index=comment_ids\n", - ")\n", + "comment_df = pd.DataFrame({\"conversation_id\": convo_ids, \"timestamp\": timestamps, \"page_id\": page_ids}, index=comment_ids)\n", "\n", "# we'll do our construction using awry conversation ID's as the reference key\n", "awry_convo_ids = set()\n", @@ -1159,21 +1130,14 @@ "good_convo_map = {}\n", "page_id_map = {}\n", "for conversation in awry_corpus.iter_conversations():\n", - " if (\n", - " conversation.meta[\"conversation_has_personal_attack\"]\n", - " and conversation.id not in awry_convo_ids\n", - " ):\n", + " if conversation.meta[\"conversation_has_personal_attack\"] and conversation.id not in awry_convo_ids:\n", " awry_convo_ids.add(conversation.id)\n", " good_convo_map[conversation.id] = conversation.meta[\"pair_id\"]\n", " page_id_map[conversation.id] = conversation.meta[\"page_id\"]\n", "awry_convo_ids = list(awry_convo_ids)\n", - "pairs_df = pd.DataFrame(\n", - " {\n", - " \"bad_conversation_id\": awry_convo_ids,\n", - " \"conversation_id\": [good_convo_map[cid] for cid in awry_convo_ids],\n", - " \"page_id\": [page_id_map[cid] for cid in awry_convo_ids],\n", - " }\n", - ")\n", + "pairs_df = pd.DataFrame({\"bad_conversation_id\": awry_convo_ids,\n", + " \"conversation_id\": [good_convo_map[cid] for cid in awry_convo_ids],\n", + " \"page_id\": [page_id_map[cid] for cid in awry_convo_ids]})\n", "# finally, we will augment the pairs dataframe with the 
IDs of the first and second comment for both\n", "# the bad and good conversation. This will come in handy for constructing the feature matrix.\n", "first_ids = []\n", @@ -1182,22 +1146,14 @@ "second_ids_bad = []\n", "for row in pairs_df.itertuples():\n", " # \"first two\" is defined in terms of time of posting\n", - " comments_sorted = comment_df[comment_df.conversation_id == row.conversation_id].sort_values(\n", - " by=\"timestamp\"\n", - " )\n", + " comments_sorted = comment_df[comment_df.conversation_id==row.conversation_id].sort_values(by=\"timestamp\")\n", " first_ids.append(comments_sorted.iloc[0].name)\n", " second_ids.append(comments_sorted.iloc[1].name)\n", - " comments_sorted_bad = comment_df[\n", - " comment_df.conversation_id == row.bad_conversation_id\n", - " ].sort_values(by=\"timestamp\")\n", + " comments_sorted_bad = comment_df[comment_df.conversation_id==row.bad_conversation_id].sort_values(by=\"timestamp\")\n", " first_ids_bad.append(comments_sorted_bad.iloc[0].name)\n", " second_ids_bad.append(comments_sorted_bad.iloc[1].name)\n", - "pairs_df = pairs_df.assign(\n", - " first_id=first_ids,\n", - " second_id=second_ids,\n", - " bad_first_id=first_ids_bad,\n", - " bad_second_id=second_ids_bad,\n", - ")" + "pairs_df = pairs_df.assign(first_id=first_ids, second_id=second_ids, \n", + " bad_first_id=first_ids_bad, bad_second_id=second_ids_bad)" ] }, { @@ -1208,19 +1164,11 @@ }, "outputs": [], "source": [ - "tox_first_comment_features = pairs_df[[\"bad_first_id\"]].join(\n", - " cluster_assign_df, how=\"left\", on=\"bad_first_id\"\n", - ")[cluster_assign_df.columns]\n", - "ntox_first_comment_features = pairs_df[[\"first_id\"]].join(\n", - " cluster_assign_df, how=\"left\", on=\"first_id\"\n", - ")[cluster_assign_df.columns]\n", + "tox_first_comment_features =pairs_df[['bad_first_id']].join(cluster_assign_df, how='left', on='bad_first_id')[cluster_assign_df.columns]\n", + "ntox_first_comment_features =pairs_df[['first_id']].join(cluster_assign_df, 
how='left', on='first_id')[cluster_assign_df.columns]\n", "\n", - "tox_second_comment_features = pairs_df[[\"bad_second_id\"]].join(\n", - " cluster_assign_df, how=\"left\", on=\"bad_second_id\"\n", - ")[cluster_assign_df.columns]\n", - "ntox_second_comment_features = pairs_df[[\"second_id\"]].join(\n", - " cluster_assign_df, how=\"left\", on=\"second_id\"\n", - ")[cluster_assign_df.columns]" + "tox_second_comment_features =pairs_df[['bad_second_id']].join(cluster_assign_df, how='left', on='bad_second_id')[cluster_assign_df.columns]\n", + "ntox_second_comment_features =pairs_df[['second_id']].join(cluster_assign_df, how='left', on='second_id')[cluster_assign_df.columns]" ] }, { @@ -1259,51 +1207,36 @@ "outputs": [], "source": [ "def get_p_stars(x):\n", - " if x < 0.001:\n", - " return \"***\"\n", - " elif x < 0.01:\n", - " return \"**\"\n", - " elif x < 0.05:\n", - " return \"*\"\n", - " else:\n", - " return \"\"\n", - "\n", - "\n", - "def compare_tox(df_ntox, df_tox, min_n=0):\n", + " if x < .001: return '***'\n", + " elif x < .01: return '**'\n", + " elif x < .05: return '*'\n", + " else: return ''\n", + "def compare_tox(df_ntox, df_tox, min_n=0):\n", " cols = df_ntox.columns\n", - " num_feats_in_tox = df_tox[cols].sum().astype(int).rename(\"num_feat_tox\")\n", - " num_nfeats_in_tox = (1 - df_tox[cols]).sum().astype(int).rename(\"num_nfeat_tox\")\n", - " num_feats_in_ntox = df_ntox[cols].sum().astype(int).rename(\"num_feat_ntox\")\n", - " num_nfeats_in_ntox = (1 - df_ntox[cols]).sum().astype(int).rename(\"num_nfeat_ntox\")\n", - " prop_tox = df_tox[cols].mean().rename(\"prop_tox\")\n", - " ref_prop_ntox = df_ntox[cols].mean().rename(\"prop_ntox\")\n", + " num_feats_in_tox = df_tox[cols].sum().astype(int).rename('num_feat_tox')\n", + " num_nfeats_in_tox = (1 - df_tox[cols]).sum().astype(int).rename('num_nfeat_tox')\n", + " num_feats_in_ntox = df_ntox[cols].sum().astype(int).rename('num_feat_ntox')\n", + " num_nfeats_in_ntox = (1 - 
df_ntox[cols]).sum().astype(int).rename('num_nfeat_ntox')\n", + " prop_tox = df_tox[cols].mean().rename('prop_tox')\n", + " ref_prop_ntox = df_ntox[cols].mean().rename('prop_ntox')\n", " n_tox = len(df_tox)\n", - " df = pd.concat(\n", - " [\n", - " num_feats_in_tox,\n", - " num_nfeats_in_tox,\n", - " num_feats_in_ntox,\n", - " num_nfeats_in_ntox,\n", - " prop_tox,\n", - " ref_prop_ntox,\n", - " ],\n", - " axis=1,\n", - " )\n", - " df[\"num_total\"] = df.num_feat_tox + df.num_feat_ntox\n", - " df[\"log_odds\"] = (\n", - " np.log(df.num_feat_tox)\n", - " - np.log(df.num_nfeat_tox)\n", - " + np.log(df.num_nfeat_ntox)\n", - " - np.log(df.num_feat_ntox)\n", - " )\n", - " df[\"abs_log_odds\"] = np.abs(df.log_odds)\n", - " df[\"binom_p\"] = df.apply(\n", - " lambda x: stats.binom_test(x.num_feat_tox, n_tox, x.prop_ntox), axis=1\n", - " ) # *5\n", + " df = pd.concat([\n", + " num_feats_in_tox, \n", + " num_nfeats_in_tox,\n", + " num_feats_in_ntox,\n", + " num_nfeats_in_ntox,\n", + " prop_tox,\n", + " ref_prop_ntox,\n", + " ], axis=1)\n", + " df['num_total'] = df.num_feat_tox + df.num_feat_ntox\n", + " df['log_odds'] = np.log(df.num_feat_tox) - np.log(df.num_nfeat_tox) \\\n", + " + np.log(df.num_nfeat_ntox) - np.log(df.num_feat_ntox)\n", + " df['abs_log_odds'] = np.abs(df.log_odds)\n", + " df['binom_p'] = df.apply(lambda x: stats.binom_test(x.num_feat_tox, n_tox, x.prop_ntox), axis=1)#*5\n", " df = df[df.num_total >= min_n]\n", - " df[\"p\"] = df[\"binom_p\"].apply(lambda x: \"%.3f\" % x)\n", - " df[\"pstars\"] = df[\"binom_p\"].apply(get_p_stars)\n", - " return df.sort_values(\"log_odds\", ascending=False)" + " df['p'] = df['binom_p'].apply(lambda x: '%.3f' % x)\n", + " df['pstars'] = df['binom_p'].apply(get_p_stars)\n", + " return df.sort_values('log_odds', ascending=False)" ] }, { @@ -1343,7 +1276,6 @@ "outputs": [], "source": [ "from matplotlib import pyplot as plt\n", - "\n", "%matplotlib inline" ] }, @@ -1355,102 +1287,84 @@ }, "outputs": [], "source": [ - "# we are 
now ready to plot these comparisons. the following (rather intimidating) helper function\n", + "# we are now ready to plot these comparisons. the following (rather intimidating) helper function \n", "# produces a nicely-formatted plot:\n", - "def draw_figure(\n", - " ax, first_cmp, second_cmp, title=\"\", prompt_types=6, min_log_odds=0.2, min_freq=50, xlim=0.85\n", - "):\n", + "def draw_figure(ax, first_cmp, second_cmp, title='', prompt_types=6, min_log_odds=.2, min_freq=50,xlim=.85):\n", + "\n", " # selecting and sorting the features to plot, given minimum effect sizes and statistical significance\n", - " frequent_feats = first_cmp[first_cmp.num_total >= min_freq].index.union(\n", - " second_cmp[second_cmp.num_total >= min_freq].index\n", - " )\n", - " lrg_effect_feats = first_cmp[\n", - " (first_cmp.abs_log_odds >= 0.2) & (first_cmp.binom_p < 0.05)\n", - " ].index.union(second_cmp[(second_cmp.abs_log_odds >= 0.2) & (second_cmp.binom_p < 0.05)].index)\n", - " # feats_to_include = frequent_feats.intersection(lrg_effect_feats)\n", + " frequent_feats = first_cmp[first_cmp.num_total >= min_freq].index.union(second_cmp[second_cmp.num_total >= min_freq].index)\n", + " lrg_effect_feats = first_cmp[(first_cmp.abs_log_odds >= .2)\n", + " & (first_cmp.binom_p < .05)].index.union(second_cmp[(second_cmp.abs_log_odds >= .2)\n", + " & (second_cmp.binom_p < .05)].index)\n", + "# feats_to_include = frequent_feats.intersection(lrg_effect_feats)\n", " feats_to_include = first_cmp.index\n", " feat_order = sorted(feats_to_include, key=lambda x: first_cmp.loc[x].log_odds, reverse=True)\n", "\n", " # parameters determining the look of the figure\n", - " colors = [\"blue\", \"grey\"]\n", - " shapes = [\"^\", \"s\"]\n", - " eps = 0.02\n", - " star_eps = 0.035\n", + " colors = ['blue', 'grey']\n", + " shapes = ['^', 's'] \n", + " eps = .02\n", + " star_eps = .035\n", " xlim = xlim\n", - " min_log = 0.2\n", + " min_log = .2\n", " gap_prop = 2\n", " label_size = 14\n", - " title_size = 
18\n", + " title_size=18\n", " radius = 256\n", " features = feat_order\n", " ax.invert_yaxis()\n", - " ax.plot([0, 0], [0, len(features) / gap_prop], color=\"black\")\n", - "\n", - " # for each figure we plot the point according to effect size in the first and second comment,\n", + " ax.plot([0,0], [0, len(features)/gap_prop], color='black')\n", + " \n", + " # for each figure we plot the point according to effect size in the first and second comment, \n", " # and add axis labels denoting statistical significance\n", " yticks = []\n", " yticklabels = []\n", " for f_idx, feat in enumerate(features):\n", - " curr_y = (f_idx + 0.5) / gap_prop\n", + " curr_y = (f_idx + .5)/gap_prop\n", " yticks.append(curr_y)\n", " try:\n", + " \n", " first_p = first_cmp.loc[feat].binom_p\n", - " second_p = second_cmp.loc[feat].binom_p\n", + " second_p = second_cmp.loc[feat].binom_p \n", " if first_cmp.loc[feat].abs_log_odds < min_log:\n", " first_face = \"white\"\n", " elif first_p >= 0.05:\n", - " first_face = \"white\"\n", + " first_face = 'white'\n", " else:\n", " first_face = colors[0]\n", " if second_cmp.loc[feat].abs_log_odds < min_log:\n", " second_face = \"white\"\n", " elif second_p >= 0.05:\n", - " second_face = \"white\"\n", + " second_face = 'white'\n", " else:\n", " second_face = colors[1]\n", - " ax.plot(\n", - " [-1 * xlim, xlim], [curr_y, curr_y], \"--\", color=\"grey\", zorder=0, linewidth=0.5\n", - " )\n", - "\n", - " ax.scatter(\n", - " [first_cmp.loc[feat].log_odds],\n", - " [curr_y + eps],\n", - " s=radius,\n", - " edgecolor=colors[0],\n", - " marker=shapes[0],\n", - " zorder=20,\n", - " facecolors=first_face,\n", - " )\n", - " ax.scatter(\n", - " [second_cmp.loc[feat].log_odds],\n", - " [curr_y + eps],\n", - " s=radius,\n", - " edgecolor=colors[1],\n", - " marker=shapes[1],\n", - " zorder=10,\n", - " facecolors=second_face,\n", - " )\n", - "\n", + " ax.plot([-1 * xlim, xlim], [curr_y, curr_y], '--', color='grey', zorder=0, linewidth=.5)\n", + " \n", + " 
ax.scatter([first_cmp.loc[feat].log_odds], [curr_y + eps], s=radius, edgecolor=colors[0], marker=shapes[0],\n", + " zorder=20, facecolors=first_face)\n", + " ax.scatter([second_cmp.loc[feat].log_odds], [curr_y + eps], s=radius, edgecolor=colors[1], marker=shapes[1], \n", + " zorder=10, facecolors=second_face)\n", + " \n", " first_pstr_len = len(get_p_stars(first_p))\n", " second_pstr_len = len(get_p_stars(second_p))\n", - " p_str = np.array([\" \"] * 8)\n", + " p_str = np.array([' '] * 8)\n", " if first_pstr_len > 0:\n", - " p_str[:first_pstr_len] = \"*\"\n", + " p_str[:first_pstr_len] = '*'\n", " if second_pstr_len > 0:\n", - " p_str[-second_pstr_len:] = \"⁺\"\n", - "\n", - " feat_str = str(feat) + \"\\n\" + \"\".join(p_str)\n", + " p_str[-second_pstr_len:] = '⁺'\n", + " \n", + " feat_str = str(feat) + '\\n' + ''.join(p_str)\n", " yticklabels.append(feat_str)\n", " except Exception as e:\n", - " yticklabels.append(\"\")\n", - "\n", + " yticklabels.append('')\n", + " \n", " # add the axis labels\n", - " ax.set_xlabel(\"log-odds ratio\", fontsize=28)\n", - " ax.set_xticks([-xlim - 0.05, -0.5, 0, 0.5, xlim])\n", - " ax.set_xticklabels([\"on-track\", -0.5, 0, 0.5, \"awry\"], fontsize=24)\n", + " ax.set_xlabel('log-odds ratio', fontsize=28)\n", + " ax.set_xticks([-xlim-.05, -.5, 0, .5, xlim])\n", + " ax.set_xticklabels(['on-track', -.5, 0, .5, 'awry'], fontsize=24)\n", " ax.set_yticks(yticks)\n", " ax.set_yticklabels(yticklabels, fontsize=32)\n", - " ax.tick_params(axis=\"both\", which=\"both\", bottom=\"off\", top=\"off\", left=\"off\")\n", + " ax.tick_params(axis='both', which='both', bottom='off', top='off',left='off')\n", " return feat_order" ] }, @@ -1471,8 +1385,8 @@ } ], "source": [ - "f, ax = plt.subplots(1, 1, figsize=(10, 10))\n", - "_ = draw_figure(ax, first_comparisons, second_comparisons, \"\")" + "f, ax = plt.subplots(1,1, figsize=(10,10))\n", + "_ = draw_figure(ax, first_comparisons, second_comparisons, '')" ] }, { @@ -1529,20 +1443,13 @@ }, "outputs": 
[], "source": [ - "fw_pipe = ExpectedContextModelPipeline(\n", - " context_field=\"next_id\",\n", - " output_prefix=\"fw\",\n", - " text_field=\"arcs\",\n", - " share_tfidf_models=False,\n", - " text_pipe=wiki_arc_pipeline(),\n", - " tfidf_params={\"binary\": True, \"min_df\": 50},\n", - " min_terms=1,\n", - " n_svd_dims=25,\n", - " n_clusters=6,\n", - " cluster_on=\"terms\",\n", - " random_state=1000,\n", - " cluster_random_state=1000,\n", - ")" + "fw_pipe = ExpectedContextModelPipeline(context_field='next_id', output_prefix='fw',\n", + " text_field='arcs', share_tfidf_models=False,\n", + " text_pipe=wiki_arc_pipeline(), \n", + " tfidf_params={'binary': True, 'min_df': 50}, \n", + " min_terms=1,\n", + " n_svd_dims=25, n_clusters=6, cluster_on='terms',\n", + " random_state=1000, cluster_random_state=1000)" ] }, { @@ -1553,11 +1460,9 @@ }, "outputs": [], "source": [ - "fw_pipe.fit(\n", - " wiki_corpus,\n", - " selector=lambda x: x.meta.get(\"next_id\", None) is not None,\n", - " context_selector=lambda x: x.reply_to is not None,\n", - ")" + "fw_pipe.fit(wiki_corpus,\n", + " selector=lambda x: x.meta.get('next_id',None) is not None,\n", + " context_selector=lambda x: x.reply_to is not None)" ] }, { @@ -1771,9 +1676,9 @@ }, "outputs": [], "source": [ - "fw_pipe.set_cluster_names(\n", - " [\"casual\", \"coordination\", \"procedures\", \"contention\", \"editing\", \"moderation\"]\n", - ")" + "fw_pipe.set_cluster_names(['casual', 'coordination', \n", + " 'procedures', 'contention',\n", + " 'editing', 'moderation'])" ] }, { @@ -1791,7 +1696,7 @@ }, "outputs": [], "source": [ - "new_ut = fw_pipe.transform_utterance(\"Let me help you out with that\")" + "new_ut = fw_pipe.transform_utterance('Let me help you out with that')" ] }, { @@ -1808,7 +1713,7 @@ } ], "source": [ - "print(\"type:\", new_ut.meta[\"fw_clustering.cluster\"])" + "print('type:', new_ut.meta['fw_clustering.cluster'])" ] }, { @@ -1860,7 +1765,7 @@ "source": [ "# note that different versions of SpaCy may 
produce different outputs, since the\n", "# dependency parses may change from version to version\n", - "new_ut.meta[\"fw_repr\"]" + "new_ut.meta['fw_repr']" ] }, { diff --git a/convokit/fighting_words/demos/fightingwords_demo.ipynb b/convokit/fighting_words/demos/fightingwords_demo.ipynb index 733f26c2..2f4b37ff 100644 --- a/convokit/fighting_words/demos/fightingwords_demo.ipynb +++ b/convokit/fighting_words/demos/fightingwords_demo.ipynb @@ -24,7 +24,7 @@ } ], "source": [ - "corpus = Corpus(filename=download(\"reddit-corpus-small\"))" + "corpus = Corpus(filename=download('reddit-corpus-small'))" ] }, { @@ -48,7 +48,7 @@ } ], "source": [ - "fw = FightingWords(ngram_range=(1, 1))" + "fw = FightingWords(ngram_range=(1,1))" ] }, { @@ -78,11 +78,8 @@ } ], "source": [ - "fw.fit(\n", - " corpus,\n", - " class1_func=lambda utt: utt.meta[\"subreddit\"] == \"Christianity\",\n", - " class2_func=lambda utt: utt.meta[\"subreddit\"] == \"atheism\",\n", - ")" + "fw.fit(corpus, class1_func=lambda utt: utt.meta['subreddit'] == 'Christianity', \n", + " class2_func=lambda utt: utt.meta['subreddit'] == \"atheism\",)" ] }, { @@ -104,7 +101,7 @@ } ], "source": [ - "df = fw.summarize(corpus, plot=True, class1_name=\"r/Christianity\", class2_name=\"r/atheism\")" + "df = fw.summarize(corpus, plot=True, class1_name='r/Christianity', class2_name='r/atheism')" ] }, { @@ -941,7 +938,7 @@ } ], "source": [ - "fw.get_zscore(\"education\")" + "fw.get_zscore('education')" ] }, { @@ -961,7 +958,7 @@ } ], "source": [ - "fw.get_zscore(\"morals\")" + "fw.get_zscore('morals')" ] }, { @@ -981,7 +978,7 @@ } ], "source": [ - "fw.transform(corpus, config={\"annot_method\": \"top_k\", \"top_k\": 10})" + "fw.transform(corpus, config={'annot_method': 'top_k', 'top_k': 10})" ] }, { @@ -1041,10 +1038,10 @@ "source": [ "for utt in corpus.iter_utterances():\n", " if utt.meta[\"subreddit\"] in [\"atheism\", \"Christianity\"]:\n", - " if len(utt.meta[\"fighting_words_class1\"]) > 0:\n", - " 
print(utt.meta[\"subreddit\"])\n", - " print(utt.meta[\"fighting_words_class1\"])\n", - " print(utt.meta[\"fighting_words_class2\"])\n", + " if len(utt.meta['fighting_words_class1']) > 0:\n", + " print(utt.meta['subreddit'])\n", + " print(utt.meta['fighting_words_class1'])\n", + " print(utt.meta['fighting_words_class2'])\n", " print(utt.text)\n", " break" ] diff --git a/convokit/forecaster/CRAFT/demos/craft_demo.ipynb b/convokit/forecaster/CRAFT/demos/craft_demo.ipynb index 079d9fc8..2514dd40 100644 --- a/convokit/forecaster/CRAFT/demos/craft_demo.ipynb +++ b/convokit/forecaster/CRAFT/demos/craft_demo.ipynb @@ -45,7 +45,7 @@ } ], "source": [ - "corpus = Corpus(filename=download(\"conversations-gone-awry-corpus\"))" + "corpus = Corpus(filename=download('conversations-gone-awry-corpus'))" ] }, { @@ -125,18 +125,16 @@ "metadata": {}, "outputs": [], "source": [ - "MAX_LENGTH = 80 # this constant controls the maximum number of tokens to consider; it must be set to 80 since that's what CRAFT was trained one.\n", - "forecaster = Forecaster(\n", - " forecaster_model=craft_model,\n", - " forecast_mode=\"past\",\n", - " convo_structure=\"linear\",\n", - " text_func=lambda utt: utt.meta[\"tokens\"][: (MAX_LENGTH - 1)],\n", - " label_func=lambda utt: int(utt.meta[\"comment_has_personal_attack\"]),\n", - " forecast_attribute_name=\"prediction\",\n", - " forecast_prob_attribute_name=\"pred_score\",\n", - " use_last_only=False,\n", - " skip_broken_convos=False,\n", - ")" + "MAX_LENGTH = 80 # this constant controls the maximum number of tokens to consider; it must be set to 80 since that's what CRAFT was trained one.\n", + "forecaster = Forecaster(forecaster_model = craft_model,\n", + " forecast_mode = \"past\",\n", + " convo_structure=\"linear\",\n", + " text_func = lambda utt: utt.meta[\"tokens\"][:(MAX_LENGTH-1)],\n", + " label_func = lambda utt: int(utt.meta['comment_has_personal_attack']),\n", + " forecast_attribute_name=\"prediction\", 
forecast_prob_attribute_name=\"pred_score\",\n", + " use_last_only = False,\n", + " skip_broken_convos=False\n", + " )" ] }, { @@ -244,11 +242,7 @@ "# comments, but rather the \"section header\" (something akin to a conversation title in Wikipedia talk pages). Since they\n", "# are not real comments, we do not want to include them in forecasting. We use the ignore_utterances parameter to\n", "# specify this behavior.\n", - "forecaster.transform(\n", - " corpus,\n", - " selector=lambda convo: convo.meta[\"split\"] in [\"test\"],\n", - " ignore_utterances=lambda utt: utt.meta[\"is_section_header\"],\n", - ")" + "forecaster.transform(corpus, selector=lambda convo: convo.meta[\"split\"] in ['test'], ignore_utterances=lambda utt: utt.meta['is_section_header'])" ] }, { @@ -282,15 +276,15 @@ "metadata": {}, "outputs": [], "source": [ - "FORECAST_THRESH = 0.570617 # Threshold learned on a validation set. Try playing with this to see how it affects the precision-recall tradeoff!\n", + "FORECAST_THRESH = 0.570617 # Threshold learned on a validation set. 
Try playing with this to see how it affects the precision-recall tradeoff!\n", "preds = []\n", "labels = []\n", "# Iterate at a conversation level and consolidate predictions for each conversation\n", - "for convo in corpus.iter_conversations(selector=lambda c: c.meta[\"split\"] == \"test\"):\n", - " labels.append(int(convo.meta[\"conversation_has_personal_attack\"]))\n", + "for convo in corpus.iter_conversations(selector=lambda c: c.meta['split'] == 'test'):\n", + " labels.append(int(convo.meta['conversation_has_personal_attack']))\n", " prediction = 0\n", " for utt in convo.iter_utterances():\n", - " if utt.meta[\"pred_score\"] is not None and utt.meta[\"pred_score\"] > FORECAST_THRESH:\n", + " if utt.meta['pred_score'] is not None and utt.meta['pred_score'] > FORECAST_THRESH:\n", " prediction = 1\n", " preds.append(prediction)\n", "preds = np.asarray(preds)\n", @@ -313,13 +307,9 @@ "source": [ "# Compute accuracy, precision, recall, F1, and false positive rate\n", "acc = np.mean(preds == labels)\n", - "precision, recall, f1, _ = precision_recall_fscore_support(preds, labels, average=\"binary\")\n", - "fpr = np.mean(preds[labels == 0])\n", - "print(\n", - " \"Accuracy = {:.2%}, Precision = {:.2%}, Recall = {:.2%}, FPR = {:.2%}, F1 = {:.2%}\".format(\n", - " acc, precision, recall, fpr, f1\n", - " )\n", - ")" + "precision, recall, f1, _ = precision_recall_fscore_support(preds, labels, average='binary')\n", + "fpr = np.mean(preds[labels==0])\n", + "print(\"Accuracy = {:.2%}, Precision = {:.2%}, Recall = {:.2%}, FPR = {:.2%}, F1 = {:.2%}\".format(acc, precision, recall, fpr, f1))" ] }, { @@ -336,24 +326,20 @@ "metadata": {}, "outputs": [], "source": [ - "comments_until_derail = (\n", - " {}\n", - ") # store the \"number of comments until derailment\" metric for each conversation\n", + "comments_until_derail = {} # store the \"number of comments until derailment\" metric for each conversation\n", "\n", - "for convo in corpus.iter_conversations(\n", - " 
selector=lambda c: c.meta[\"split\"] == \"test\" and c.meta[\"conversation_has_personal_attack\"]\n", - "):\n", + "for convo in corpus.iter_conversations(selector=lambda c: c.meta['split'] == 'test' and c.meta['conversation_has_personal_attack']):\n", " # filter out the section header as usual\n", - " utts = [utt for utt in convo.iter_utterances() if not utt.meta[\"is_section_header\"]]\n", + " utts = [utt for utt in convo.iter_utterances() if not utt.meta['is_section_header']]\n", " # by construction, the last comment is the one with the personal attack\n", " derail_idx = len(utts) - 1\n", " # now scan the utterances in order until we find the first derailment prediction (if any)\n", " for idx in range(1, len(utts)):\n", - " if utts[idx].meta[\"pred_score\"] > FORECAST_THRESH:\n", + " if utts[idx].meta['pred_score'] > FORECAST_THRESH:\n", " # recall that the forecast_score meta field specifies what CRAFT thought this comment would look like BEFORE it\n", - " # saw this comment. So the actual CRAFT forecast is made during the previous comment; we account for this by\n", + " # saw this comment. 
So the actual CRAFT forecast is made during the previous comment; we account for this by \n", " # subtracting 1 from idx\n", - " comments_until_derail[convo.id] = derail_idx - (idx - 1)\n", + " comments_until_derail[convo.id] = derail_idx - (idx-1)\n", " break" ] }, @@ -378,14 +364,12 @@ "source": [ "# visualize the distribution of \"number of comments until derailment\" as a histogram (reproducing Figure 4 from the paper)\n", "comments_until_derail_vals = np.asarray(list(comments_until_derail.values()))\n", - "plt.rcParams[\"figure.figsize\"] = (10.0, 5.0)\n", - "plt.rcParams[\"font.size\"] = 24\n", - "plt.hist(\n", - " comments_until_derail_vals, bins=range(1, np.max(comments_until_derail_vals)), density=True\n", - ")\n", - "plt.xlim(1, 10)\n", - "plt.xticks(np.arange(1, 10) + 0.5, np.arange(1, 10))\n", - "plt.yticks(np.arange(0, 0.25, 0.05), np.arange(0, 25, 5))\n", + "plt.rcParams['figure.figsize'] = (10.0, 5.0)\n", + "plt.rcParams['font.size'] = 24\n", + "plt.hist(comments_until_derail_vals, bins=range(1, np.max(comments_until_derail_vals)), density=True)\n", + "plt.xlim(1,10)\n", + "plt.xticks(np.arange(1,10)+0.5, np.arange(1,10))\n", + "plt.yticks(np.arange(0,0.25,0.05), np.arange(0,25,5))\n", "plt.xlabel(\"Number of comments elapsed\")\n", "plt.ylabel(\"% of conversations\")\n", "plt.show()" diff --git a/convokit/forecaster/CRAFT/demos/craft_demo_new.ipynb b/convokit/forecaster/CRAFT/demos/craft_demo_new.ipynb index 43a7bfcf..2f86af97 100644 --- a/convokit/forecaster/CRAFT/demos/craft_demo_new.ipynb +++ b/convokit/forecaster/CRAFT/demos/craft_demo_new.ipynb @@ -84,17 +84,15 @@ "metadata": {}, "outputs": [], "source": [ - "forecaster = Forecaster(\n", - " forecaster_model=craft_model,\n", - " forecast_mode=\"future\",\n", - " convo_structure=\"linear\",\n", - " text_func=lambda utt: utt.meta[\"tokens\"][: (MAX_LENGTH - 1)],\n", - " label_func=lambda utt: int(utt.meta[\"comment_has_personal_attack\"]),\n", - " forecast_attribute_name=\"prediction\",\n", - " 
forecast_prob_attribute_name=\"pred_score\",\n", - " use_last_only=True,\n", - " skip_broken_convos=False,\n", - ")" + "forecaster = Forecaster(forecaster_model = craft_model,\n", + " forecast_mode = \"future\",\n", + " convo_structure=\"linear\",\n", + " text_func = lambda utt: utt.meta[\"tokens\"][:(MAX_LENGTH-1)],\n", + " label_func = lambda utt: int(utt.meta['comment_has_personal_attack']),\n", + " forecast_attribute_name=\"prediction\", forecast_prob_attribute_name=\"pred_score\",\n", + " use_last_only = True,\n", + " skip_broken_convos=False\n", + " )" ] }, { @@ -210,11 +208,8 @@ } ], "source": [ - "forecaster.transform(\n", - " corpus,\n", - " selector=lambda convo: convo.meta[\"split\"] == \"train\",\n", - " ignore_utterances=lambda utt: utt.meta[\"is_section_header\"],\n", - ")" + "forecaster.transform(corpus, selector=lambda convo: convo.meta[\"split\"] == \"train\",\n", + " ignore_utterances=lambda utt: utt.meta[\"is_section_header\"])" ] }, { diff --git a/convokit/forecaster/CRAFT/demos/craft_demo_original.ipynb b/convokit/forecaster/CRAFT/demos/craft_demo_original.ipynb index cbe588db..85a91c26 100644 --- a/convokit/forecaster/CRAFT/demos/craft_demo_original.ipynb +++ b/convokit/forecaster/CRAFT/demos/craft_demo_original.ipynb @@ -40,7 +40,6 @@ "import itertools\n", "from urllib.request import urlretrieve\n", "from convokit import download, Corpus\n", - "\n", "%matplotlib inline" ] }, @@ -116,20 +115,14 @@ "\n", " def __init__(self, name, word2index=None, index2word=None):\n", " self.name = name\n", - " self.trimmed = (\n", - " False if not word2index else True\n", - " ) # if a precomputed vocab is specified assume the user wants to use it as-is\n", + " self.trimmed = False if not word2index else True # if a precomputed vocab is specified assume the user wants to use it as-is\n", " self.word2index = word2index if word2index else {\"UNK\": UNK_token}\n", " self.word2count = {}\n", - " self.index2word = (\n", - " index2word\n", - " if index2word\n", - " 
else {PAD_token: \"PAD\", SOS_token: \"SOS\", EOS_token: \"EOS\", UNK_token: \"UNK\"}\n", - " )\n", + " self.index2word = index2word if index2word else {PAD_token: \"PAD\", SOS_token: \"SOS\", EOS_token: \"EOS\", UNK_token: \"UNK\"}\n", " self.num_words = 4 if not index2word else len(index2word) # Count SOS, EOS, PAD, UNK\n", "\n", " def addSentence(self, sentence):\n", - " for word in sentence.split(\" \"):\n", + " for word in sentence.split(' '):\n", " self.addWord(word)\n", "\n", " def addWord(self, word):\n", @@ -153,22 +146,19 @@ " if v >= min_count:\n", " keep_words.append(k)\n", "\n", - " print(\n", - " \"keep_words {} / {} = {:.4f}\".format(\n", - " len(keep_words), len(self.word2index), len(keep_words) / len(self.word2index)\n", - " )\n", - " )\n", + " print('keep_words {} / {} = {:.4f}'.format(\n", + " len(keep_words), len(self.word2index), len(keep_words) / len(self.word2index)\n", + " ))\n", "\n", " # Reinitialize dictionaries\n", " self.word2index = {\"UNK\": UNK_token}\n", " self.word2count = {}\n", " self.index2word = {PAD_token: \"PAD\", SOS_token: \"SOS\", EOS_token: \"EOS\", UNK_token: \"UNK\"}\n", - " self.num_words = 4 # Count default tokens\n", + " self.num_words = 4 # Count default tokens\n", "\n", " for word in keep_words:\n", " self.addWord(word)\n", "\n", - "\n", "# Create a Voc object from precomputed data structures\n", "def loadPrecomputedVoc(corpus_name, word2index_url, index2word_url):\n", " # load the word-to-index lookup map\n", @@ -192,26 +182,26 @@ "source": [ "# Helper functions for preprocessing and tokenizing text\n", "\n", - "\n", "# Turn a Unicode string to plain ASCII, thanks to\n", "# https://stackoverflow.com/a/518232/2809427\n", "def unicodeToAscii(s):\n", - " return \"\".join(c for c in unicodedata.normalize(\"NFD\", s) if unicodedata.category(c) != \"Mn\")\n", - "\n", + " return ''.join(\n", + " c for c in unicodedata.normalize('NFD', s)\n", + " if unicodedata.category(c) != 'Mn'\n", + " )\n", "\n", "# Tokenize the 
string using NLTK\n", "def tokenize(text):\n", - " tokenizer = nltk.tokenize.RegexpTokenizer(pattern=r\"\\w+|[^\\w\\s]\")\n", + " tokenizer = nltk.tokenize.RegexpTokenizer(pattern=r'\\w+|[^\\w\\s]')\n", " # simplify the problem space by considering only ASCII data\n", " cleaned_text = unicodeToAscii(text.lower())\n", "\n", " # if the resulting string is empty, nothing else to do\n", " if not cleaned_text.strip():\n", " return []\n", - "\n", + " \n", " return tokenizer.tokenize(cleaned_text)\n", "\n", - "\n", "# Given a ConvoKit conversation, preprocess each utterance's text by tokenizing and truncating.\n", "# Returns the processed dialog entry where text has been replaced with a list of\n", "# tokens, each no longer than MAX_LENGTH - 1 (to leave space for the EOS token)\n", @@ -219,27 +209,20 @@ " processed = []\n", " for utterance in dialog.iter_utterances():\n", " # skip the section header, which does not contain conversational content\n", - " if utterance.meta[\"is_section_header\"]:\n", + " if utterance.meta['is_section_header']:\n", " continue\n", " tokens = tokenize(utterance.text)\n", " # replace out-of-vocabulary tokens\n", " for i in range(len(tokens)):\n", " if tokens[i] not in voc.word2index:\n", " tokens[i] = \"UNK\"\n", - " processed.append(\n", - " {\n", - " \"tokens\": tokens,\n", - " \"is_attack\": int(utterance.meta[\"comment_has_personal_attack\"]),\n", - " \"id\": utterance.id,\n", - " }\n", - " )\n", + " processed.append({\"tokens\": tokens, \"is_attack\": int(utterance.meta['comment_has_personal_attack']), \"id\": utterance.id})\n", " return processed\n", "\n", - "\n", "# Load context-reply pairs from the Corpus, optionally filtering to only conversations\n", "# from the specified split (train, val, or test).\n", "# Each conversation, which has N comments (not including the section header) will\n", - "# get converted into N-1 comment-reply pairs, one pair for each reply\n", + "# get converted into N-1 comment-reply pairs, one pair for each 
reply \n", "# (the first comment does not reply to anything).\n", "# Each comment-reply pair is a tuple consisting of the conversational context\n", "# (that is, all comments prior to the reply), the reply itself, the label (that\n", @@ -250,14 +233,14 @@ " pairs = []\n", " for convo in corpus.iter_conversations():\n", " # consider only conversations in the specified split of the data\n", - " if split is None or convo.meta[\"split\"] == split:\n", + " if split is None or convo.meta['split'] == split:\n", " dialog = processDialog(voc, convo)\n", " for idx in range(1, len(dialog)):\n", - " reply = dialog[idx][\"tokens\"][: (MAX_LENGTH - 1)]\n", + " reply = dialog[idx][\"tokens\"][:(MAX_LENGTH-1)]\n", " label = dialog[idx][\"is_attack\"]\n", " comment_id = dialog[idx][\"id\"]\n", " # gather as context all utterances preceding the reply\n", - " context = [u[\"tokens\"][: (MAX_LENGTH - 1)] for u in dialog[:idx]]\n", + " context = [u[\"tokens\"][:(MAX_LENGTH-1)] for u in dialog[:idx]]\n", " pairs.append((context, reply, label, comment_id))\n", " return pairs" ] @@ -274,15 +257,12 @@ "source": [ "# Helper functions for turning dialog and text sequences into tensors, and manipulating those tensors\n", "\n", - "\n", "def indexesFromSentence(voc, sentence):\n", " return [voc.word2index[word] for word in sentence] + [EOS_token]\n", "\n", - "\n", "def zeroPadding(l, fillvalue=PAD_token):\n", " return list(itertools.zip_longest(*l, fillvalue=fillvalue))\n", "\n", - "\n", "def binaryMatrix(l, value=PAD_token):\n", " m = []\n", " for i, seq in enumerate(l):\n", @@ -294,14 +274,11 @@ " m[i].append(1)\n", " return m\n", "\n", - "\n", "# Takes a batch of dialogs (lists of lists of tokens) and converts it into a\n", "# batch of utterances (lists of tokens) sorted by length, while keeping track of\n", "# the information needed to reconstruct the original batch of dialogs\n", "def dialogBatch2UtteranceBatch(dialog_batch):\n", - " utt_tuples = (\n", - " []\n", - " ) # will store tuples 
of (utterance, original position in batch, original position in dialog)\n", + " utt_tuples = [] # will store tuples of (utterance, original position in batch, original position in dialog)\n", " for batch_idx in range(len(dialog_batch)):\n", " dialog = dialog_batch[batch_idx]\n", " for dialog_idx in range(len(dialog)):\n", @@ -315,7 +292,6 @@ " dialog_indices = [u[2] for u in utt_tuples]\n", " return utt_batch, batch_indices, dialog_indices\n", "\n", - "\n", "# Returns padded input sequence tensor and lengths\n", "def inputVar(l, voc):\n", " indexes_batch = [indexesFromSentence(voc, sentence) for sentence in l]\n", @@ -324,7 +300,6 @@ " padVar = torch.LongTensor(padList)\n", " return padVar, lengths\n", "\n", - "\n", "# Returns padded target sequence tensor, padding mask, and max target length\n", "def outputVar(l, voc):\n", " indexes_batch = [indexesFromSentence(voc, sentence) for sentence in l]\n", @@ -335,7 +310,6 @@ " padVar = torch.LongTensor(padList)\n", " return padVar, mask, max_target_len\n", "\n", - "\n", "# Returns all items for a given batch of pairs\n", "def batch2TrainData(voc, pair_batch, already_sorted=False):\n", " if not already_sorted:\n", @@ -351,19 +325,7 @@ " inp, utt_lengths = inputVar(input_utterances, voc)\n", " output, mask, max_target_len = outputVar(output_batch, voc)\n", " label_batch = torch.FloatTensor(label_batch) if label_batch[0] is not None else None\n", - " return (\n", - " inp,\n", - " dialog_lengths,\n", - " utt_lengths,\n", - " batch_indices,\n", - " dialog_indices,\n", - " label_batch,\n", - " id_batch,\n", - " output,\n", - " mask,\n", - " max_target_len,\n", - " )\n", - "\n", + " return inp, dialog_lengths, utt_lengths, batch_indices, dialog_indices, label_batch, id_batch, output, mask, max_target_len\n", "\n", "def batchIterator(voc, source_data, batch_size, shuffle=True):\n", " cur_idx = 0\n", @@ -374,7 +336,7 @@ " cur_idx = 0\n", " if shuffle:\n", " random.shuffle(source_data)\n", - " batch = source_data[cur_idx : 
(cur_idx + batch_size)]\n", + " batch = source_data[cur_idx:(cur_idx+batch_size)]\n", " # the true batch size may be smaller than the given batch size if there is not enough data left\n", " true_batch_size = len(batch)\n", " # ensure that the dialogs in this batch are sorted by length, as expected by the padding module\n", @@ -384,7 +346,7 @@ " batch_labels = [x[2] for x in batch]\n", " # convert batch to tensors\n", " batch_tensors = batch2TrainData(voc, batch, already_sorted=True)\n", - " yield (batch_tensors, batch_dialogs, batch_labels, true_batch_size)\n", + " yield (batch_tensors, batch_dialogs, batch_labels, true_batch_size) \n", " cur_idx += batch_size" ] }, @@ -521,9 +483,7 @@ ], "source": [ "# Inspect the Voc object to make sure it loaded correctly\n", - "print(\n", - " voc.num_words\n", - ") # expected vocab size is 50004: it was built using a fixed vocab size of 50k plus 4 spots for special tokens PAD, SOS, EOS, and UNK.\n", + "print(voc.num_words) # expected vocab size is 50004: it was built using a fixed vocab size of 50k plus 4 spots for special tokens PAD, SOS, EOS, and UNK.\n", "print(list(voc.word2index.items())[:10])\n", "print(list(voc.index2word.items())[:10])" ] @@ -627,7 +587,7 @@ } ], "source": [ - "for token_list in uttid_to_test_pair[\"201082648.33321.33321\"][0]:\n", + "for token_list in uttid_to_test_pair['201082648.33321.33321'][0]:\n", " print(token_list)\n", " print()" ] @@ -656,7 +616,6 @@ "source": [ "class EncoderRNN(nn.Module):\n", " \"\"\"This module represents the utterance encoder component of CRAFT, responsible for creating vector representations of utterances\"\"\"\n", - "\n", " def __init__(self, hidden_size, embedding, n_layers=1, dropout=0):\n", " super(EncoderRNN, self).__init__()\n", " self.n_layers = n_layers\n", @@ -665,13 +624,8 @@ "\n", " # Initialize GRU; the input_size and hidden_size params are both set to 'hidden_size'\n", " # because our input size is a word embedding with number of features == hidden_size\n", - 
" self.gru = nn.GRU(\n", - " hidden_size,\n", - " hidden_size,\n", - " n_layers,\n", - " dropout=(0 if n_layers == 1 else dropout),\n", - " bidirectional=True,\n", - " )\n", + " self.gru = nn.GRU(hidden_size, hidden_size, n_layers,\n", + " dropout=(0 if n_layers == 1 else dropout), bidirectional=True)\n", "\n", " def forward(self, input_seq, input_lengths, hidden=None):\n", " # Convert word indexes to embeddings\n", @@ -683,28 +637,21 @@ " # Unpack padding\n", " outputs, _ = torch.nn.utils.rnn.pad_packed_sequence(outputs)\n", " # Sum bidirectional GRU outputs\n", - " outputs = outputs[:, :, : self.hidden_size] + outputs[:, :, self.hidden_size :]\n", + " outputs = outputs[:, :, :self.hidden_size] + outputs[:, : ,self.hidden_size:]\n", " # Return output and final hidden state\n", " return outputs, hidden\n", "\n", - "\n", "class ContextEncoderRNN(nn.Module):\n", " \"\"\"This module represents the context encoder component of CRAFT, responsible for creating an order-sensitive vector representation of conversation context\"\"\"\n", - "\n", " def __init__(self, hidden_size, n_layers=1, dropout=0):\n", " super(ContextEncoderRNN, self).__init__()\n", " self.n_layers = n_layers\n", " self.hidden_size = hidden_size\n", - "\n", + " \n", " # only unidirectional GRU for context encoding\n", - " self.gru = nn.GRU(\n", - " hidden_size,\n", - " hidden_size,\n", - " n_layers,\n", - " dropout=(0 if n_layers == 1 else dropout),\n", - " bidirectional=False,\n", - " )\n", - "\n", + " self.gru = nn.GRU(hidden_size, hidden_size, n_layers,\n", + " dropout=(0 if n_layers == 1 else dropout), bidirectional=False)\n", + " \n", " def forward(self, input_seq, input_lengths, hidden=None):\n", " # Pack padded batch of sequences for RNN module\n", " packed = torch.nn.utils.rnn.pack_padded_sequence(input_seq, input_lengths)\n", @@ -715,15 +662,13 @@ " # return output and final hidden state\n", " return outputs, hidden\n", "\n", - "\n", "class SingleTargetClf(nn.Module):\n", " \"\"\"This module 
represents the CRAFT classifier head, which takes the context encoding and uses it to make a forecast\"\"\"\n", - "\n", " def __init__(self, hidden_size, dropout=0.1):\n", " super(SingleTargetClf, self).__init__()\n", - "\n", + " \n", " self.hidden_size = hidden_size\n", - "\n", + " \n", " # initialize classifier\n", " self.layer1 = nn.Linear(hidden_size, hidden_size)\n", " self.layer1_act = nn.LeakyReLU()\n", @@ -731,7 +676,7 @@ " self.layer2_act = nn.LeakyReLU()\n", " self.clf = nn.Linear(hidden_size // 2, 1)\n", " self.dropout = nn.Dropout(p=dropout)\n", - "\n", + " \n", " def forward(self, encoder_outputs, encoder_input_lengths):\n", " # from stackoverflow (https://stackoverflow.com/questions/50856936/taking-the-last-state-from-bilstm-bigru-in-pytorch)\n", " # First we unsqueeze seqlengths two times so it has the same number of\n", @@ -739,11 +684,11 @@ " # (batch_size) -> (1, batch_size, 1)\n", " lengths = encoder_input_lengths.unsqueeze(0).unsqueeze(2)\n", " # Then we expand it accordingly\n", - " # (1, batch_size, 1) -> (1, batch_size, hidden_size)\n", + " # (1, batch_size, 1) -> (1, batch_size, hidden_size) \n", " lengths = lengths.expand((1, -1, encoder_outputs.size(2)))\n", "\n", " # take only the last state of the encoder for each batch\n", - " last_outputs = torch.gather(encoder_outputs, 0, lengths - 1).squeeze()\n", + " last_outputs = torch.gather(encoder_outputs, 0, lengths-1).squeeze()\n", " # forward pass through hidden layers\n", " layer1_out = self.layer1_act(self.layer1(self.dropout(last_outputs)))\n", " layer2_out = self.layer2_act(self.layer2(self.dropout(layer1_out)))\n", @@ -751,70 +696,51 @@ " logits = self.clf(self.dropout(layer2_out)).squeeze()\n", " return logits\n", "\n", - "\n", "class Predictor(nn.Module):\n", " \"\"\"This helper module encapsulates the CRAFT pipeline, defining the logic of passing an input through each consecutive sub-module.\"\"\"\n", - "\n", " def __init__(self, encoder, context_encoder, classifier):\n", " 
super(Predictor, self).__init__()\n", " self.encoder = encoder\n", " self.context_encoder = context_encoder\n", " self.classifier = classifier\n", - "\n", - " def forward(\n", - " self,\n", - " input_batch,\n", - " dialog_lengths,\n", - " dialog_lengths_list,\n", - " utt_lengths,\n", - " batch_indices,\n", - " dialog_indices,\n", - " batch_size,\n", - " max_length,\n", - " ):\n", + " \n", + " def forward(self, input_batch, dialog_lengths, dialog_lengths_list, utt_lengths, batch_indices, dialog_indices, batch_size, max_length):\n", " # Forward input through encoder model\n", " _, utt_encoder_hidden = self.encoder(input_batch, utt_lengths)\n", - "\n", + " \n", " # Convert utterance encoder final states to batched dialogs for use by context encoder\n", - " context_encoder_input = makeContextEncoderInput(\n", - " utt_encoder_hidden, dialog_lengths_list, batch_size, batch_indices, dialog_indices\n", - " )\n", - "\n", + " context_encoder_input = makeContextEncoderInput(utt_encoder_hidden, dialog_lengths_list, batch_size, batch_indices, dialog_indices)\n", + " \n", " # Forward pass through context encoder\n", - " context_encoder_outputs, context_encoder_hidden = self.context_encoder(\n", - " context_encoder_input, dialog_lengths\n", - " )\n", - "\n", + " context_encoder_outputs, context_encoder_hidden = self.context_encoder(context_encoder_input, dialog_lengths)\n", + " \n", " # Forward pass through classifier to get prediction logits\n", " logits = self.classifier(context_encoder_outputs, dialog_lengths)\n", - "\n", + " \n", " # Apply sigmoid activation\n", " predictions = F.sigmoid(logits)\n", " return predictions\n", "\n", - "\n", - "def makeContextEncoderInput(\n", - " utt_encoder_hidden, dialog_lengths, batch_size, batch_indices, dialog_indices\n", - "):\n", + "def makeContextEncoderInput(utt_encoder_hidden, dialog_lengths, batch_size, batch_indices, dialog_indices):\n", " \"\"\"The utterance encoder takes in utterances in combined batches, with no knowledge of which 
ones go where in which conversation.\n", - " Its output is therefore also unordered. We correct this by using the information computed during tensor conversion to regroup\n", - " the utterance vectors into their proper conversational order.\"\"\"\n", + " Its output is therefore also unordered. We correct this by using the information computed during tensor conversion to regroup\n", + " the utterance vectors into their proper conversational order.\"\"\"\n", " # first, sum the forward and backward encoder states\n", - " utt_encoder_summed = utt_encoder_hidden[-2, :, :] + utt_encoder_hidden[-1, :, :]\n", + " utt_encoder_summed = utt_encoder_hidden[-2,:,:] + utt_encoder_hidden[-1,:,:]\n", " # we now have hidden state of shape [utterance_batch_size, hidden_size]\n", " # split it into a list of [hidden_size,] x utterance_batch_size\n", " last_states = [t.squeeze() for t in utt_encoder_summed.split(1, dim=0)]\n", - "\n", + " \n", " # create a placeholder list of tensors to group the states by source dialog\n", " states_dialog_batched = [[None for _ in range(dialog_lengths[i])] for i in range(batch_size)]\n", - "\n", + " \n", " # group the states by source dialog\n", " for hidden_state, batch_idx, dialog_idx in zip(last_states, batch_indices, dialog_indices):\n", " states_dialog_batched[batch_idx][dialog_idx] = hidden_state\n", - "\n", + " \n", " # stack each dialog into a tensor of shape [dialog_length, hidden_size]\n", " states_dialog_batched = [torch.stack(d) for d in states_dialog_batched]\n", - "\n", + " \n", " # finally, condense all the dialog tensors into a single zero-padded tensor\n", " # of shape [max_dialog_length, batch_size, hidden_size]\n", " return torch.nn.utils.rnn.pad_sequence(states_dialog_batched)" @@ -842,77 +768,36 @@ }, "outputs": [], "source": [ - "def _evaluate_batch(\n", - " encoder,\n", - " context_encoder,\n", - " predictor,\n", - " voc,\n", - " input_batch,\n", - " dialog_lengths,\n", - " dialog_lengths_list,\n", - " utt_lengths,\n", - " 
batch_indices,\n", - " dialog_indices,\n", - " batch_size,\n", - " device,\n", - " max_length=MAX_LENGTH,\n", - "):\n", + "def _evaluate_batch(encoder, context_encoder, predictor, voc, input_batch, dialog_lengths, \n", + " dialog_lengths_list, utt_lengths, batch_indices, dialog_indices, batch_size, device, max_length=MAX_LENGTH):\n", " # Set device options\n", " input_batch = input_batch.to(device)\n", " dialog_lengths = dialog_lengths.to(device)\n", " utt_lengths = utt_lengths.to(device)\n", " # Predict future attack using predictor\n", - " scores = predictor(\n", - " input_batch,\n", - " dialog_lengths,\n", - " dialog_lengths_list,\n", - " utt_lengths,\n", - " batch_indices,\n", - " dialog_indices,\n", - " batch_size,\n", - " max_length,\n", - " )\n", + " scores = predictor(input_batch, dialog_lengths, dialog_lengths_list, utt_lengths, batch_indices, dialog_indices, batch_size, max_length)\n", " predictions = (scores > 0.5).float()\n", " return predictions, scores\n", "\n", - "\n", "def _evaluate_dataset(dataset, encoder, context_encoder, predictor, voc, batch_size, device):\n", " # create a batch iterator for the given data\n", " batch_iterator = batchIterator(voc, dataset, batch_size, shuffle=False)\n", " # find out how many iterations we will need to cover the whole dataset\n", " n_iters = len(dataset) // batch_size + int(len(dataset) % batch_size > 0)\n", - " output_df = {\"id\": [], \"prediction\": [], \"score\": []}\n", - " for iteration in range(1, n_iters + 1):\n", + " output_df = {\n", + " \"id\": [],\n", + " \"prediction\": [],\n", + " \"score\": []\n", + " }\n", + " for iteration in range(1, n_iters+1):\n", " batch, batch_dialogs, _, true_batch_size = next(batch_iterator)\n", " # Extract fields from batch\n", - " (\n", - " input_variable,\n", - " dialog_lengths,\n", - " utt_lengths,\n", - " batch_indices,\n", - " dialog_indices,\n", - " labels,\n", - " convo_ids,\n", - " target_variable,\n", - " mask,\n", - " max_target_len,\n", - " ) = batch\n", + " 
input_variable, dialog_lengths, utt_lengths, batch_indices, dialog_indices, labels, convo_ids, target_variable, mask, max_target_len = batch\n", " dialog_lengths_list = [len(x) for x in batch_dialogs]\n", " # run the model\n", - " predictions, scores = _evaluate_batch(\n", - " encoder,\n", - " context_encoder,\n", - " predictor,\n", - " voc,\n", - " input_variable,\n", - " dialog_lengths,\n", - " dialog_lengths_list,\n", - " utt_lengths,\n", - " batch_indices,\n", - " dialog_indices,\n", - " true_batch_size,\n", - " device,\n", - " )\n", + " predictions, scores = _evaluate_batch(encoder, context_encoder, predictor, voc, input_variable,\n", + " dialog_lengths, dialog_lengths_list, utt_lengths, batch_indices, dialog_indices,\n", + " true_batch_size, device)\n", "\n", " # format the output as a dataframe (which we can later re-join with the corpus)\n", " for i in range(true_batch_size):\n", @@ -922,10 +807,8 @@ " output_df[\"id\"].append(convo_id)\n", " output_df[\"prediction\"].append(pred)\n", " output_df[\"score\"].append(score)\n", - "\n", - " print(\n", - " \"Iteration: {}; Percent complete: {:.1f}%\".format(iteration, iteration / n_iters * 100)\n", - " )\n", + " \n", + " print(\"Iteration: {}; Percent complete: {:.1f}%\".format(iteration, iteration / n_iters * 100))\n", "\n", " return pd.DataFrame(output_df).set_index(\"id\")" ] @@ -1055,7 +938,7 @@ "random.seed(2019)\n", "\n", "# Tell torch to use GPU. 
Note that if you are running this notebook in a non-GPU environment, you can change 'cuda' to 'cpu' to get the code to run.\n", - "device = torch.device(\"cpu\")\n", + "device = torch.device('cpu')\n", "\n", "print(\"Loading saved parameters...\")\n", "if not os.path.isfile(\"model.tar\"):\n", @@ -1065,14 +948,14 @@ "# checkpoint = torch.load(\"model.tar\")\n", "# If running in a non-GPU environment, you need to tell PyTorch to convert the parameters to CPU tensor format.\n", "# To do so, replace the previous line with the following:\n", - "checkpoint = torch.load(\"model.tar\", map_location=torch.device(\"cpu\"))\n", - "encoder_sd = checkpoint[\"en\"]\n", - "context_sd = checkpoint[\"ctx\"]\n", - "attack_clf_sd = checkpoint[\"atk_clf\"]\n", - "embedding_sd = checkpoint[\"embedding\"]\n", - "voc.__dict__ = checkpoint[\"voc_dict\"]\n", - "\n", - "print(\"Building encoders, decoder, and classifier...\")\n", + "checkpoint = torch.load(\"model.tar\", map_location=torch.device('cpu'))\n", + "encoder_sd = checkpoint['en']\n", + "context_sd = checkpoint['ctx']\n", + "attack_clf_sd = checkpoint['atk_clf']\n", + "embedding_sd = checkpoint['embedding']\n", + "voc.__dict__ = checkpoint['voc_dict']\n", + "\n", + "print('Building encoders, decoder, and classifier...')\n", "# Initialize word embeddings\n", "embedding = nn.Embedding(voc.num_words, hidden_size)\n", "embedding.load_state_dict(embedding_sd)\n", @@ -1088,7 +971,7 @@ "encoder = encoder.to(device)\n", "context_encoder = context_encoder.to(device)\n", "attack_clf = attack_clf.to(device)\n", - "print(\"Models built and ready to go!\")\n", + "print('Models built and ready to go!')\n", "\n", "# Set dropout layers to eval mode\n", "encoder.eval()\n", @@ -1099,9 +982,7 @@ "predictor = Predictor(encoder, context_encoder, attack_clf)\n", "\n", "# Run the pipeline!\n", - "forecasts_df = _evaluate_dataset(\n", - " test_pairs, encoder, context_encoder, predictor, voc, batch_size, device\n", - ")" + "forecasts_df = 
_evaluate_dataset(test_pairs, encoder, context_encoder, predictor, voc, batch_size, device)" ] }, { @@ -1316,10 +1197,10 @@ "# prior to actually seeing this utterance, that this utterance *would be* a derailment\".\n", "for convo in corpus.iter_conversations():\n", " # only consider test set conversations (we did not make predictions for the other ones)\n", - " if convo.meta[\"split\"] == \"test\":\n", + " if convo.meta['split'] == \"test\":\n", " for utt in convo.iter_utterances():\n", " if utt.id in forecasts_df.index:\n", - " utt.meta[\"forecast_score\"] = forecasts_df.loc[utt.id].score" + " utt.meta['forecast_score'] = forecasts_df.loc[utt.id].score" ] }, { @@ -1355,23 +1236,20 @@ "# set up to not look at the last comment, meaning that all forecasts we obtained are forecasts made prior to derailment. This simplifies\n", "# the computation of forecast accuracy as we now do not need to explicitly consider when a forecast was made.\n", "\n", - "conversational_forecasts_df = {\"convo_id\": [], \"label\": [], \"score\": [], \"prediction\": []}\n", + "conversational_forecasts_df = {\n", + " \"convo_id\": [],\n", + " \"label\": [],\n", + " \"score\": [],\n", + " \"prediction\": []\n", + "}\n", "\n", "for convo in corpus.iter_conversations():\n", - " if convo.meta[\"split\"] == \"test\":\n", - " conversational_forecasts_df[\"convo_id\"].append(convo.id)\n", - " conversational_forecasts_df[\"label\"].append(\n", - " int(convo.meta[\"conversation_has_personal_attack\"])\n", - " )\n", - " forecast_scores = [\n", - " utt.meta[\"forecast_score\"]\n", - " for utt in convo.iter_utterances()\n", - " if \"forecast_score\" in utt.meta\n", - " ]\n", - " conversational_forecasts_df[\"score\"] = np.max(forecast_scores)\n", - " conversational_forecasts_df[\"prediction\"].append(\n", - " int(np.max(forecast_scores) > FORECAST_THRESH)\n", - " )\n", + " if convo.meta['split'] == \"test\":\n", + " conversational_forecasts_df['convo_id'].append(convo.id)\n", + " 
conversational_forecasts_df['label'].append(int(convo.meta['conversation_has_personal_attack']))\n", + " forecast_scores = [utt.meta['forecast_score'] for utt in convo.iter_utterances() if 'forecast_score' in utt.meta]\n", + " conversational_forecasts_df['score'] = np.max(forecast_scores)\n", + " conversational_forecasts_df['prediction'].append(int(np.max(forecast_scores) > FORECAST_THRESH))\n", "\n", "conversational_forecasts_df = pd.DataFrame(conversational_forecasts_df).set_index(\"convo_id\")\n", "print((conversational_forecasts_df.label == conversational_forecasts_df.prediction).mean())" @@ -1403,15 +1281,14 @@ "source": [ "# in addition to accuracy, we can also consider applying other metrics at the conversation level, such as precision/recall\n", "def get_pr_stats(preds, labels):\n", - " tp = ((labels == 1) & (preds == 1)).sum()\n", - " fp = ((labels == 0) & (preds == 1)).sum()\n", - " tn = ((labels == 0) & (preds == 0)).sum()\n", - " fn = ((labels == 1) & (preds == 0)).sum()\n", + " tp = ((labels==1)&(preds==1)).sum()\n", + " fp = ((labels==0)&(preds==1)).sum()\n", + " tn = ((labels==0)&(preds==0)).sum()\n", + " fn = ((labels==1)&(preds==0)).sum()\n", " print(\"Precision = {0:.4f}, recall = {1:.4f}\".format(tp / (tp + fp), tp / (tp + fn)))\n", " print(\"False positive rate =\", fp / (fp + tn))\n", " print(\"F1 =\", 2 / (((tp + fp) / tp) + ((tp + fn) / tp)))\n", "\n", - "\n", "get_pr_stats(conversational_forecasts_df.prediction, conversational_forecasts_df.label)" ] }, @@ -1497,25 +1374,23 @@ }, "outputs": [], "source": [ - "comments_until_derail = (\n", - " {}\n", - ") # store the \"number of comments until derailment\" metric for each conversation\n", - "time_until_derail = {} # store the \"time until derailment\" metric for each conversation\n", + "comments_until_derail = {} # store the \"number of comments until derailment\" metric for each conversation\n", + "time_until_derail = {} # store the \"time until derailment\" metric for each conversation\n", 
"\n", "for convo in corpus.iter_conversations():\n", - " if convo.meta[\"split\"] == \"test\" and convo.meta[\"conversation_has_personal_attack\"]:\n", + " if convo.meta['split'] == \"test\" and convo.meta['conversation_has_personal_attack']:\n", " # filter out the section header as usual\n", - " utts = [utt for utt in convo.iter_utterances() if not utt.meta[\"is_section_header\"]]\n", + " utts = [utt for utt in convo.iter_utterances() if not utt.meta['is_section_header']]\n", " # by construction, the last comment is the one with the personal attack\n", " derail_idx = len(utts) - 1\n", " # now scan the utterances in order until we find the first derailment prediction (if any)\n", " for idx in range(1, len(utts)):\n", - " if utts[idx].meta[\"forecast_score\"] > FORECAST_THRESH:\n", + " if utts[idx].meta['forecast_score'] > FORECAST_THRESH:\n", " # recall that the forecast_score meta field specifies what CRAFT thought this comment would look like BEFORE it\n", - " # saw this comment. So the actual CRAFT forecast is made during the previous comment; we account for this by\n", + " # saw this comment. 
So the actual CRAFT forecast is made during the previous comment; we account for this by \n", " # subtracting 1 from idx\n", - " comments_until_derail[convo.id] = derail_idx - (idx - 1)\n", - " time_until_derail[convo.id] = utts[derail_idx].timestamp - utts[(idx - 1)].timestamp\n", + " comments_until_derail[convo.id] = derail_idx - (idx-1)\n", + " time_until_derail[convo.id] = utts[derail_idx].timestamp - utts[(idx-1)].timestamp\n", " break" ] }, @@ -1543,12 +1418,7 @@ "source": [ "# compute some quick statistics about the distribution of the \"number of comments until derailment\" metric\n", "comments_until_derail_vals = np.asarray(list(comments_until_derail.values()))\n", - "print(\n", - " np.min(comments_until_derail_vals),\n", - " np.max(comments_until_derail_vals),\n", - " np.median(comments_until_derail_vals),\n", - " np.mean(comments_until_derail_vals),\n", - ")" + "print(np.min(comments_until_derail_vals), np.max(comments_until_derail_vals), np.median(comments_until_derail_vals), np.mean(comments_until_derail_vals))" ] }, { @@ -1576,12 +1446,7 @@ "# compute some quick statistics about the distribution of the \"time until derailment\" metric\n", "# note that since timestamps are in seconds, we convert to hours by dividing by 3600, to make it more human readable\n", "time_until_derail_vals = np.asarray(list(time_until_derail.values())) / 3600\n", - "print(\n", - " np.min(time_until_derail_vals),\n", - " np.max(time_until_derail_vals),\n", - " np.median(time_until_derail_vals),\n", - " np.mean(time_until_derail_vals),\n", - ")" + "print(np.min(time_until_derail_vals), np.max(time_until_derail_vals), np.median(time_until_derail_vals), np.mean(time_until_derail_vals))" ] }, { @@ -1612,14 +1477,12 @@ ], "source": [ "# visualize the distribution of \"number of comments until derailment\" as a histogram (reproducing Figure 4 from the paper)\n", - "plt.rcParams[\"figure.figsize\"] = (10.0, 5.0)\n", - "plt.rcParams[\"font.size\"] = 24\n", - "plt.hist(\n", - " 
comments_until_derail_vals, bins=range(1, np.max(comments_until_derail_vals)), density=True\n", - ")\n", - "plt.xlim(1, 10)\n", - "plt.xticks(np.arange(1, 10) + 0.5, np.arange(1, 10))\n", - "plt.yticks(np.arange(0, 0.25, 0.05), np.arange(0, 25, 5))\n", + "plt.rcParams['figure.figsize'] = (10.0, 5.0)\n", + "plt.rcParams['font.size'] = 24\n", + "plt.hist(comments_until_derail_vals, bins=range(1, np.max(comments_until_derail_vals)), density=True)\n", + "plt.xlim(1,10)\n", + "plt.xticks(np.arange(1,10)+0.5, np.arange(1,10))\n", + "plt.yticks(np.arange(0,0.25,0.05), np.arange(0,25,5))\n", "plt.xlabel(\"Number of comments elapsed\")\n", "plt.ylabel(\"% of conversations\")\n", "plt.show()" diff --git a/convokit/forecaster/CRAFT/demos/craft_demo_training.ipynb b/convokit/forecaster/CRAFT/demos/craft_demo_training.ipynb index 7ebce0f7..441f18cb 100644 --- a/convokit/forecaster/CRAFT/demos/craft_demo_training.ipynb +++ b/convokit/forecaster/CRAFT/demos/craft_demo_training.ipynb @@ -54,7 +54,9 @@ } ], "source": [ - "craft_model = CRAFTModel(device_type=\"cpu\", options={\"validation_size\": 0.2, \"train_epochs\": 5})" + "craft_model = CRAFTModel(device_type=\"cpu\", options={'validation_size': 0.2,\n", + " 'train_epochs': 5\n", + " })" ] }, { @@ -63,17 +65,15 @@ "metadata": {}, "outputs": [], "source": [ - "forecaster = Forecaster(\n", - " forecaster_model=craft_model,\n", - " forecast_mode=\"past\",\n", - " convo_structure=\"linear\",\n", - " text_func=lambda utt: utt.meta[\"tokens\"][: (MAX_LENGTH - 1)],\n", - " label_func=lambda utt: int(utt.meta[\"comment_has_personal_attack\"]),\n", - " forecast_attribute_name=\"prediction\",\n", - " forecast_prob_attribute_name=\"pred_score\",\n", - " use_last_only=True,\n", - " skip_broken_convos=False,\n", - ")" + "forecaster = Forecaster(forecaster_model = craft_model,\n", + " forecast_mode = 'past',\n", + " convo_structure=\"linear\",\n", + " text_func = lambda utt: utt.meta[\"tokens\"][:(MAX_LENGTH-1)],\n", + " label_func = lambda 
utt: int(utt.meta['comment_has_personal_attack']),\n", + " forecast_attribute_name=\"prediction\", forecast_prob_attribute_name=\"pred_score\",\n", + " use_last_only = True,\n", + " skip_broken_convos=False\n", + " )" ] }, { @@ -200,11 +200,8 @@ } ], "source": [ - "forecaster.fit(\n", - " corpus,\n", - " selector=lambda convo: convo.meta[\"split\"] == \"train\",\n", - " ignore_utterances=lambda utt: utt.meta[\"is_section_header\"],\n", - ")" + "forecaster.fit(corpus, selector = lambda convo: convo.meta[\"split\"] == \"train\",\n", + " ignore_utterances = lambda utt: utt.meta[\"is_section_header\"])" ] }, { diff --git a/convokit/forecaster/tests/cumulativeBoW_demo.ipynb b/convokit/forecaster/tests/cumulativeBoW_demo.ipynb index 6b08b5ec..cbfd7f18 100644 --- a/convokit/forecaster/tests/cumulativeBoW_demo.ipynb +++ b/convokit/forecaster/tests/cumulativeBoW_demo.ipynb @@ -23,7 +23,7 @@ } ], "source": [ - "corpus = Corpus(filename=download(\"subreddit-Cornell\"))" + "corpus = Corpus(filename=download('subreddit-Cornell'))" ] }, { @@ -58,7 +58,7 @@ "metadata": {}, "outputs": [], "source": [ - "convo = corpus.get_conversation(\"o31u0\")" + "convo = corpus.get_conversation('o31u0')" ] }, { @@ -224,7 +224,7 @@ "source": [ "# Adding a 'y' feature to fit to\n", "for utt in corpus.iter_utterances():\n", - " utt.add_meta(\"pos_score\", int(utt.meta[\"score\"] > 0))" + " utt.add_meta('pos_score', int(utt.meta['score'] > 0))" ] }, { @@ -243,7 +243,7 @@ } ], "source": [ - "forecaster = Forecaster(label_func=lambda utt: utt.meta[\"pos_score\"], skip_broken_convos=True)" + "forecaster = Forecaster(label_func=lambda utt: utt.meta['pos_score'], skip_broken_convos=True)" ] }, { @@ -330,7 +330,7 @@ "metadata": {}, "outputs": [], "source": [ - "corpus.get_utterance(\"dpn8e4v\")" + "corpus.get_utterance('dpn8e4v')" ] }, { @@ -339,7 +339,7 @@ "metadata": {}, "outputs": [], "source": [ - "corpus.get_utterance(\"dpn8e4v\").root" + "corpus.get_utterance('dpn8e4v').root" ] }, { @@ -348,7 
+348,7 @@ "metadata": {}, "outputs": [], "source": [ - "corpus.get_conversation(corpus.get_utterance(\"dpn8e4v\").root).print_conversation_structure()" + "corpus.get_conversation(corpus.get_utterance('dpn8e4v').root).print_conversation_structure()" ] }, { @@ -364,9 +364,7 @@ "metadata": {}, "outputs": [], "source": [ - "corpus.get_conversation(corpus.get_utterance(\"dpn8e4v\").root).print_conversation_structure(\n", - " lambda utt: str(utt.meta[\"forecast\"])\n", - ")" + "corpus.get_conversation(corpus.get_utterance('dpn8e4v').root).print_conversation_structure(lambda utt: str(utt.meta['forecast']))" ] }, { @@ -382,9 +380,7 @@ "metadata": {}, "outputs": [], "source": [ - "corpus.get_conversation(corpus.get_utterance(\"dpn8e4v\").root).print_conversation_structure(\n", - " lambda utt: str(utt.meta[\"pos_score\"])\n", - ")" + "corpus.get_conversation(corpus.get_utterance('dpn8e4v').root).print_conversation_structure(lambda utt: str(utt.meta['pos_score']))" ] }, { @@ -393,8 +389,8 @@ "metadata": {}, "outputs": [], "source": [ - "forecasts = [utt.meta[\"forecast\"] for utt in corpus.iter_utterances()]\n", - "actual = [utt.meta[\"pos_score\"] for utt in corpus.iter_utterances()]" + "forecasts = [utt.meta['forecast'] for utt in corpus.iter_utterances()]\n", + "actual = [utt.meta['pos_score'] for utt in corpus.iter_utterances()]" ] }, { @@ -403,9 +399,7 @@ "metadata": {}, "outputs": [], "source": [ - "y_true_pred = [\n", - " (forecast, actual) for forecast, actual in zip(forecasts, actual) if forecast is not None\n", - "]" + "y_true_pred = [(forecast, actual) for forecast, actual in zip(forecasts, actual) if forecast is not None]" ] }, { diff --git a/convokit/model/storageManager.py b/convokit/model/backendMapper.py similarity index 89% rename from convokit/model/storageManager.py rename to convokit/model/backendMapper.py index d7ffa441..8cb0665a 100644 --- a/convokit/model/storageManager.py +++ b/convokit/model/backendMapper.py @@ -6,17 +6,18 @@ import pickle -class 
StorageManager(metaclass=ABCMeta): +class BackendMapper(metaclass=ABCMeta): """ Abstraction layer for the concrete representation of data and metadata within corpus components (e.g., Utterance text and timestamps). All requests to access or modify corpusComponent fields (with the exception of ID) are - actually routed through one of StorageManager's concrete subclasses. Each - subclass implements a storage backend that contains the actual data. + actually routed through one of BackendMapper's concrete subclasses. Each + subclass implements a concrete backend mapping from ConvoKit operations to actual data. + (These mappings are referred to as collections.) """ def __init__(self): - # concrete data storage (i.e., collections) for each component type + # concrete data backend (i.e., collections) for each component type # this will be assigned in subclasses self.data = {"utterance": None, "conversation": None, "speaker": None, "meta": None} @@ -84,7 +85,7 @@ def delete_data( self, component_type: str, component_id: str, property_name: Optional[str] = None ): """ - Delete a data entry from this StorageManager for the component of type + Delete a data entry from this BackendMapper for the component of type component_type with id component_id. If property_name is specified delete only that property, otherwise delete the entire entry. """ @@ -93,7 +94,7 @@ def delete_data( @abstractmethod def clear_all_data(self): """ - Erase all data from this StorageManager (i.e., reset self.data to its + Erase all data from this BackendMapper (i.e., reset self.data to its initial empty state; Python will garbage-collect the now-unreferenced old data entries). This is used for cleanup after destructive Corpus operations. 
@@ -104,7 +105,7 @@ def clear_all_data(self): def count_entries(self, component_type: str): """ Count the number of entries held for the specified component type by - this StorageManager instance + this BackendMapper instance """ return NotImplemented @@ -117,7 +118,7 @@ def get_collection(self, component_type: str): def purge_obsolete_entries(self, utterance_ids, conversation_ids, speaker_ids, meta_ids): """ - Compare the entries in this StorageManager to the existing component ids + Compare the entries in this BackendMapper to the existing component ids provided as parameters, and delete any entries that are not found in the parameter ids. """ @@ -133,9 +134,9 @@ def purge_obsolete_entries(self, utterance_ids, conversation_ids, speaker_ids, m self.delete_data(obj_type, obj_id) -class MemStorageManager(StorageManager): +class MemMapper(BackendMapper): """ - Concrete StorageManager implementation for in-memory data storage. + Concrete BackendMapper implementation for in-memory data storage. Collections are implemented as vanilla Python dicts. """ @@ -170,7 +171,7 @@ def get_data( collection = self.get_collection(component_type) if component_id not in collection: raise KeyError( - f"This StorageManager does not have an entry for the {component_type} with id {component_id}." + f"This BackendMapper does not have an entry for the {component_type} with id {component_id}." ) if property_name is None: return collection[component_id] @@ -190,7 +191,7 @@ def update_data( # CorpusComponent constructor so if the ID is missing that indicates something is wrong if component_id not in collection: raise KeyError( - f"This StorageManager does not have an entry for the {component_type} with id {component_id}." + f"This BackendMapper does not have an entry for the {component_type} with id {component_id}." 
) collection[component_id][property_name] = new_value @@ -200,7 +201,7 @@ def delete_data( collection = self.get_collection(component_type) if component_id not in collection: raise KeyError( - f"This StorageManager does not have an entry for the {component_type} with id {component_id}." + f"This BackendMapper does not have an entry for the {component_type} with id {component_id}." ) if property_name is None: del collection[component_id] @@ -215,9 +216,9 @@ def count_entries(self, component_type: str): return len(self.get_collection(component_type)) -class DBStorageManager(StorageManager): +class DBMapper(BackendMapper): """ - Concrete StorageManager implementation for database-backed data storage. + Concrete BackendMapper implementation for database-backed data storage. Collections are implemented as MongoDB collections. """ @@ -272,7 +273,7 @@ def get_data( all_fields = collection.find_one({"_id": component_id}) if all_fields is None: raise KeyError( - f"This StorageManager does not have an entry for the {component_type} with id {component_id}." + f"This BackendMapper does not have an entry for the {component_type} with id {component_id}." 
) if property_name is None: # if some data is known to be binary type, unpack it diff --git a/convokit/model/convoKitMeta.py b/convokit/model/convoKitMeta.py index 72a36e2e..ea444e6d 100644 --- a/convokit/model/convoKitMeta.py +++ b/convokit/model/convoKitMeta.py @@ -7,6 +7,7 @@ from .convoKitIndex import ConvoKitIndex import json from typing import Union +import copy # See reference: https://stackoverflow.com/questions/7760916/correct-usage-of-a-getter-setter-for-dictionary-values @@ -21,29 +22,38 @@ def __init__(self, owner, convokit_index, obj_type, overwrite=False): self.index: ConvoKitIndex = convokit_index self.obj_type = obj_type - self._get_storage().initialize_data_for_component( - "meta", self.storage_key, overwrite=overwrite + self._get_backend().initialize_data_for_component( + "meta", self.backend_key, overwrite=overwrite ) @property - def storage_key(self) -> str: + def backend_key(self) -> str: return f"{self.obj_type}_{self.owner.id}" def __getitem__(self, item): - return self._get_storage().get_data( - "meta", self.storage_key, item, self.index.get_index(self.obj_type) + # in DB mode, metadata field mutation would not be updated. (ex. mutating dict/list metadata fields) + # we align MEM mode behavior and DB mode by making deepcopy of metadata fields, so mutation no longer + # affect corpus metadata backend, but only acting on the copy of it. 
+ item = self._get_backend().get_data( + "meta", self.backend_key, item, self.index.get_index(self.obj_type) ) + immutable_types = (int, float, bool, complex, str, tuple, frozenset) + if isinstance(item, immutable_types): + return item + else: + # return copy.deepcopy(item) if item is not common python immutable type + return copy.deepcopy(item) - def _get_storage(self): + def _get_backend(self): # special case for Corpus meta since that's the only time owner is not a CorpusComponent # since cannot directly import Corpus to check the type (circular import), as a proxy we # check for the obj_type attribute which is common to all CorpusComponent but not # present in Corpus if not hasattr(self.owner, "obj_type"): - return self.owner.storage + return self.owner.backend_mapper # self.owner -> CorpusComponent - # self.owner.owner -> Corpus that owns the CorpusComponent (only Corpus has direct pointer to storage) - return self.owner.owner.storage + # self.owner.owner -> Corpus that owns the CorpusComponent (only Corpus has direct pointer to backend) + return self.owner.owner.backend_mapper @staticmethod def _check_type_and_update_index(index, obj_type, key, value): @@ -72,14 +82,14 @@ def __setitem__(self, key, value): if self.index.type_check: ConvoKitMeta._check_type_and_update_index(self.index, self.obj_type, key, value) - self._get_storage().update_data( - "meta", self.storage_key, key, value, self.index.get_index(self.obj_type) + self._get_backend().update_data( + "meta", self.backend_key, key, value, self.index.get_index(self.obj_type) ) def __delitem__(self, key): if self.obj_type == "corpus": self.index.del_from_index(self.obj_type, key) - self._get_storage().delete_data("meta", self.storage_key, key) + self._get_backend().delete_data("meta", self.backend_key, key) else: if self.index.lock_metadata_deletion[self.obj_type]: warn( @@ -91,26 +101,26 @@ def __delitem__(self, key): ) ) else: - self._get_storage().delete_data("meta", self.storage_key, key) + 
self._get_backend().delete_data("meta", self.backend_key, key) def __iter__(self): return ( - self._get_storage() - .get_data("meta", self.storage_key, index=self.index.get_index(self.obj_type)) + self._get_backend() + .get_data("meta", self.backend_key, index=self.index.get_index(self.obj_type)) .__iter__() ) def __len__(self): return ( - self._get_storage() - .get_data("meta", self.storage_key, index=self.index.get_index(self.obj_type)) + self._get_backend() + .get_data("meta", self.backend_key, index=self.index.get_index(self.obj_type)) .__len__() ) def __contains__(self, x): return ( - self._get_storage() - .get_data("meta", self.storage_key, index=self.index.get_index(self.obj_type)) + self._get_backend() + .get_data("meta", self.backend_key, index=self.index.get_index(self.obj_type)) .__contains__(x) ) @@ -119,8 +129,8 @@ def __repr__(self) -> str: def to_dict(self): return dict( - self._get_storage().get_data( - "meta", self.storage_key, index=self.index.get_index(self.obj_type) + self._get_backend().get_data( + "meta", self.backend_key, index=self.index.get_index(self.obj_type) ) ) @@ -134,8 +144,8 @@ def reinitialize_from(self, other: Union["ConvoKitMeta", dict]): raise TypeError( "ConvoKitMeta can only be reinitialized from a dict instance or another ConvoKitMeta" ) - self._get_storage().initialize_data_for_component( - "meta", self.storage_key, overwrite=True, initial_value=other + self._get_backend().initialize_data_for_component( + "meta", self.backend_key, overwrite=True, initial_value=other ) diff --git a/convokit/model/corpus.py b/convokit/model/corpus.py index dc59e87d..7ec3883e 100644 --- a/convokit/model/corpus.py +++ b/convokit/model/corpus.py @@ -10,7 +10,7 @@ from .convoKitMatrix import ConvoKitMatrix from .corpusUtil import * from .corpus_helpers import * -from .storageManager import StorageManager +from .backendMapper import BackendMapper class Corpus: @@ -19,6 +19,8 @@ class Corpus: :param filename: Path to a folder containing a Corpus or 
to an utterances.jsonl / utterances.json file to load :param utterances: list of utterances to initialize Corpus from + :param db_collection_prefix: if a db backend is used, this determines how the database will be named. If not specified, a random name will be used. + :param db_host: if specified, and a db backend is used, connect to the database at this URL. If not specified, will default to the db_host in the ConvoKit global configuration file. :param preload_vectors: list of names of vectors to be preloaded from directory; by default, no vectors are loaded but can be loaded any time after corpus initialization (i.e. vectors are lazy-loaded). :param utterance_start_index: if loading from directory and the corpus folder contains utterances.jsonl, specify the @@ -36,6 +38,9 @@ class Corpus: index.json is already accurate and disabling it will allow for a faster corpus load. This parameter is set to True by default, i.e. type-checking is not carried out. + :param backend: specify the backend type, either “mem” or “db”, default to “mem”. + :param backend_mapper: (advanced usage only) if provided, use this as the BackendMapper instance instead of initializing a new one. 
+ :ivar meta_index: index of Corpus metadata :ivar vectors: the vectors stored in the Corpus :ivar corpus_dirpath: path to the directory the corpus was loaded from @@ -56,29 +61,29 @@ def __init__( exclude_speaker_meta: Optional[List[str]] = None, exclude_overall_meta: Optional[List[str]] = None, disable_type_check=True, - storage_type: Optional[str] = None, - storage: Optional[StorageManager] = None, + backend: Optional[str] = None, + backend_mapper: Optional[BackendMapper] = None, ): self.config = ConvoKitConfig() self.corpus_dirpath = get_corpus_dirpath(filename) # configure corpus ID (optional for mem mode, required for DB mode) - if storage_type is None: - storage_type = self.config.default_storage_mode - if db_collection_prefix is None and filename is None and storage_type == "db": + if backend is None: + backend = self.config.default_backend + if db_collection_prefix is None and filename is None and backend == "db": db_collection_prefix = create_safe_id() warn( "You are in DB mode, but no collection prefix was specified and no filename was given from which to infer one." 
"Will use a randomly generated unique prefix " + db_collection_prefix ) - self.id = get_corpus_id(db_collection_prefix, filename, storage_type) - self.storage_type = storage_type - self.storage = initialize_storage(self, storage, storage_type, db_host) + self.id = get_corpus_id(db_collection_prefix, filename, backend) + self.backend = backend + self.backend_mapper = initialize_backend(self, backend_mapper, backend, db_host) self.meta_index = ConvoKitIndex(self) self.meta = ConvoKitMeta(self, self.meta_index, "corpus") - # private storage + # private backend self._vector_matrices = dict() convos_data = defaultdict(dict) @@ -91,10 +96,10 @@ def __init__( if exclude_overall_meta is None: exclude_overall_meta = [] - if filename is not None and storage_type == "db": + if filename is not None and backend == "db": # JSON-to-DB construction mode uses a specialized code branch, which # optimizes for this use case by using direct batch insertions into the - # DB rather than going through the StorageManager, hence improving + # DB rather than going through the BackendMapper, hence improving # efficiency. with open(os.path.join(filename, "index.json"), "r") as f: @@ -104,7 +109,7 @@ def __init__( # populate the DB with the contents of the source file ids_in_db = populate_db_from_file( filename, - self.storage.db, + self.backend_mapper.db, self.id, self.meta_index, utterance_start_index, @@ -115,9 +120,9 @@ def __init__( exclude_overall_meta, ) - # with the StorageManager's DB now populated, initialize the corresponding + # with the BackendMapper's DB now populated, initialize the corresponding # CorpusComponent instances. - init_corpus_from_storage_manager(self, ids_in_db) + init_corpus_from_backend_manager(self, ids_in_db) self.meta_index.enable_type_check() # load preload_vectors @@ -216,10 +221,10 @@ def reconnect_to_db(cls, db_collection_prefix: str, db_host: Optional[str] = Non resume where you left off. 
""" # create a blank Corpus that will hold the data - result = cls(db_collection_prefix=db_collection_prefix, db_host=db_host, storage_type="db") - # through the constructor, the blank Corpus' StorageManager is now connected + result = cls(db_collection_prefix=db_collection_prefix, db_host=db_host, backend="db") + # through the constructor, the blank Corpus' BackendMapper is now connected # to the DB. Next use the DB contents to populate the corpus components. - init_corpus_from_storage_manager(result) + init_corpus_from_backend_manager(result) return result @@ -613,15 +618,15 @@ def filter_conversations_by(self, selector: Callable[[Conversation], bool]): self.update_speakers_data() self.reinitialize_index() - # clear all storage entries corresponding to filtered-out components - meta_ids = [self.meta.storage_key] + # clear all backend entries corresponding to filtered-out components + meta_ids = [self.meta.backend_key] for utt in self.iter_utterances(): - meta_ids.append(utt.meta.storage_key) + meta_ids.append(utt.meta.backend_key) for convo in self.iter_conversations(): - meta_ids.append(convo.meta.storage_key) + meta_ids.append(convo.meta.backend_key) for speaker in self.iter_speakers(): - meta_ids.append(speaker.meta.storage_key) - self.storage.purge_obsolete_entries( + meta_ids.append(speaker.meta.backend_key) + self.backend_mapper.purge_obsolete_entries( self.get_utterance_ids(), self.get_conversation_ids(), self.get_speaker_ids(), meta_ids ) @@ -645,8 +650,8 @@ def filter_utterances(source_corpus: "Corpus", selector: Callable[[Utterance], b convo.meta.update(source_corpus.get_conversation(convo.id).meta) # original Corpus is invalidated and no longer usable; clear all data from - # its now-orphaned StorageManager to avoid having duplicates in memory - source_corpus.storage.clear_all_data() + # its now-orphaned BackendMapper to avoid having duplicates in memory + source_corpus.backend_mapper.clear_all_data() return new_corpus @@ -720,8 +725,8 @@ def 
reindex_conversations( print(missing_convo_roots) # original Corpus is invalidated and no longer usable; clear all data from - # its now-orphaned StorageManager to avoid having duplicates in memory - source_corpus.storage.clear_all_data() + # its now-orphaned BackendMapper to avoid having duplicates in memory + source_corpus.backend_mapper.clear_all_data() return new_corpus @@ -1027,10 +1032,10 @@ def merge(primary: "Corpus", secondary: "Corpus", warnings: bool = True): new_corpus.reinitialize_index() # source corpora are now invalidated and all needed data has been copied - # into the new merged corpus; clear the source corpora's storage to + # into the new merged corpus; clear the source corpora's backend mapper to # prevent having duplicates in memory - primary.storage.clear_all_data() - secondary.storage.clear_all_data() + primary.backend_mapper.clear_all_data() + secondary.backend_mapper.clear_all_data() return new_corpus @@ -1295,9 +1300,9 @@ def load_info(self, obj_type, fields=None, dir_name=None): for field in fields: # self.aux_info[field] = self.load_jsonlist_to_dict( # os.path.join(dir_name, 'feat.%s.jsonl' % field)) - if self.storage_type == "mem": + if self.backend == "mem": load_info_to_mem(self, dir_name, obj_type, field) - elif self.storage_type == "db": + elif self.backend == "db": load_info_to_db(self, dir_name, obj_type, field) def dump_info(self, obj_type, fields, dir_name=None): diff --git a/convokit/model/corpusComponent.py b/convokit/model/corpusComponent.py index 83064ab3..1ae8dd49 100644 --- a/convokit/model/corpusComponent.py +++ b/convokit/model/corpusComponent.py @@ -20,12 +20,12 @@ def __init__( self.vectors = vectors if vectors is not None else [] # if the CorpusComponent is initialized with an owner set up an entry - # in the owner's storage; if it is not initialized with an owner - # (i.e. it is a standalone object) set up a dict-based temp storage + # in the owner's backend; if it is not initialized with an owner + # (i.e. 
it is a standalone object) set up a dict-based temp backend if self.owner is None: - self._temp_storage = initial_data if initial_data is not None else {} + self._temp_backend = initial_data if initial_data is not None else {} else: - self.owner.storage.initialize_data_for_component( + self.owner.backend_mapper.initialize_data_for_component( self.obj_type, self._id, initial_value=(initial_data if initial_data is not None else {}), @@ -42,28 +42,28 @@ def set_owner(self, owner): if owner is self._owner: # no action needed return - # stash the metadata first since reassigning self._owner will break its storage connection + # stash the metadata first since reassigning self._owner will break its backend connection meta_vals = {k: v for k, v in self.meta.items()} previous_owner = self._owner self._owner = owner if owner is not None: # when a new owner Corpus is assigned, we must take the following steps: - # (1) transfer this component's data to the new owner's StorageManager - # (2) avoid duplicates by removing the data from the old owner (or temp storage if there was no prior owner) + # (1) transfer this component's data to the new owner's BackendMapper + # (2) avoid duplicates by removing the data from the old owner (or temp backend if there was no prior owner) # (3) reinitialize the metadata instance data_dict = ( - dict(previous_owner.storage.get_data(self.obj_type, self.id)) + dict(previous_owner.backend_mapper.get_data(self.obj_type, self.id)) if previous_owner is not None - else self._temp_storage + else self._temp_backend ) - self.owner.storage.initialize_data_for_component( + self.owner.backend_mapper.initialize_data_for_component( self.obj_type, self.id, initial_value=data_dict ) if previous_owner is not None: - previous_owner.storage.delete_data(self.obj_type, self.id) - previous_owner.storage.delete_data("meta", self.meta.storage_key) + previous_owner.backend_mapper.delete_data(self.obj_type, self.id) + previous_owner.backend_mapper.delete_data("meta", 
self.meta.backend_key) else: - del self._temp_storage + del self._temp_backend self._meta = self.init_meta(meta_vals) owner = property(get_owner, set_owner) @@ -71,7 +71,7 @@ def set_owner(self, owner): def init_meta(self, meta, overwrite=False): if self._owner is None: # ConvoKitMeta instances are not allowed for ownerless (standalone) - # components since they must be backed by a StorageManager. In this + # components since they must be backed by a BackendMapper. In this # case we must forcibly convert the ConvoKitMeta instance to dict if isinstance(meta, ConvoKitMeta): meta = meta.to_dict() @@ -110,14 +110,14 @@ def set_meta(self, new_meta): def get_data(self, property_name): if self._owner is None: - return self._temp_storage[property_name] - return self.owner.storage.get_data(self.obj_type, self.id, property_name) + return self._temp_backend[property_name] + return self.owner.backend_mapper.get_data(self.obj_type, self.id, property_name) def set_data(self, property_name, value): if self._owner is None: - self._temp_storage[property_name] = value + self._temp_backend[property_name] = value else: - self.owner.storage.update_data(self.obj_type, self.id, property_name, value) + self.owner.backend_mapper.update_data(self.obj_type, self.id, property_name, value) # def __eq__(self, other): # if type(self) != type(other): return False diff --git a/convokit/model/corpus_helpers.py b/convokit/model/corpus_helpers.py index 056230a3..62a90aee 100644 --- a/convokit/model/corpus_helpers.py +++ b/convokit/model/corpus_helpers.py @@ -16,7 +16,7 @@ from .convoKitIndex import ConvoKitIndex from .convoKitMeta import ConvoKitMeta from .speaker import Speaker -from .storageManager import StorageManager, MemStorageManager, DBStorageManager +from .backendMapper import BackendMapper, MemMapper, DBMapper from .utterance import Utterance BIN_DELIM_L, BIN_DELIM_R = "<##bin{", "}&&@**>" @@ -34,7 +34,7 @@ def get_corpus_id( - db_collection_prefix: Optional[str], filename: Optional[str], 
storage_type: str + db_collection_prefix: Optional[str], filename: Optional[str], backend: str ) -> Optional[str]: if db_collection_prefix is not None: # treat the unique collection prefix as the ID (even if a filename is specified) @@ -45,7 +45,7 @@ def get_corpus_id( else: corpus_id = None - if storage_type == "db" and corpus_id is not None: + if backend == "db" and corpus_id is not None: compatibility_msg = check_id_for_mongodb(corpus_id) if compatibility_msg is not None: random_id = create_safe_id() @@ -82,21 +82,21 @@ def get_corpus_dirpath(filename: str) -> Optional[str]: return os.path.dirname(filename) -def initialize_storage( - corpus: "Corpus", storage: Optional[StorageManager], storage_type: str, db_host: Optional[str] +def initialize_backend( + corpus: "Corpus", backend_mapper: Optional[BackendMapper], backend: str, db_host: Optional[str] ): - if storage is not None: - return storage + if backend_mapper is not None: + return backend_mapper else: - if storage_type == "mem": - return MemStorageManager() - elif storage_type == "db": + if backend == "mem": + return MemMapper() + elif backend == "db": if db_host is None: db_host = corpus.config.db_host - return DBStorageManager(corpus.id, db_host) + return DBMapper(corpus.id, db_host) else: raise ValueError( - f"Unrecognized setting '{storage_type}' for storage type; should be either 'mem' or 'db'." + f"Unrecognized setting '{backend}' for backend type; should be either 'mem' or 'db'." 
) @@ -820,7 +820,7 @@ def load_info_to_db(corpus, dir_name, obj_type, field, index_key="id", value_key its contents into the DB, and updates the Corpus' metadata index """ filename = os.path.join(dir_name, "info.%s.jsonl" % field) - meta_collection = corpus.storage.get_collection("meta") + meta_collection = corpus.backend_mapper.get_collection("meta") # attept to use saved type information index_file = os.path.join(dir_name, "index.json") @@ -886,7 +886,7 @@ def populate_db_from_file( ): """ Populate all necessary collections of a MongoDB database so that it can be - used by a DBStorageManager, sourcing data from the valid ConvoKit Corpus + used by a DBMapper, sourcing data from the valid ConvoKit Corpus data pointed to by the filename parameter. """ binary_meta, updated_exclude_meta = load_binary_metadata( @@ -947,19 +947,19 @@ def populate_db_from_file( return inserted_utt_ids -def init_corpus_from_storage_manager(corpus, utt_ids=None): +def init_corpus_from_backend_manager(corpus, utt_ids=None): """ Use an already-populated MongoDB database to initialize the components of the specified Corpus (which should be empty before this function is called) """ # we will bypass the initialization step when constructing components since # we know their necessary data already exists within the db - corpus.storage.bypass_init = True + corpus.backend_mapper.bypass_init = True # fetch object ids from the DB and initialize corpus components for them # create speakers first so we can refer to them when initializing utterances speakers = {} - for speaker_doc in corpus.storage.data["speaker"].find(projection=["_id"]): + for speaker_doc in corpus.backend_mapper.data["speaker"].find(projection=["_id"]): speaker_id = speaker_doc["_id"] speakers[speaker_id] = Speaker(owner=corpus, id=speaker_id) corpus.speakers = speakers @@ -967,7 +967,7 @@ def init_corpus_from_storage_manager(corpus, utt_ids=None): # next, create utterances utterances = {} convo_to_utts = defaultdict(list) - for 
utt_doc in corpus.storage.data["utterance"].find( + for utt_doc in corpus.backend_mapper.data["utterance"].find( projection=["_id", "speaker_id", "conversation_id"] ): utt_id = utt_doc["_id"] @@ -983,5 +983,5 @@ def init_corpus_from_storage_manager(corpus, utt_ids=None): corpus.meta_index.enable_type_check() corpus.update_speakers_data() - # restore the StorageManager's init behavior to default - corpus.storage.bypass_init = False + # restore the BackendMapper's init behavior to default + corpus.backend_mapper.bypass_init = False diff --git a/convokit/paired_prediction/pairer.py b/convokit/paired_prediction/pairer.py index 0d39d7ed..67381a9e 100644 --- a/convokit/paired_prediction/pairer.py +++ b/convokit/paired_prediction/pairer.py @@ -70,10 +70,22 @@ def _pair_objs(self, pos_objects, neg_objects): pair_feat_to_neg_objs = defaultdict(list) for obj in pos_objects: - pair_feat_to_pos_objs[self.pairing_func(obj)].append(obj) + if self.pair_mode == "maximize": + try: + pair_feat_to_pos_objs[str(self.pairing_func(obj))].append(obj) + except Exception as e: + pair_feat_to_pos_objs[self.pairing_func(obj)].append(obj) + else: + pair_feat_to_pos_objs[self.pairing_func(obj)].append(obj) for obj in neg_objects: - pair_feat_to_neg_objs[self.pairing_func(obj)].append(obj) + if self.pair_mode == "maximize": + try: + pair_feat_to_neg_objs[str(self.pairing_func(obj))].append(obj) + except Exception as e: + pair_feat_to_neg_objs[self.pairing_func(obj)].append(obj) + else: + pair_feat_to_neg_objs[self.pairing_func(obj)].append(obj) valid_pairs = set(pair_feat_to_neg_objs).intersection(set(pair_feat_to_pos_objs)) diff --git a/convokit/ranker/demos/ranker_demo.ipynb b/convokit/ranker/demos/ranker_demo.ipynb index d9dc1deb..aa045f56 100644 --- a/convokit/ranker/demos/ranker_demo.ipynb +++ b/convokit/ranker/demos/ranker_demo.ipynb @@ -24,7 +24,7 @@ } ], "source": [ - "corpus = Corpus(filename=download(\"subreddit-Cornell\"))" + "corpus = Corpus(filename=download('subreddit-Cornell'))" 
] }, { @@ -187,7 +187,7 @@ "# Sanity check of (rank, score) pairings\n", "utt_sample = list(corpus.iter_utterances())[:10]\n", "\n", - "sorted([(utt.meta[\"rank\"], utt.meta[\"score\"]) for utt in utt_sample], key=lambda x: x[0])" + "sorted([(utt.meta['rank'], utt.meta['score']) for utt in utt_sample], key=lambda x: x[0]) " ] }, { @@ -203,12 +203,11 @@ "metadata": {}, "outputs": [], "source": [ - "ranker = convokit.Ranker(\n", - " obj_type=\"speaker\",\n", - " score_func=lambda user: len(list(user.iter_utterances())),\n", - " score_attribute_name=\"num_utts\",\n", - " rank_attribute_name=\"num_utts_rank\",\n", - ")" + "ranker = convokit.Ranker(obj_type=\"speaker\", \n", + " score_func=lambda user: len(list(user.iter_utterances())), \n", + " score_attribute_name=\"num_utts\",\n", + " rank_attribute_name=\"num_utts_rank\"\n", + " )" ] }, { @@ -344,10 +343,7 @@ "# Sanity check of (rank, score) pairings\n", "speaker_sample = list(corpus.iter_speakers())[:10]\n", "\n", - "sorted(\n", - " [(spkr.meta[\"num_utts_rank\"], spkr.meta[\"num_utts\"]) for spkr in speaker_sample],\n", - " key=lambda x: x[0],\n", - ")" + "sorted([(spkr.meta['num_utts_rank'], spkr.meta['num_utts']) for spkr in speaker_sample], key=lambda x: x[0]) " ] }, { diff --git a/convokit/surprise/demos/surprise_demo.ipynb b/convokit/surprise/demos/surprise_demo.ipynb index 0cf249f9..92946d6c 100644 --- a/convokit/surprise/demos/surprise_demo.ipynb +++ b/convokit/surprise/demos/surprise_demo.ipynb @@ -49,7 +49,7 @@ } ], "source": [ - "corpus = Corpus(filename=download(\"subreddit-Cornell\"))" + "corpus = Corpus(filename=download('subreddit-Cornell'))" ] }, { @@ -86,9 +86,7 @@ "metadata": {}, "outputs": [], "source": [ - "SPEAKER_BLACKLIST = [\"[deleted]\", \"DeltaBot\", \"AutoModerator\"]\n", - "\n", - "\n", + "SPEAKER_BLACKLIST = ['[deleted]', 'DeltaBot', 'AutoModerator']\n", "def utterance_is_valid(utterance):\n", " return utterance.speaker.id not in SPEAKER_BLACKLIST and utterance.text" ] @@ -119,7 +117,7 
@@ "metadata": {}, "outputs": [], "source": [ - "speaker_activities = corpus.get_attribute_table(\"speaker\", [\"n_convos\"])" + "speaker_activities = corpus.get_attribute_table('speaker', ['n_convos'])" ] }, { @@ -221,7 +219,7 @@ } ], "source": [ - "speaker_activities.sort_values(\"n_convos\", ascending=False).head(10)" + "speaker_activities.sort_values('n_convos', ascending=False).head(10)" ] }, { @@ -230,7 +228,7 @@ "metadata": {}, "outputs": [], "source": [ - "top_speakers = speaker_activities.sort_values(\"n_convos\", ascending=False).head(100).index" + "top_speakers = speaker_activities.sort_values('n_convos', ascending=False).head(100).index" ] }, { @@ -241,10 +239,7 @@ "source": [ "import itertools\n", "\n", - "subset_utts = [\n", - " list(corpus.get_speaker(speaker).iter_utterances(selector=utterance_is_valid))\n", - " for speaker in top_speakers\n", - "]\n", + "subset_utts = [list(corpus.get_speaker(speaker).iter_utterances(selector=utterance_is_valid)) for speaker in top_speakers]\n", "subset_corpus = Corpus(utterances=list(itertools.chain(*subset_utts)))" ] }, @@ -292,9 +287,9 @@ "source": [ "import spacy\n", "\n", - "spacy_nlp = spacy.load(\"en_core_web_sm\", disable=[\"ner\", \"parser\", \"tagger\", \"lemmatizer\"])\n", + "spacy_nlp = spacy.load('en_core_web_sm', disable=['ner','parser', 'tagger', 'lemmatizer'])\n", "for utt in subset_corpus.iter_utterances():\n", - " utt.meta[\"joined_tokens\"] = [t.text.lower() for t in spacy_nlp(utt.text)]" + " utt.meta['joined_tokens'] = [t.text.lower() for t in spacy_nlp(utt.text)]" ] }, { @@ -303,14 +298,7 @@ "metadata": {}, "outputs": [], "source": [ - "surp = Surprise(\n", - " tokenizer=lambda x: x,\n", - " model_key_selector=lambda utt: \"_\".join([utt.speaker.id, utt.conversation_id]),\n", - " target_sample_size=100,\n", - " context_sample_size=1000,\n", - " n_samples=50,\n", - " smooth=True,\n", - ")" + "surp = Surprise(tokenizer=lambda x: x, model_key_selector=lambda utt: '_'.join([utt.speaker.id, 
utt.conversation_id]), target_sample_size=100, context_sample_size=1000, n_samples=50, smooth=True)" ] }, { @@ -328,20 +316,7 @@ } ], "source": [ - "surp = surp.fit(\n", - " subset_corpus,\n", - " text_func=lambda utt: [\n", - " list(\n", - " itertools.chain(\n", - " *[\n", - " u.meta[\"joined_tokens\"]\n", - " for u in utt.speaker.iter_utterances()\n", - " if u.conversation_id != utt.conversation_id\n", - " ]\n", - " )\n", - " )\n", - " ],\n", - ")" + "surp = surp.fit(subset_corpus, text_func=lambda utt: [list(itertools.chain(*[u.meta['joined_tokens'] for u in utt.speaker.iter_utterances() if u.conversation_id != utt.conversation_id]))])" ] }, { @@ -369,7 +344,7 @@ } ], "source": [ - "transformed_corpus = surp.transform(subset_corpus, obj_type=\"speaker\")" + "transformed_corpus = surp.transform(subset_corpus, obj_type='speaker')" ] }, { @@ -388,16 +363,10 @@ "source": [ "import pandas as pd\n", "from functools import reduce\n", - "\n", - "\n", - "def combine_dicts(x, y):\n", + "def combine_dicts(x,y):\n", " x.update(y)\n", " return x\n", - "\n", - "\n", - "surprise_scores = reduce(\n", - " combine_dicts, transformed_corpus.get_speakers_dataframe()[\"meta.surprise\"].values\n", - ")\n", + "surprise_scores = reduce(combine_dicts, transformed_corpus.get_speakers_dataframe()['meta.surprise'].values)\n", "suprise_series = pd.Series(surprise_scores).dropna()" ] }, diff --git a/convokit/surprise/demos/tennis_demo.ipynb b/convokit/surprise/demos/tennis_demo.ipynb index a3e9135d..a5012807 100644 --- a/convokit/surprise/demos/tennis_demo.ipynb +++ b/convokit/surprise/demos/tennis_demo.ipynb @@ -36,8 +36,8 @@ "metadata": {}, "outputs": [], "source": [ - "PATH = \"/home/axl4\" # replace with your path to tennis_data directory\n", - "data_dir = f\"{PATH}/tennis_data/\"" + "PATH = '/home/axl4' # replace with your path to tennis_data directory\n", + "data_dir = f'{PATH}/tennis_data/'" ] }, { @@ -46,7 +46,7 @@ "metadata": {}, "outputs": [], "source": [ - "corpus_speakers = 
{\"COMMENTATOR\": Speaker(id=\"COMMENTATOR\", meta={})}" + "corpus_speakers = {'COMMENTATOR': Speaker(id = 'COMMENTATOR', meta = {})}" ] }, { @@ -55,7 +55,7 @@ "metadata": {}, "outputs": [], "source": [ - "with open(data_dir + \"text_commentaries.json\", \"r\") as f:\n", + "with open(data_dir + 'text_commentaries.json', 'r') as f:\n", " commentaries = json.load(f)" ] }, @@ -76,17 +76,9 @@ "utterances = []\n", "count = 0\n", "for c in tqdm(commentaries):\n", - " idx = \"c{}\".format(count)\n", - " meta = {\"player_gender\": c[\"gender\"], \"scoreline\": c[\"scoreline\"]}\n", - " utterances.append(\n", - " Utterance(\n", - " id=idx,\n", - " speaker=corpus_speakers[\"COMMENTATOR\"],\n", - " conversation_id=idx,\n", - " text=c[\"commentary\"],\n", - " meta=meta,\n", - " )\n", - " )\n", + " idx = 'c{}'.format(count)\n", + " meta = {'player_gender': c['gender'], 'scoreline': c['scoreline']}\n", + " utterances.append(Utterance(id=idx, speaker=corpus_speakers['COMMENTATOR'], conversation_id=idx, text=c['commentary'], meta=meta))\n", " count += 1" ] }, @@ -121,7 +113,7 @@ } ], "source": [ - "interview_corpus = Corpus(filename=download(\"tennis-corpus\"))" + "interview_corpus = Corpus(filename=download('tennis-corpus'))" ] }, { @@ -156,11 +148,8 @@ "metadata": {}, "outputs": [], "source": [ - "for utt in interview_corpus.iter_utterances(selector=lambda u: u.meta[\"is_question\"]):\n", - " utt.add_meta(\n", - " \"player_gender\",\n", - " utt.get_conversation().get_utterance(utt.id.replace(\"q\", \"a\")).get_speaker().meta[\"gender\"],\n", - " )" + "for utt in interview_corpus.iter_utterances(selector=lambda u: u.meta['is_question']):\n", + " utt.add_meta('player_gender', utt.get_conversation().get_utterance(utt.id.replace('q', 'a')).get_speaker().meta['gender'])" ] }, { @@ -180,18 +169,10 @@ "source": [ "from nltk import word_tokenize\n", "\n", - "\n", "def tokenizer(text):\n", " return list(filter(lambda w: w.isalnum(), word_tokenize(text.lower())))\n", "\n", - "\n", - "surp 
= Surprise(\n", - " model_key_selector=lambda utt: \"corpus\",\n", - " tokenizer=tokenizer,\n", - " target_sample_size=10,\n", - " context_sample_size=None,\n", - " n_samples=3,\n", - ")" + "surp = Surprise(model_key_selector=lambda utt: 'corpus', tokenizer=tokenizer, target_sample_size=10, context_sample_size=None, n_samples=3)" ] }, { @@ -226,10 +207,7 @@ } ], "source": [ - "surp.fit(\n", - " game_commentary_corpus,\n", - " text_func=lambda utt: [\" \".join([u.text for u in game_commentary_corpus.iter_utterances()])],\n", - ")" + "surp.fit(game_commentary_corpus, text_func=lambda utt: [' '.join([u.text for u in game_commentary_corpus.iter_utterances()])])" ] }, { @@ -248,18 +226,9 @@ "import itertools\n", "\n", "SAMPLE = True\n", - "SAMPLE_SIZE = (\n", - " 10000 # edit this to change the number of interview questions to calculate surprise for\n", - ")\n", + "SAMPLE_SIZE = 10000 # edit this to change the number of interview questions to calculate surprise for\n", "\n", - "subset_utts = [\n", - " interview_corpus.get_utterance(utt)\n", - " for utt in interview_corpus.get_utterances_dataframe(\n", - " selector=lambda utt: utt.meta[\"is_question\"]\n", - " )\n", - " .sample(SAMPLE_SIZE)\n", - " .index\n", - "]\n", + "subset_utts = [interview_corpus.get_utterance(utt) for utt in interview_corpus.get_utterances_dataframe(selector=lambda utt: utt.meta['is_question']).sample(SAMPLE_SIZE).index]\n", "subset_corpus = Corpus(utterances=subset_utts) if SAMPLE else interview_corpus" ] }, @@ -294,7 +263,7 @@ } ], "source": [ - "surp.transform(subset_corpus, obj_type=\"utterance\", selector=lambda utt: utt.meta[\"is_question\"])" + "surp.transform(subset_corpus, obj_type='utterance', selector=lambda utt: utt.meta['is_question'])" ] }, { @@ -311,7 +280,7 @@ "metadata": {}, "outputs": [], "source": [ - "utterances = subset_corpus.get_utterances_dataframe(selector=lambda utt: utt.meta[\"is_question\"])" + "utterances = subset_corpus.get_utterances_dataframe(selector=lambda utt: 
utt.meta['is_question'])" ] }, { @@ -333,9 +302,7 @@ "source": [ "import pandas as pd\n", "\n", - "female_qs = pd.to_numeric(\n", - " utterances[utterances[\"meta.player_gender\"] == \"F\"][\"meta.surprise\"]\n", - ").dropna()\n", + "female_qs = pd.to_numeric(utterances[utterances['meta.player_gender'] == 'F']['meta.surprise']).dropna()\n", "female_qs.median()" ] }, @@ -356,9 +323,7 @@ } ], "source": [ - "male_qs = pd.to_numeric(\n", - " utterances[utterances[\"meta.player_gender\"] == \"M\"][\"meta.surprise\"]\n", - ").dropna()\n", + "male_qs = pd.to_numeric(utterances[utterances['meta.player_gender'] == 'M']['meta.surprise']).dropna()\n", "male_qs.median()" ] }, @@ -489,12 +454,7 @@ "metadata": {}, "outputs": [], "source": [ - "gender_models_surp = Surprise(\n", - " model_key_selector=lambda utt: utt.meta[\"player_gender\"],\n", - " target_sample_size=10,\n", - " context_sample_size=5000,\n", - " surprise_attr_name=\"surprise_gender_model\",\n", - ")" + "gender_models_surp = Surprise(model_key_selector=lambda utt: utt.meta['player_gender'], target_sample_size=10, context_sample_size=5000, surprise_attr_name='surprise_gender_model')" ] }, { @@ -522,7 +482,7 @@ } ], "source": [ - "gender_models_surp.fit(interview_corpus, selector=lambda utt: utt.meta[\"is_question\"])" + "gender_models_surp.fit(interview_corpus, selector=lambda utt: utt.meta['is_question'])" ] }, { @@ -558,13 +518,7 @@ } ], "source": [ - "gender_models_surp.transform(\n", - " subset_corpus,\n", - " obj_type=\"utterance\",\n", - " group_and_models=lambda utt: (utt.id, [\"M\", \"F\"]),\n", - " group_model_attr_key=lambda _, m: m,\n", - " selector=lambda utt: utt.meta[\"is_question\"],\n", - ")" + "gender_models_surp.transform(subset_corpus, obj_type='utterance', group_and_models=lambda utt: (utt.id, ['M', 'F']), group_model_attr_key=lambda _, m: m, selector=lambda utt: utt.meta['is_question'])" ] }, { @@ -581,7 +535,7 @@ "metadata": {}, "outputs": [], "source": [ - "utterances = 
subset_corpus.get_utterances_dataframe(selector=lambda utt: utt.meta[\"is_question\"])" + "utterances = subset_corpus.get_utterances_dataframe(selector=lambda utt: utt.meta['is_question'])" ] }, { @@ -601,9 +555,7 @@ } ], "source": [ - "utterances[utterances[\"meta.player_gender\"] == \"F\"][\"meta.surprise_gender_model\"].map(\n", - " lambda x: x[\"M\"]\n", - ").dropna().mean()" + "utterances[utterances['meta.player_gender'] == 'F']['meta.surprise_gender_model'].map(lambda x: x['M']).dropna().mean()" ] }, { @@ -623,9 +575,7 @@ } ], "source": [ - "utterances[utterances[\"meta.player_gender\"] == \"F\"][\"meta.surprise_gender_model\"].map(\n", - " lambda x: x[\"F\"]\n", - ").dropna().mean()" + "utterances[utterances['meta.player_gender'] == 'F']['meta.surprise_gender_model'].map(lambda x: x['F']).dropna().mean()" ] }, { @@ -645,9 +595,7 @@ } ], "source": [ - "utterances[utterances[\"meta.player_gender\"] == \"M\"][\"meta.surprise_gender_model\"].map(\n", - " lambda x: x[\"M\"]\n", - ").dropna().mean()" + "utterances[utterances['meta.player_gender'] == 'M']['meta.surprise_gender_model'].map(lambda x: x['M']).dropna().mean()" ] }, { @@ -667,9 +615,7 @@ } ], "source": [ - "utterances[utterances[\"meta.player_gender\"] == \"M\"][\"meta.surprise_gender_model\"].map(\n", - " lambda x: x[\"F\"]\n", - ").dropna().mean()" + "utterances[utterances['meta.player_gender'] == 'M']['meta.surprise_gender_model'].map(lambda x: x['F']).dropna().mean()" ] }, { diff --git a/convokit/tests/general/from_pandas/test_from_pandas.py b/convokit/tests/general/from_pandas/test_from_pandas.py index e9970857..58e554a5 100644 --- a/convokit/tests/general/from_pandas/test_from_pandas.py +++ b/convokit/tests/general/from_pandas/test_from_pandas.py @@ -48,7 +48,7 @@ def no_speaker_convo_dfs(self): class TestWithMem(CorpusFromPandas): def setUp(self) -> None: - self.corpus = Corpus(download("subreddit-hey"), storage_type="mem") + self.corpus = Corpus(download("subreddit-hey"), backend="mem") 
self.utt_df = self.corpus.get_utterances_dataframe() self.convo_df = self.corpus.get_conversations_dataframe() self.speaker_df = self.corpus.get_speakers_dataframe() @@ -72,7 +72,7 @@ def test_no_speaker_convo_dfs(self): class TestWithDB(CorpusFromPandas): def setUp(self) -> None: - self.corpus = Corpus(download("subreddit-hey"), storage_type="db") + self.corpus = Corpus(download("subreddit-hey"), backend="db") self.utt_df = self.corpus.get_utterances_dataframe() self.convo_df = self.corpus.get_conversations_dataframe() self.speaker_df = self.corpus.get_speakers_dataframe() diff --git a/convokit/tests/general/load_and_dump_corpora/test_load_dump_corpora.py b/convokit/tests/general/load_and_dump_corpora/test_load_dump_corpora.py index b3c4c0a3..ebcc55c8 100644 --- a/convokit/tests/general/load_and_dump_corpora/test_load_dump_corpora.py +++ b/convokit/tests/general/load_and_dump_corpora/test_load_dump_corpora.py @@ -10,33 +10,33 @@ """ -@pytest.mark.parametrize("storage_type", ["mem", "db"]) -def test_load_dump_subreddit(storage_type): - corpus = Corpus(download("subreddit-hey"), storage_type=storage_type) +@pytest.mark.parametrize("backend", ["mem", "db"]) +def test_load_dump_subreddit(backend): + corpus = Corpus(download("subreddit-hey"), backend=backend) corpus.dump("subreddit") -@pytest.mark.parametrize("storage_type", ["mem", "db"]) -def test_load_dump_tennis(storage_type): - corpus = Corpus(download("tennis-corpus"), storage_type=storage_type) +@pytest.mark.parametrize("backend", ["mem", "db"]) +def test_load_dump_tennis(backend): + corpus = Corpus(download("tennis-corpus"), backend=backend) corpus.dump("tennis-corpus") -@pytest.mark.parametrize("storage_type", ["mem", "db"]) -def test_load_dump_politeness(storage_type): - corpus = Corpus(download("wikipedia-politeness-corpus"), storage_type=storage_type) +@pytest.mark.parametrize("backend", ["mem", "db"]) +def test_load_dump_politeness(backend): + corpus = Corpus(download("wikipedia-politeness-corpus"), 
backend=backend) corpus.dump("wikipedia-politeness-corpus") -@pytest.mark.parametrize("storage_type", ["mem", "db"]) -def test_load_dump_switchboard(storage_type): - corpus = Corpus(download("switchboard-corpus"), storage_type=storage_type) +@pytest.mark.parametrize("backend", ["mem", "db"]) +def test_load_dump_switchboard(backend): + corpus = Corpus(download("switchboard-corpus"), backend=backend) corpus.dump("switchboard-corpus") -@pytest.mark.parametrize("storage_type", ["mem", "db"]) -def test_load_wikiconv(storage_type): - corpus = Corpus(download("wikiconv-2004"), storage_type=storage_type) +@pytest.mark.parametrize("backend", ["mem", "db"]) +def test_load_wikiconv(backend): + corpus = Corpus(download("wikiconv-2004"), backend=backend) corpus.dump("switchboard-corpus") diff --git a/convokit/tests/general/merge_corpus/test_merge_corpus.py b/convokit/tests/general/merge_corpus/test_merge_corpus.py index c06489de..c29e91be 100644 --- a/convokit/tests/general/merge_corpus/test_merge_corpus.py +++ b/convokit/tests/general/merge_corpus/test_merge_corpus.py @@ -29,14 +29,16 @@ def no_overlap(self): self.assertEqual(len(list(merged.iter_speakers())), 6) for utt_id in all_utt_ids: - self.assertTrue(merged.storage.has_data_for_component("utterance", utt_id)) + self.assertTrue(merged.backend_mapper.has_data_for_component("utterance", utt_id)) for speaker_id in all_speaker_ids: - self.assertTrue(merged.storage.has_data_for_component("speaker", speaker_id)) + self.assertTrue(merged.backend_mapper.has_data_for_component("speaker", speaker_id)) - for component_type in self.base_corpus.storage.data.keys(): - self.assertEqual(self.base_corpus.storage.count_entries(component_type), 0) - for component_type in self.non_overlapping_corpus.storage.data.keys(): - self.assertEqual(self.non_overlapping_corpus.storage.count_entries(component_type), 0) + for component_type in self.base_corpus.backend_mapper.data.keys(): + 
self.assertEqual(self.base_corpus.backend_mapper.count_entries(component_type), 0) + for component_type in self.non_overlapping_corpus.backend_mapper.data.keys(): + self.assertEqual( + self.non_overlapping_corpus.backend_mapper.count_entries(component_type), 0 + ) def with_overlap(self): """ @@ -55,14 +57,16 @@ def with_overlap(self): self.assertEqual(len(list(merged.iter_speakers())), 5) for utt_id in all_utt_ids: - self.assertTrue(merged.storage.has_data_for_component("utterance", utt_id)) + self.assertTrue(merged.backend_mapper.has_data_for_component("utterance", utt_id)) for speaker_id in all_speaker_ids: - self.assertTrue(merged.storage.has_data_for_component("speaker", speaker_id)) + self.assertTrue(merged.backend_mapper.has_data_for_component("speaker", speaker_id)) - for component_type in self.base_corpus.storage.data.keys(): - self.assertEqual(self.base_corpus.storage.count_entries(component_type), 0) - for component_type in self.overlapping_corpus.storage.data.keys(): - self.assertEqual(self.overlapping_corpus.storage.count_entries(component_type), 0) + for component_type in self.base_corpus.backend_mapper.data.keys(): + self.assertEqual(self.base_corpus.backend_mapper.count_entries(component_type), 0) + for component_type in self.overlapping_corpus.backend_mapper.data.keys(): + self.assertEqual( + self.overlapping_corpus.backend_mapper.count_entries(component_type), 0 + ) def overlap_diff_data(self): """ @@ -87,19 +91,23 @@ def overlap_diff_data(self): self.assertEqual(merged.get_utterance("2").speaker.id, "charlie") for utt_id in all_utt_ids: - self.assertTrue(merged.storage.has_data_for_component("utterance", utt_id)) + self.assertTrue(merged.backend_mapper.has_data_for_component("utterance", utt_id)) for speaker_id in all_speaker_ids: if ( speaker_id == "candace" ): # this speaker shouldn't be present due to overlap prioritization - self.assertFalse(merged.storage.has_data_for_component("speaker", speaker_id)) + self.assertFalse( + 
merged.backend_mapper.has_data_for_component("speaker", speaker_id) + ) else: - self.assertTrue(merged.storage.has_data_for_component("speaker", speaker_id)) + self.assertTrue(merged.backend_mapper.has_data_for_component("speaker", speaker_id)) - for component_type in self.base_corpus.storage.data.keys(): - self.assertEqual(self.base_corpus.storage.count_entries(component_type), 0) - for component_type in self.overlapping_corpus.storage.data.keys(): - self.assertEqual(self.overlapping_corpus.storage.count_entries(component_type), 0) + for component_type in self.base_corpus.backend_mapper.data.keys(): + self.assertEqual(self.base_corpus.backend_mapper.count_entries(component_type), 0) + for component_type in self.overlapping_corpus.backend_mapper.data.keys(): + self.assertEqual( + self.overlapping_corpus.backend_mapper.count_entries(component_type), 0 + ) def overlap_diff_metadata(self): """ @@ -129,16 +137,22 @@ def overlap_diff_metadata(self): self.assertEqual(merged.get_utterance("2").meta["the"], "ringo") for utt_id in all_utt_ids: - self.assertTrue(merged.storage.has_data_for_component("utterance", utt_id)) - self.assertTrue(merged.storage.has_data_for_component("meta", f"utterance_{utt_id}")) + self.assertTrue(merged.backend_mapper.has_data_for_component("utterance", utt_id)) + self.assertTrue( + merged.backend_mapper.has_data_for_component("meta", f"utterance_{utt_id}") + ) for speaker_id in all_speaker_ids: - self.assertTrue(merged.storage.has_data_for_component("speaker", speaker_id)) - self.assertTrue(merged.storage.has_data_for_component("meta", f"speaker_{speaker_id}")) + self.assertTrue(merged.backend_mapper.has_data_for_component("speaker", speaker_id)) + self.assertTrue( + merged.backend_mapper.has_data_for_component("meta", f"speaker_{speaker_id}") + ) - for component_type in self.base_corpus.storage.data.keys(): - self.assertEqual(self.base_corpus.storage.count_entries(component_type), 0) - for component_type in 
self.overlapping_corpus.storage.data.keys(): - self.assertEqual(self.overlapping_corpus.storage.count_entries(component_type), 0) + for component_type in self.base_corpus.backend_mapper.data.keys(): + self.assertEqual(self.base_corpus.backend_mapper.count_entries(component_type), 0) + for component_type in self.overlapping_corpus.backend_mapper.data.keys(): + self.assertEqual( + self.overlapping_corpus.backend_mapper.count_entries(component_type), 0 + ) def overlap_convo_metadata(self): """ @@ -158,24 +172,26 @@ def overlap_convo_metadata(self): self.assertEqual(len(merged.get_conversation("convo1").meta), 3) self.assertEqual(merged.get_conversation("convo1").meta["hello"], "food") - self.assertTrue(merged.storage.has_data_for_component("conversation", "convo1")) - self.assertTrue(merged.storage.has_data_for_component("meta", "conversation_convo1")) + self.assertTrue(merged.backend_mapper.has_data_for_component("conversation", "convo1")) + self.assertTrue(merged.backend_mapper.has_data_for_component("meta", "conversation_convo1")) self.assertFalse( - self.base_corpus_with_convo_id.storage.has_data_for_component("conversation", "convo1") + self.base_corpus_with_convo_id.backend_mapper.has_data_for_component( + "conversation", "convo1" + ) ) self.assertFalse( - self.overlapping_corpus_with_convo_id.storage.has_data_for_component( + self.overlapping_corpus_with_convo_id.backend_mapper.has_data_for_component( "conversation", "convo1" ) ) self.assertFalse( - self.base_corpus_with_convo_id.storage.has_data_for_component( + self.base_corpus_with_convo_id.backend_mapper.has_data_for_component( "meta", "conversation_convo1" ) ) self.assertFalse( - self.overlapping_corpus_with_convo_id.storage.has_data_for_component( + self.overlapping_corpus_with_convo_id.backend_mapper.has_data_for_component( "meta", "conversation_convo1" ) ) @@ -225,7 +241,7 @@ def add_utterance(self): self.assertEqual(added.get_utterance("5").speaker.id, "foxtrot") for utt in added.iter_utterances(): - 
self.assertFalse(hasattr(utt, "_temp_storage")) + self.assertFalse(hasattr(utt, "_temp_backend")) class TestWithMem(CorpusMerge): diff --git a/convokit/tests/notebook_testers/convokitIndex_issues_demo.ipynb b/convokit/tests/notebook_testers/convokitIndex_issues_demo.ipynb index fc704139..212183fb 100644 --- a/convokit/tests/notebook_testers/convokitIndex_issues_demo.ipynb +++ b/convokit/tests/notebook_testers/convokitIndex_issues_demo.ipynb @@ -7,8 +7,7 @@ "outputs": [], "source": [ "import os\n", - "\n", - "os.chdir(\"../..\")" + "os.chdir('../..')" ] }, { @@ -27,7 +26,7 @@ "metadata": {}, "outputs": [], "source": [ - "utterances = [Utterance(id=str(i), speaker=User(id=\"speaker\" + str(i))) for i in range(10)]" + "utterances = [Utterance(id=str(i), speaker=User(id='speaker'+str(i))) for i in range(10)]" ] }, { @@ -158,7 +157,7 @@ "outputs": [], "source": [ "for utt in corpus.iter_utterances():\n", - " utt.meta[\"good_meta\"] = 1" + " utt.meta['good_meta'] = 1" ] }, { @@ -191,8 +190,8 @@ "metadata": {}, "outputs": [], "source": [ - "for utt in corpus.iter_utterances(): # annotate first utt\n", - " utt.meta[\"okay_meta\"] = 1\n", + "for utt in corpus.iter_utterances(): # annotate first utt\n", + " utt.meta['okay_meta'] = 1\n", " break" ] }, @@ -226,10 +225,10 @@ "metadata": {}, "outputs": [], "source": [ - "idx = 1 ##\n", - "for utt in corpus.iter_utterances(): # annotate second utt\n", + "idx = 1 ## \n", + "for utt in corpus.iter_utterances(): # annotate second utt\n", " if idx == 2:\n", - " utt.meta[\"okay_meta2\"] = 1\n", + " utt.meta['okay_meta2'] = 1\n", " idx += 1" ] }, @@ -263,11 +262,11 @@ "metadata": {}, "outputs": [], "source": [ - "for idx, utt in enumerate(corpus.iter_utterances()): # annotate alternating utts\n", + "for idx, utt in enumerate(corpus.iter_utterances()): # annotate alternating utts\n", " if idx % 2:\n", - " utt.meta[\"bad_meta\"] = 1\n", + " utt.meta['bad_meta'] = 1\n", " else:\n", - " utt.meta[\"bad_meta\"] = None" + " 
utt.meta['bad_meta'] = None\n" ] }, { @@ -319,7 +318,7 @@ "outputs": [], "source": [ "for utt in corpus.iter_utterances():\n", - " utt.meta[\"to_be_deleted\"] = 1" + " utt.meta['to_be_deleted'] = 1" ] }, { @@ -352,7 +351,7 @@ "metadata": {}, "outputs": [], "source": [ - "del corpus.random_utterance().meta[\"to_be_deleted\"]" + "del corpus.random_utterance().meta['to_be_deleted']" ] }, { @@ -379,7 +378,7 @@ ], "source": [ "for utt in corpus.iter_utterances():\n", - " print(utt.meta.get(\"to_be_deleted\", None))" + " print(utt.meta.get('to_be_deleted', None))" ] }, { diff --git a/convokit/tests/notebook_testers/exclude_meta_tests.ipynb b/convokit/tests/notebook_testers/exclude_meta_tests.ipynb index ce619b81..62c435b0 100644 --- a/convokit/tests/notebook_testers/exclude_meta_tests.ipynb +++ b/convokit/tests/notebook_testers/exclude_meta_tests.ipynb @@ -7,8 +7,7 @@ "outputs": [], "source": [ "import os\n", - "\n", - "os.chdir(\"../../..\")\n", + "os.chdir('../../..')\n", "import convokit" ] }, @@ -35,7 +34,7 @@ } ], "source": [ - "corpus = Corpus(filename=download(\"subreddit-lol\"))" + "corpus = Corpus(filename=download('subreddit-lol'))" ] }, { @@ -80,13 +79,10 @@ } ], "source": [ - "corpus2 = Corpus(\n", - " filename=download(\"subreddit-lol\"),\n", - " exclude_conversation_meta=[\"subreddit\"],\n", - " exclude_speaker_meta=[\"num_posts\"],\n", - " exclude_utterance_meta=[\"score\"],\n", - " exclude_overall_meta=[\"num_posts\"],\n", - ")" + "corpus2 = Corpus(filename=download('subreddit-lol'), exclude_conversation_meta=['subreddit'],\n", + " exclude_speaker_meta=['num_posts'],\n", + " exclude_utterance_meta=['score'],\n", + " exclude_overall_meta=['num_posts'])" ] }, { diff --git a/convokit/tests/notebook_testers/reindex_conversations_example.ipynb b/convokit/tests/notebook_testers/reindex_conversations_example.ipynb index 5eb93ccb..7132649b 100644 --- a/convokit/tests/notebook_testers/reindex_conversations_example.ipynb +++ 
b/convokit/tests/notebook_testers/reindex_conversations_example.ipynb @@ -7,8 +7,7 @@ "outputs": [], "source": [ "import os\n", - "\n", - "os.chdir(\"..\")" + "os.chdir('..')" ] }, { @@ -71,23 +70,28 @@ " 4 5 6 7 8 9\n", "10 11\n", "\"\"\"\n", - "corpus = Corpus(\n", - " utterances=[\n", - " Utterance(id=\"0\", reply_to=None, root=\"0\", user=User(name=\"alice\"), timestamp=0),\n", - " Utterance(id=\"2\", reply_to=\"0\", root=\"0\", user=User(name=\"alice\"), timestamp=2),\n", - " Utterance(id=\"1\", reply_to=\"0\", root=\"0\", user=User(name=\"alice\"), timestamp=1),\n", - " Utterance(id=\"3\", reply_to=\"0\", root=\"0\", user=User(name=\"alice\"), timestamp=3),\n", - " Utterance(id=\"4\", reply_to=\"1\", root=\"0\", user=User(name=\"alice\"), timestamp=4),\n", - " Utterance(id=\"5\", reply_to=\"1\", root=\"0\", user=User(name=\"alice\"), timestamp=5),\n", - " Utterance(id=\"6\", reply_to=\"1\", root=\"0\", user=User(name=\"alice\"), timestamp=6),\n", - " Utterance(id=\"7\", reply_to=\"2\", root=\"0\", user=User(name=\"alice\"), timestamp=4),\n", - " Utterance(id=\"8\", reply_to=\"2\", root=\"0\", user=User(name=\"alice\"), timestamp=5),\n", - " Utterance(id=\"9\", reply_to=\"3\", root=\"0\", user=User(name=\"alice\"), timestamp=4),\n", - " Utterance(id=\"10\", reply_to=\"4\", root=\"0\", user=User(name=\"alice\"), timestamp=5),\n", - " Utterance(id=\"11\", reply_to=\"9\", root=\"0\", user=User(name=\"alice\"), timestamp=10),\n", - " Utterance(id=\"other\", reply_to=None, root=\"other\", user=User(name=\"alice\"), timestamp=99),\n", - " ]\n", - ")" + "corpus = Corpus(utterances = [\n", + " Utterance(id=\"0\", reply_to=None, root=\"0\", user=User(name=\"alice\"), timestamp=0),\n", + "\n", + " Utterance(id=\"2\", reply_to=\"0\", root=\"0\", user=User(name=\"alice\"), timestamp=2),\n", + " Utterance(id=\"1\", reply_to=\"0\", root=\"0\", user=User(name=\"alice\"), timestamp=1),\n", + " Utterance(id=\"3\", reply_to=\"0\", root=\"0\", user=User(name=\"alice\"), 
timestamp=3),\n", + "\n", + " Utterance(id=\"4\", reply_to=\"1\", root=\"0\", user=User(name=\"alice\"), timestamp=4),\n", + " Utterance(id=\"5\", reply_to=\"1\", root=\"0\", user=User(name=\"alice\"), timestamp=5),\n", + " Utterance(id=\"6\", reply_to=\"1\", root=\"0\", user=User(name=\"alice\"), timestamp=6),\n", + "\n", + " Utterance(id=\"7\", reply_to=\"2\", root=\"0\", user=User(name=\"alice\"), timestamp=4),\n", + " Utterance(id=\"8\", reply_to=\"2\", root=\"0\", user=User(name=\"alice\"), timestamp=5),\n", + "\n", + " Utterance(id=\"9\", reply_to=\"3\", root=\"0\", user=User(name=\"alice\"), timestamp=4),\n", + "\n", + " Utterance(id=\"10\", reply_to=\"4\", root=\"0\", user=User(name=\"alice\"), timestamp=5),\n", + "\n", + " Utterance(id=\"11\", reply_to=\"9\", root=\"0\", user=User(name=\"alice\"), timestamp=10),\n", + "\n", + " Utterance(id=\"other\", reply_to=None, root=\"other\", user=User(name=\"alice\"), timestamp=99)\n", + "])" ] }, { @@ -103,8 +107,8 @@ "metadata": {}, "outputs": [], "source": [ - "corpus.get_conversation(\"0\").meta[\"hey\"] = \"jude\"\n", - "corpus.meta[\"foo\"] = \"bar\"" + "corpus.get_conversation(\"0\").meta['hey'] = 'jude'\n", + "corpus.meta['foo'] = 'bar'" ] }, { diff --git a/convokit/tests/phrasing_motifs/test_questionSentences.py b/convokit/tests/phrasing_motifs/test_questionSentences.py index 1509e0c2..07a52e82 100644 --- a/convokit/tests/phrasing_motifs/test_questionSentences.py +++ b/convokit/tests/phrasing_motifs/test_questionSentences.py @@ -1,4 +1,5 @@ import unittest +import copy from convokit.phrasing_motifs.questionSentences import QuestionSentences from convokit.tests.test_utils import small_burr_corpus_parsed, reload_corpus_in_db_mode @@ -7,10 +8,14 @@ def parsed_burr_sir_corpus_with_lowercase_are(): corpus = small_burr_corpus_parsed() for utterance in corpus.iter_utterances(): - parsed = utterance.retrieve_meta("parsed") + # with new DB mode behavior, mutation to metadata fields is no longer supported. 
+ parsed = copy.deepcopy(utterance.retrieve_meta("parsed")) for sentence in parsed: if sentence["toks"][0]["tok"] == "Are": + # trying to mutate here originally. + # solve by deepcopying the entire metadata field, modify it, then replace entire original field. sentence["toks"][0]["tok"] = "are" + utterance.meta["parsed"] = parsed return corpus diff --git a/convokit/tests/test_utils.py b/convokit/tests/test_utils.py index 42bd5759..87b37a48 100644 --- a/convokit/tests/test_utils.py +++ b/convokit/tests/test_utils.py @@ -267,7 +267,7 @@ def reload_corpus_in_db_mode(corpus): corpus_id = "_" + uuid4().hex try: corpus.dump(corpus_id, base_path=".") - db_corpus = Corpus(corpus_id, storage_type="db") + db_corpus = Corpus(corpus_id, backend="db") return db_corpus finally: if os.path.exists(corpus_id): diff --git a/convokit/text_processing/demo/cleaning_text.ipynb b/convokit/text_processing/demo/cleaning_text.ipynb index 64bc4fb5..846ca438 100644 --- a/convokit/text_processing/demo/cleaning_text.ipynb +++ b/convokit/text_processing/demo/cleaning_text.ipynb @@ -7,8 +7,7 @@ "outputs": [], "source": [ "import os\n", - "\n", - "os.chdir(\"../../..\")" + "os.chdir('../../..')" ] }, { @@ -59,7 +58,7 @@ } ], "source": [ - "corpus = Corpus(filename=download(\"subreddit-Cornell\"))" + "corpus = Corpus(filename=download('subreddit-Cornell'))" ] }, { @@ -98,7 +97,7 @@ } ], "source": [ - "corpus.get_utterance(\"15enm8\").text" + "corpus.get_utterance('15enm8').text" ] }, { @@ -152,7 +151,7 @@ } ], "source": [ - "corpus.get_utterance(\"15enm8\").text" + "corpus.get_utterance('15enm8').text" ] }, { @@ -180,7 +179,7 @@ } ], "source": [ - "corpus.get_utterance(\"15enm8\").meta" + "corpus.get_utterance('15enm8').meta" ] }, { @@ -223,7 +222,7 @@ ], "source": [ "cleaner = TextCleaner(replace_text=True, save_original=True, verbosity=10000)\n", - "corpus = Corpus(filename=download(\"subreddit-Cornell\"))\n", + "corpus = Corpus(filename=download('subreddit-Cornell'))\n", 
"cleaner.transform(corpus)" ] }, @@ -244,7 +243,7 @@ } ], "source": [ - "corpus.get_utterance(\"15enm8\").text" + "corpus.get_utterance('15enm8').text" ] }, { @@ -264,7 +263,7 @@ } ], "source": [ - "corpus.get_utterance(\"15enm8\").meta[\"original\"]" + "corpus.get_utterance('15enm8').meta['original']" ] }, { @@ -307,7 +306,7 @@ ], "source": [ "cleaner = TextCleaner(replace_text=False, verbosity=10000)\n", - "corpus = Corpus(filename=download(\"subreddit-Cornell\"))\n", + "corpus = Corpus(filename=download('subreddit-Cornell'))\n", "cleaner.transform(corpus)" ] }, @@ -328,7 +327,7 @@ } ], "source": [ - "corpus.get_utterance(\"15enm8\").text" + "corpus.get_utterance('15enm8').text" ] }, { @@ -348,7 +347,7 @@ } ], "source": [ - "corpus.get_utterance(\"15enm8\").meta[\"cleaned\"]" + "corpus.get_utterance('15enm8').meta['cleaned']" ] }, { diff --git a/docs/source/architecture.rst b/docs/source/architecture.rst index db85bde0..0cbf310c 100644 --- a/docs/source/architecture.rst +++ b/docs/source/architecture.rst @@ -24,7 +24,9 @@ In everyday usage, "metadata" simply refers to information for identifying and o Each of the four classes in the hierarchy contains a ``.meta`` field, which is a dictionary that maps from metadata name to value. In the previous example, you would want a Speaker-level metadata entry called something like "start-date". So, if ``spkr`` is an instance of Speaker representing a Reddit user who joined on April 1, 2008, ``spkr.meta`` would be a dictionary looking like ``{"start-date": "04-01-2008"}``. While for simplicity we have represented the start date as a string, in practice there are no restrictions on what kinds of objects can be stored as metadata, so you could have chosen a fancier representation, like a Pandas timestamp. -Note that all of the datasets that ship with ConvoKit have some metadata already present, usually related to the task that the dataset was originally designed for. 
To learn what metadata is present in a Corpus, you can access its ``meta_index`` attribute. +Note that *all* metadata values should be treated as immutable regardless of type; for more detailed information, refer to `Immutability of Metadata Fields `_. + +All of the datasets that ship with ConvoKit have some metadata already present, usually related to the task that the dataset was originally designed for. To learn what metadata is present in a Corpus, you can access its meta_index attribute. Vectors ^^^^^^^ @@ -35,7 +37,7 @@ Additionally, ConvoKit provides support for adding vector data (e.g. GloVe repre Transformer ----------- -Of course, if we only had Corpus objects (merely large containers of data), we wouldn't really be able to do anything interesting. In practice, we need to manipulate the corpora in some way. This idea is represented in ConvoKit as the Transformer class. +Of course, if we only had Corpus objects (merely large containers of data), we wouldn't really be able to do anything interesting. In practice, we need to manipulate the corpora in some way. This idea is represented in ConvoKit as the Transformer class. At a high level, a Transformer is an object that takes in a Corpus and gives back the same Corpus with some modifications done to it. In almost all cases, these modifications will take the form of changed or added metadata. For example, one kind of Transformer built in to ConvoKit is the TextParser, which is designed to add dependency parses to a Corpus. When you run the TextParser on a Corpus, it adds to each Utterance a metadata entry called "parsed", whose value is the dependency parse of that Utterance's text (represented as a `SpaCy Doc `_). The modified Corpus is then returned so you can continue to do other things with it (including running other Transformers). @@ -61,7 +63,7 @@ more complicated task, like named entity recognition. 
In general, the code for c Transformers takes the following form:: # Assume that transformer1,transformer2,... have been previously initialized as instances of Transformer subclasses - + base_corpus = Corpus(...) corpus1 = transformer1.transform(base_corpus) diff --git a/docs/source/awry.rst b/docs/source/awry.rst index 5825474f..e758ae24 100644 --- a/docs/source/awry.rst +++ b/docs/source/awry.rst @@ -1,11 +1,11 @@ Conversations Gone Awry Dataset =============================== -A collection of conversations from Wikipedia talk pages that derail into personal attacks (4,188 conversations, 30,021 comments). +A collection of conversations from Wikipedia talk pages that derail into personal attacks (4,188 conversations, 30,021 comments). -Distributed together with: +Distributed together with: -`Conversations gone awry: Detecting early signs of conversational failure `_. Justine Zhang, Jonathan P. Chang, Cristian Danescu-Niculescu-Mizil, Lucas Dixon, Yiqing Hua, Nithum Thain, Dario Taraborelli. ACL 2018. +`Conversations gone awry: Detecting early signs of conversational failure `_. Justine Zhang, Jonathan P. Chang, Cristian Danescu-Niculescu-Mizil, Lucas Dixon, Yiqing Hua, Nithum Thain, Dario Taraborelli. ACL 2018. 
and @@ -45,10 +45,10 @@ Metadata for each conversation include: * page_title: the title of the talk page the comment came from * page_id: the unique numerical ID of the talk page the comment came from -* pair_id: the id of the conversation that this comment's conversation is paired with +* pair_id: the id of the conversation that this conversation is paired with * conversation_has_personal_attack: whether any comment in this comment's conversation contains a personal attack according to crowdsourced annotators -* verified: whether the personal attack label has been double-checked by an internal annotator and confirmed to be correct -* pair_verified: whether the personal attack label for the paired conversation has been double-checked by an internal annotator and confirmed to be correct +* verified: whether the personal attack label has been double-checked by an internal annotator and confirmed to be correct +* pair_verified: whether the personal attack label for the paired conversation has been double-checked by an internal annotator and confirmed to be correct * annotation_year: which round of annotation the conversation's label came from. Possible values are "2018" for the first annotation round and "2019" for the second annotation round. * split: which split (train, val, or test) this conversation was used in for the experiments described in "Trouble on the Horizon" (not applicable to results from "Conversations Gone Awry", which reports leave-one-out accuracies). @@ -56,7 +56,7 @@ Metadata for each conversation include: Usage ----- -To download directly with ConvoKit: +To download directly with ConvoKit: >>> from convokit import Corpus, download >>> corpus = Corpus(filename=download("conversations-gone-awry-corpus")) @@ -78,7 +78,7 @@ This data was collected from late 2017 to early 2018 and was annotated in two ro Related links ^^^^^^^^^^^^^ -* Fun: Guess whether a `conversation will go awry `_. +* Fun: Guess whether a `conversation will go awry `_. 
* `Wikipedia editors' talk Pages `_. @@ -87,10 +87,3 @@ Contact ^^^^^^^ Please email any questions to: cristian@cs.cornell.edu (Cristian Danescu-Niculescu-Mizil) - - - - - - - diff --git a/docs/source/conf.py b/docs/source/conf.py index 3b4d5b0f..13130272 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -55,7 +55,7 @@ # General information about the project. project = "convokit" -copyright = "2017-2022 The ConvoKit Developers" +copyright = "2017-2023 The ConvoKit Developers" author = "The ConvoKit Developers" # The version info for the project you're documenting, acts as replacement for @@ -63,9 +63,9 @@ # built documents. # # The short X.Y version. -version = "2.5" +version = "3.0" # The full version, including alpha/beta/rc tags. -release = "2.5.3" +release = "3.0.0" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/docs/source/config.rst b/docs/source/config.rst new file mode 100644 index 00000000..9b076f35 --- /dev/null +++ b/docs/source/config.rst @@ -0,0 +1,8 @@ +Configurations +=================== + +After you import convokit for the first time, a default configuration file will be generated in ~/.convokit/config.yml. +There are currently three variables: +db_host: +data_directory: +default_backend: diff --git a/docs/source/db_setup.rst b/docs/source/db_setup.rst index 13f6b843..a4b9eea5 100644 --- a/docs/source/db_setup.rst +++ b/docs/source/db_setup.rst @@ -1,10 +1,10 @@ Setting Up MongoDB For ConvoKit =============================== -`The MongoDB Documentation `_ provides a complete -guide on installing and running a MongoDB server. Here, we provide a simplified -guide to getting MongoDB setup to use with ConvoKit's DB Storage mode, in a handful -of settings. +`The MongoDB Documentation `_ provides a complete +guide on installing and running a MongoDB server. 
Here, we provide a simplified +guide to getting MongoDB setup to use with ConvoKit's DB Backend, in a handful +of settings. Running MongoDB with Conda -------------------------- @@ -12,24 +12,24 @@ Running MongoDB with Conda 0. Install conda if needed, following `these instructions `_ for your system. 1. (Optional) Create a new environment where you want to install mongodb: -:: +:: $ conda create --name my_env 2. Activate your newly created environment, or an existing environment where you want to install mongodb: -:: +:: $ conda activate my_env 3. Install the mongodb package. -:: +:: $ conda install mongodb -Check to see if version is at least 5.0. +Check to see if version is at least 5.0. :: @@ -38,28 +38,28 @@ Check to see if version is at least 5.0. If not, utilize: :: - + $ conda install -c conda-forge mongodb=5.0 4. Start the MongoDB server as a daemon process. -:: +:: - $ mongod --fork --logpath --dbpath + $ mongod --fork --logpath --dbpath 5. Use the MongoDB server for ConvoKit! 6. To stop the MongoDB server, on Linux or MacOS, use the ``htop`` command to find the mongod process ID and run: -:: +:: $ kill 6. Alternitivly, to stop the MongoDB server on Linux, run -:: +:: - $ mongod --shutdown + $ mongod --shutdown Sometimes, the above process doesn't work for MacOS. However, there is another solution for MacOS users below. @@ -85,7 +85,7 @@ Running MongoDB on MacOS with Homebrew 3. Use the MongoDB server for ConvoKit! 4. To stop the MongoDB server, run -:: +:: $ brew services stop mongodb-community@5.0 @@ -93,15 +93,15 @@ Using MongoDB Atlas: A remote MongoDB server in the cloud --------------------------------------------------------- MongoDB offers a cloud service version of their database, called MongoDB Atlas. -Atlas provides a free tier that is a good option for starting out with ConvoKit -remote DB storage, and several paid tiers that provide production level performance. 
+Atlas provides a free tier that is a good option for starting out with ConvoKit +remote DB backend, and several paid tiers that provide production level performance. Follow these instructions, based on `the instructions for getting started with Atlas -provided by the MongoDB team `_, +provided by the MongoDB team `_, to setup a MongoDB server in the cloud for use with ConvoKit. 0. Register a new MongoDB Atlas account here: https://account.mongodb.com/account/register, and log into the Atlas UI. 1. Create a new MongoDB cluster and a database user within the Atlas UI. 2. Add your IP address to the set of approved IP addresses that can connect to cluster, and setup a DB user, within the Atlas UI (as suggested in the "Setup connection security" tab). 3. In the "Choose a connection method" tab, select "Connect your Application" and choose Python as your driver. Then, copy the outputted URI, which should look something like ``mongodb+srv://:@cluster0.m0srt.mongodb.net/myFirstDatabase?retryWrites=true&w=majority`` -4. Paste the aforementioned URI into ~/.convokit/config.yml in the db_host field. Then, replace and with the credentials you setup in step 1, and replace ``myFirstDatabase`` with ``convokit``. -5. Use the remote MongoDB server for ConvoKit! \ No newline at end of file +4. Paste the aforementioned URI into ~/.convokit/config.yml in the db_host field. Then, replace and with the credentials you setup in step 1, and replace ``myFirstDatabase`` with ``convokit``. +5. Use the remote MongoDB server for ConvoKit! diff --git a/docs/source/index.rst b/docs/source/index.rst index 9a389466..2a6f0dec 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -8,7 +8,7 @@ Cornell Conversational Analysis Toolkit (ConvoKit) Documentation This toolkit contains tools to extract conversational features and analyze social phenomena in conversations, using a `single unified interface `_ inspired by (and compatible with) scikit-learn. 
Several large `conversational datasets `_ are included together with scripts exemplifying the use of the toolkit on these datasets. -More information can be found at our `website `_. The latest version is `2.5.3 `_ (released 16 Jan 2022). +More information can be found at our `website `_. The latest version is `3.0.0 `_ (released July 17, 2023). Contents -------- @@ -21,6 +21,7 @@ Contents Introductory tutorial Core Concepts Data Format + Configurations Troubleshooting .. toctree:: diff --git a/docs/source/install.rst b/docs/source/install.rst index 74a05de1..00630467 100644 --- a/docs/source/install.rst +++ b/docs/source/install.rst @@ -3,7 +3,7 @@ Installing ConvoKit System Requirements =================== -ConvoKit requires Python 3.7 or above. +ConvoKit requires Python 3.8 or above. Package Installation ==================== @@ -31,6 +31,10 @@ For more information on choosing between the two options and setting up the Mong Choosing a Backend: native Python vs MongoDB Setting up MongoDB for ConvoKit +Configuration +================== +ConvoKit configurations are stored in "~/.convokit/config.yml", check out our `Configuration Guide `_ for a list of configuration details. + Troubleshooting =============== -If you run into any issues during or after installation, check out our `Troubleshooting Guide `_ for a list of solutions to common issues. \ No newline at end of file +If you run into any issues during or after installation, check out our `Troubleshooting Guide `_ for a list of solutions to common issues. diff --git a/docs/source/storage_options.rst b/docs/source/storage_options.rst index 0075fc5f..bf0390be 100644 --- a/docs/source/storage_options.rst +++ b/docs/source/storage_options.rst @@ -34,46 +34,20 @@ How to Change Backends Once you have chosen the backend that best suits your purposes, the next step is to tell ConvoKit to use it. This can be done in three ways: -#. Corpus-level: ConvoKit supports specifying a backend on a per-Corpus basis. 
This is done through the ``storage_type`` parameter when constructing a corpus. You can set this parameter to the string ``"mem"`` for the native Python backend or ``"db"`` for the MongoDB backend. It is possible to mix Python-backed and MongoDB-backed corpora in the same script. +#. Corpus-level: ConvoKit supports specifying a backend on a per-Corpus basis. This is done through the ``backend`` parameter when constructing a corpus. You can set this parameter to the string ``"mem"`` for the native Python backend or ``"db"`` for the MongoDB backend. It is possible to mix Python-backed and MongoDB-backed corpora in the same script. -#. System-level: If you want to change the *default* backend in all ConvoKit code that runs on your computer (i.e., the backend that gets used when the ``storage_type`` parameter is not given), this is controlled by the ConvoKit system setting ``"default_storage_mode"``. This is set to ``"mem"`` when ConvoKit is first installed, but you can change it to ``"db"`` to tell ConvoKit to use the MongoDB backend by default. Note: ConvoKit system settings are found in the ``config.yml`` file, which is located in the hidden directory ``~/.convokit``. +#. System-level: If you want to change the *default* backend in all ConvoKit code that runs on your computer (i.e., the backend that gets used when the ``backend`` parameter is not given), this is controlled by the ConvoKit system setting ``"default_backend"``. This is set to ``"mem"`` when ConvoKit is first installed, but you can change it to ``"db"`` to tell ConvoKit to use the MongoDB backend by default. Note: ConvoKit system settings are found in the ``config.yml`` file, which is located in the hidden directory ``~/.convokit``. -#. Script-level: As an in-between option, if you want to change the default storage option used in a specific Python script but not at the whole-system level, you can do this by setting the environment variable ``CONVOKIT_STORAGE_MODE`` before running your script. 
For example, if you normally run your script as ``python3 myscript.py``, running it instead as ``CONVOKIT_STORAGE_MODE=db python myscript.py`` will set the default storage mode to MongoDB for that run of the script only. +#. Script-level: As an in-between option, if you want to change the default backend option used in a specific Python script but not at the whole-system level, you can do this by setting the environment variable ``CONVOKIT_BACKEND`` before running your script. For example, if you normally run your script as ``python3 myscript.py``, running it instead as ``CONVOKIT_BACKEND=db python myscript.py`` will set the default backend to MongoDB for that run of the script only. Differences in Corpus behavior between backends =============================================== For the most part, the two backends are designed to be interchangeable; that is, code written for one backend should work in the other backend out-of-the-box. -However, some specifics of MongoDB result in two minor differences in Corpus behavior that you should be aware of when writing your code. +We made deliberate design choices during implementation to ensure consistent behavior of the code between the two supported backends. +However, some specifics of MongoDB result in one minor difference in Corpus behavior that you should be aware of when writing your code. -First, since the MongoDB backend uses a MongoDB database as its data storage system, it needs to give that database a name. -Thus, there is an additional parameter in the Corpus constructor, ``db_collection_prefix``, which is only used by the MongoDB backend. +Since the MongoDB backend uses a MongoDB database as its data storage backend, it needs to give that database a name. +Thus, there is an additional parameter in the Corpus constructor, db_collection_prefix, which is only used by the MongoDB backend. This parameter determines how the MongoDB database will be named.
Note that you still have the option of not specifying a name, but in this case a random name will be used. It is best practice to explicitly supply a name yourself, so you know what database to reconnect to in the event that reconnection is needed after a system crash. - -Second, because all operations in MongoDB involve *copying* data from the MongoDB database to the Python process (or vice versa), all metadata values must be treated as *immutable*. -This does not really make a difference for primitive values like ints and strings, since those are immutable in Python to begin with. -However, code that relies on mutating a more complex type like a dictionary may not work as expected in the MongoDB backend. -For example, suppose the metadata entry ``"foo"`` is a list type, and you access it by saving it to a Python variable as follows: - ->>> saved_foo = my_utt.meta["foo"] - -Because lists are considered mutable in Python, you might expect the following code to successfully add a new item in the ``foo`` metadata of ``my_utt``: - ->>> saved_foo.append("new value") - -This will work in the native Python backend. -However, it will not work in the MongoDB backend; the code will run, but only the variable ``saved_foo`` will be affected, not the actual metadata of ``my_utt``. -This is because ``saved_foo`` only contains a copy of the data in the MongoDB database, which has been translated into a Python object. -Thus, any operations that are done directly on ``saved_foo`` are done only to the Python object, and do not involve any database writes. - -It is therefore best to treat *all* metadata objects, regardless of type, as immutable when using the MongoDB backend. -Thus, the correct way to change metadata in MongoDB mode is the same way you would change an int or string type metadata entry: that is, by completely overwriting it. 
-For example, to achieve the desired effect with the ``"foo"`` metadata entry from above, you should do the following: - ->>> temp_foo = my_utt.meta["foo"] ->>> temp_foo.append("new value") ->>> my_utt.meta["foo"] = temp_foo - -By adding the additional line of code that overwrites the ``"foo"`` metadata entry, you are telling ConvoKit that you want to update the value of ``"foo"`` in the database-backed metadata table with a new value, represented by ``temp_foo`` which contains the new additional item. -Thus the contents of ``temp_foo`` will get written to the database as the new value of ``my_utt.meta["foo"]``, hence updating the metadata as desired. diff --git a/docs/source/troubleshooting.rst b/docs/source/troubleshooting.rst index 58f93c81..13c7d267 100644 --- a/docs/source/troubleshooting.rst +++ b/docs/source/troubleshooting.rst @@ -62,3 +62,34 @@ and if that doesn't fix the issue, then run: >>> open /Applications/Python\ 3.7/Install\ Certificates.command (Substitute 3.7 in the above command with your current Python version (e.g. 3.8 or 3.9) if necessary.) + +Immutability of Metadata Fields +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Starting with 3.0, ConvoKit disallows mutation on Metadata fields to prevent unintended data loss and ensure the integrity of the corpus metadata backend storage. +When accessing a Metadata field, a deep copy of the field is returned to prevent mutation changes to the copy from affecting the backend storage. +This behavior is intended to ensure consistency between DB and MEM modes, since permitting mutations to mutable metadata fields in DB mode would solely modify the in-memory data without updating the database, thereby risking potential data loss. + +Therefore, all metadata values must be treated as *immutable*. This does not really make a difference for primitive values like ints and strings, +since those are immutable in Python to begin with. 
However, code that relies on mutating a more complex type like lists or dictionaries may not work as expected. +For example, suppose the metadata entry ``"foo"`` is a list type, and you access it by saving it to a Python variable as follows: + +>>> saved_foo = my_utt.meta["foo"] + +Because lists are considered mutable in Python, you might expect the following code to successfully add a new item in the ``foo`` metadata of ``my_utt``: + +>>> saved_foo.append("new value") + +However, it will not work starting with the 3.0 version of ConvoKit; the code will run, but only the variable ``saved_foo`` will be affected, not the actual metadata storage of ``my_utt``. +This is because ``saved_foo`` only contains a copy of the data from the backend storage. +Thus, any operations that are done directly on ``saved_foo`` are done only to the copy, and do not involve any backend storage writes. + +It is therefore necessary to treat *all* metadata objects, regardless of type, as immutable. +Thus, the way to change metadata is the same way you would change an int or string type metadata entry: that is, by completely overwriting it. +For example, to achieve the desired effect with the ``"foo"`` metadata entry from above, you should do the following: + +>>> temp_foo = my_utt.meta["foo"] +>>> temp_foo.append("new value") +>>> my_utt.meta["foo"] = temp_foo + +By adding the additional line of code that overwrites the ``"foo"`` metadata entry, you are telling ConvoKit that you want to update the value of ``"foo"`` in the storage’s metadata table with a new value, represented by ``temp_foo`` which contains the new additional item. +Thus the contents of ``temp_foo`` will get written to the backend storage as the new value of ``my_utt.meta["foo"]``, hence updating the metadata as desired. 
diff --git a/examples/Introduction_to_ConvoKit.ipynb b/examples/Introduction_to_ConvoKit.ipynb index c2138e6e..ec5158ee 100644 --- a/examples/Introduction_to_ConvoKit.ipynb +++ b/examples/Introduction_to_ConvoKit.ipynb @@ -32,22 +32,15 @@ "# for pretty printing of cells within the Colab version of this notebook\n", "from IPython.display import HTML, display\n", "\n", - "\n", "def set_css():\n", - " display(\n", - " HTML(\n", - " \"\"\"\n", + " display(HTML('''\n", " \n", - " \"\"\"\n", - " )\n", - " )\n", - "\n", - "\n", - "get_ipython().events.register(\"pre_run_cell\", set_css)" + " '''))\n", + "get_ipython().events.register('pre_run_cell', set_css)" ] }, { @@ -129,7 +122,7 @@ } ], "source": [ - "corpus = Corpus(download(\"subreddit-Cornell\"))" + "corpus = Corpus(download('subreddit-Cornell'))" ] }, { @@ -164,7 +157,7 @@ } ], "source": [ - "switchboard_corpus = Corpus(download(\"switchboard-corpus\"))" + "switchboard_corpus = Corpus(download('switchboard-corpus'))" ] }, { @@ -606,7 +599,7 @@ ], "source": [ "# Here's an example of a corpus with more sophisticated labels such as speaker intentions and perceptions\n", - "deception_corpus = Corpus(download(\"diplomacy-corpus\"))\n", + "deception_corpus = Corpus(download('diplomacy-corpus'))\n", "deception_corpus.random_utterance().meta" ] }, @@ -645,7 +638,7 @@ } ], "source": [ - "# Another example is Switchboard, which has utterances with a list of [text segment, tag] pairs,\n", + "# Another example is Switchboard, which has utterances with a list of [text segment, tag] pairs, \n", "# where tag refers to the DAMSL speech act tag.\n", "switchboard_corpus.random_utterance().meta" ] @@ -695,7 +688,7 @@ ], "source": [ "# How to add metadata\n", - "utt.meta[\"dialog_acts\"] = [\"question\", \"request\"]\n", + "utt.meta['dialog_acts'] = ['question', 'request']\n", "utt.meta" ] }, @@ -744,7 +737,7 @@ ], "source": [ "# This is an equivalent way of adding metadata\n", - "convo.add_meta(\"toxicity_score\", 5)\n", + 
"convo.add_meta('toxicity_score', 5)\n", "convo.meta" ] }, @@ -821,7 +814,7 @@ ], "source": [ "# We can fetch individual objects by ID\n", - "corpus.get_speaker(\"ulysses2014\")\n", + "corpus.get_speaker('ulysses2014')\n", "# corpus.get_conversation('7bir0w')\n", "# corpus.get_utterance('dsyd46r')" ] @@ -862,7 +855,7 @@ ], "source": [ "# We can check if the Corpus contains an object with a specified ID\n", - "corpus.has_speaker(\"ulysses2014\")\n", + "corpus.has_speaker('ulysses2014')\n", "# corpus.has_conversation('7bir0w')\n", "# corpus.has_utterance('dsyd46r')" ] @@ -1078,7 +1071,7 @@ "source": [ "for utt in corpus.iter_utterances():\n", " print(utt.text)\n", - " break" + " break " ] }, { @@ -1114,7 +1107,7 @@ ], "source": [ "for convo in corpus.iter_conversations():\n", - " print(convo.meta[\"num_comments\"])\n", + " print(convo.meta['num_comments'])\n", " break" ] }, @@ -1343,8 +1336,8 @@ "source": [ "# consider this sequence of operations that highlight how to navigate between components\n", "utt = corpus.random_utterance()\n", - "convo = utt.get_conversation() # get the Conversation the Utterance belongs to\n", - "spkr = utt.speaker # get the Speaker who made the Utterance\n", + "convo = utt.get_conversation() # get the Conversation the Utterance belongs to\n", + "spkr = utt.speaker # get the Speaker who made the Utterance\n", "\n", "spkr_convos = list(spkr.iter_conversations())\n", "spkr_utts = list(spkr.iter_utterances())\n", @@ -1651,7 +1644,7 @@ } ], "source": [ - "convo = corpus.get_conversation(\"7yy032\")\n", + "convo = corpus.get_conversation('7yy032')\n", "print(convo)" ] }, @@ -1780,7 +1773,7 @@ ], "source": [ "# Instead of printing the Speaker ID, we print the Utterance ID\n", - "switchboard_corpus.get_conversation(\"4771-0\").print_conversation_structure(lambda utt: utt.id)" + "switchboard_corpus.get_conversation('4771-0').print_conversation_structure(lambda utt: utt.id)" ] }, { @@ -1936,7 +1929,7 @@ ], "source": [ "# Since the first utterance 
has ID '7yy032'\n", - "first_utt = corpus.get_utterance(\"7yy032\")" + "first_utt = corpus.get_utterance('7yy032')" ] }, { @@ -2072,16 +2065,16 @@ "source": [ "# Traverse the conversation tree in interesting ways\n", "print(\"Breadth first:\")\n", - "print([utt.speaker.id for utt in convo.traverse(\"bfs\")], \"\\n\")\n", + "print([utt.speaker.id for utt in convo.traverse('bfs')],\"\\n\")\n", "\n", "print(\"Depth first:\")\n", - "print([utt.speaker.id for utt in convo.traverse(\"dfs\")], \"\\n\")\n", + "print([utt.speaker.id for utt in convo.traverse('dfs')],\"\\n\")\n", "\n", "print(\"Preorder:\")\n", - "print([utt.speaker.id for utt in convo.traverse(\"preorder\")], \"\\n\")\n", + "print([utt.speaker.id for utt in convo.traverse('preorder')],\"\\n\")\n", "\n", "print(\"Postorder:\")\n", - "print([utt.speaker.id for utt in convo.traverse(\"postorder\")], \"\\n\")\n", + "print([utt.speaker.id for utt in convo.traverse('postorder')],\"\\n\")\n", "\n", "print(\"For reference, here is the structure of the thread again:\")\n", "convo.print_conversation_structure()" @@ -2331,7 +2324,7 @@ } ], "source": [ - "corpus = Corpus(download(\"reddit-corpus-small\"))" + "corpus = Corpus(download('reddit-corpus-small'))" ] }, { @@ -2493,11 +2486,8 @@ } ], "source": [ - "fw.fit(\n", - " corpus,\n", - " class1_func=lambda utt: utt.meta[\"subreddit\"] == \"Christianity\",\n", - " class2_func=lambda utt: utt.meta[\"subreddit\"] == \"atheism\",\n", - ")" + "fw.fit(corpus, class1_func = lambda utt: utt.meta['subreddit'] == 'Christianity',\n", + " class2_func = lambda utt: utt.meta['subreddit'] == 'atheism')" ] }, { @@ -2960,7 +2950,7 @@ } ], "source": [ - "fw.summarize(corpus, plot=True, class1_name=\"r/Christianity\", class2_name=\"r/atheism\")" + "fw.summarize(corpus, plot=True, class1_name='r/Christianity', class2_name='r/atheism')" ] }, { @@ -3025,4 +3015,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git 
a/examples/conversations-gone-awry/Conversations_Gone_Awry_Prediction.ipynb b/examples/conversations-gone-awry/Conversations_Gone_Awry_Prediction.ipynb index 616bd3bd..b011a089 100644 --- a/examples/conversations-gone-awry/Conversations_Gone_Awry_Prediction.ipynb +++ b/examples/conversations-gone-awry/Conversations_Gone_Awry_Prediction.ipynb @@ -40,7 +40,6 @@ "from convokit import Corpus\n", "\n", "import matplotlib.pyplot as plt\n", - "\n", "%matplotlib inline" ] }, @@ -51,8 +50,7 @@ "outputs": [], "source": [ "import warnings\n", - "\n", - "warnings.filterwarnings(\"ignore\")" + "warnings.filterwarnings('ignore')" ] }, { @@ -83,7 +81,7 @@ } ], "source": [ - "# OPTION 1: DOWNLOAD CORPUS\n", + "# OPTION 1: DOWNLOAD CORPUS \n", "# UNCOMMENT THESE LINES TO DOWNLOAD CORPUS\n", "# DATA_DIR = ''\n", "# AWRY_ROOT_DIR = download('conversations-gone-awry-corpus', data_dir=DATA_DIR)\n", @@ -93,7 +91,7 @@ "# AWRY_ROOT_DIR = ''\n", "\n", "awry_corpus = Corpus(AWRY_ROOT_DIR)\n", - "awry_corpus.load_info(\"utterance\", [\"parsed\"])" + "awry_corpus.load_info('utterance',['parsed'])" ] }, { @@ -109,9 +107,7 @@ "metadata": {}, "outputs": [], "source": [ - "awry_corpus = awry_corpus.filter_conversations_by(\n", - " lambda convo: convo.meta[\"annotation_year\"] == \"2018\"\n", - ")" + "awry_corpus = awry_corpus.filter_conversations_by(lambda convo: convo.meta['annotation_year'] == '2018')" ] }, { @@ -159,7 +155,7 @@ } ], "source": [ - "# OPTION 1: DOWNLOAD CORPUS\n", + "# OPTION 1: DOWNLOAD CORPUS \n", "# UNCOMMENT THESE LINES TO DOWNLOAD CORPUS\n", "# DATA_DIR = ''\n", "# FULL_ROOT_DIR = download('wiki-corpus', data_dir=DATA_DIR)\n", @@ -169,7 +165,7 @@ "# FULL_ROOT_DIR = ''\n", "\n", "full_corpus = Corpus(FULL_ROOT_DIR)\n", - "full_corpus.load_info(\"utterance\", [\"parsed\"])" + "full_corpus.load_info('utterance',['parsed'])" ] }, { @@ -324,18 +320,9 @@ } ], "source": [ - "pt_model = PromptTypeWrapper(\n", - " n_types=6,\n", - " use_prompt_motifs=False,\n", - " 
root_only=False,\n", - " questions_only=False,\n", - " enforce_caps=False,\n", - " min_support=20,\n", - " min_df=100,\n", - " svd__n_components=50,\n", - " max_dist=1.0,\n", - " random_state=1000,\n", - ")\n", + "pt_model = PromptTypeWrapper(n_types=6, use_prompt_motifs=False, root_only=False,\n", + " questions_only=False, enforce_caps=False, min_support=20, min_df=100,\n", + " svd__n_components=50, max_dist=1., random_state=1000)\n", "pt_model.fit(full_corpus)" ] }, @@ -1904,14 +1891,8 @@ }, "outputs": [], "source": [ - "TYPE_NAMES = [\n", - " \"Prompt: Casual\",\n", - " \"Prompt: Moderation\",\n", - " \"Prompt: Coordination\",\n", - " \"Prompt: Contention\",\n", - " \"Prompt: Editing\",\n", - " \"Prompt: Procedures\",\n", - "]" + "TYPE_NAMES = ['Prompt: Casual', 'Prompt: Moderation', 'Prompt: Coordination', 'Prompt: Contention',\n", + " 'Prompt: Editing', 'Prompt: Procedures']" ] }, { @@ -1967,9 +1948,10 @@ }, "outputs": [], "source": [ - "prompt_dist_df = awry_corpus.get_vectors(name=\"prompt_types__prompt_dists__6\", as_dataframe=True)\n", + "prompt_dist_df = awry_corpus.get_vectors(name='prompt_types__prompt_dists__6', \n", + " as_dataframe=True)\n", "type_ids = np.argmin(prompt_dist_df.values, axis=1)\n", - "mask = np.min(prompt_dist_df.values, axis=1) >= 1.0\n", + "mask = np.min(prompt_dist_df.values, axis=1) >= 1.\n", "type_ids[mask] = 6" ] }, @@ -2107,13 +2089,10 @@ "metadata": {}, "outputs": [], "source": [ - "prompt_type_assignments = np.zeros((len(prompt_dist_df), prompt_dist_df.shape[1] + 1))\n", - "prompt_type_assignments[np.arange(len(type_ids)), type_ids] = 1\n", - "prompt_type_assignment_df = pd.DataFrame(\n", - " columns=np.arange(prompt_dist_df.shape[1] + 1),\n", - " index=prompt_dist_df.index,\n", - " data=prompt_type_assignments,\n", - ")\n", + "prompt_type_assignments = np.zeros((len(prompt_dist_df), prompt_dist_df.shape[1]+1))\n", + "prompt_type_assignments[np.arange(len(type_ids)),type_ids] = 1\n", + "prompt_type_assignment_df = 
pd.DataFrame(columns=np.arange(prompt_dist_df.shape[1]+1), index=prompt_dist_df.index, \n", + " data=prompt_type_assignments)\n", "prompt_type_assignment_df = prompt_type_assignment_df[prompt_type_assignment_df.columns[:-1]]" ] }, @@ -2901,9 +2880,7 @@ " convo_ids.append(comment.conversation_id)\n", " timestamps.append(comment.timestamp)\n", " page_ids.append(conversation.meta[\"page_id\"])\n", - "comment_df = pd.DataFrame(\n", - " {\"conversation_id\": convo_ids, \"timestamp\": timestamps, \"page_id\": page_ids}, index=comment_ids\n", - ")\n", + "comment_df = pd.DataFrame({\"conversation_id\": convo_ids, \"timestamp\": timestamps, \"page_id\": page_ids}, index=comment_ids)\n", "\n", "# we'll do our construction using awry conversation ID's as the reference key\n", "awry_convo_ids = set()\n", @@ -2911,21 +2888,14 @@ "good_convo_map = {}\n", "page_id_map = {}\n", "for conversation in awry_corpus.iter_conversations():\n", - " if (\n", - " conversation.meta[\"conversation_has_personal_attack\"]\n", - " and conversation.id not in awry_convo_ids\n", - " ):\n", + " if conversation.meta[\"conversation_has_personal_attack\"] and conversation.id not in awry_convo_ids:\n", " awry_convo_ids.add(conversation.id)\n", " good_convo_map[conversation.id] = conversation.meta[\"pair_id\"]\n", " page_id_map[conversation.id] = conversation.meta[\"page_id\"]\n", "awry_convo_ids = list(awry_convo_ids)\n", - "pairs_df = pd.DataFrame(\n", - " {\n", - " \"bad_conversation_id\": awry_convo_ids,\n", - " \"conversation_id\": [good_convo_map[cid] for cid in awry_convo_ids],\n", - " \"page_id\": [page_id_map[cid] for cid in awry_convo_ids],\n", - " }\n", - ")\n", + "pairs_df = pd.DataFrame({\"bad_conversation_id\": awry_convo_ids,\n", + " \"conversation_id\": [good_convo_map[cid] for cid in awry_convo_ids],\n", + " \"page_id\": [page_id_map[cid] for cid in awry_convo_ids]})\n", "# finally, we will augment the pairs dataframe with the IDs of the first and second comment for both\n", "# the bad 
and good conversation. This will come in handy for constructing the feature matrix.\n", "first_ids = []\n", @@ -2934,22 +2904,14 @@ "second_ids_bad = []\n", "for row in pairs_df.itertuples():\n", " # \"first two\" is defined in terms of time of posting\n", - " comments_sorted = comment_df[comment_df.conversation_id == row.conversation_id].sort_values(\n", - " by=\"timestamp\"\n", - " )\n", + " comments_sorted = comment_df[comment_df.conversation_id==row.conversation_id].sort_values(by=\"timestamp\")\n", " first_ids.append(comments_sorted.iloc[0].name)\n", " second_ids.append(comments_sorted.iloc[1].name)\n", - " comments_sorted_bad = comment_df[\n", - " comment_df.conversation_id == row.bad_conversation_id\n", - " ].sort_values(by=\"timestamp\")\n", + " comments_sorted_bad = comment_df[comment_df.conversation_id==row.bad_conversation_id].sort_values(by=\"timestamp\")\n", " first_ids_bad.append(comments_sorted_bad.iloc[0].name)\n", " second_ids_bad.append(comments_sorted_bad.iloc[1].name)\n", - "pairs_df = pairs_df.assign(\n", - " first_id=first_ids,\n", - " second_id=second_ids,\n", - " bad_first_id=first_ids_bad,\n", - " bad_second_id=second_ids_bad,\n", - ")" + "pairs_df = pairs_df.assign(first_id=first_ids, second_id=second_ids, \n", + " bad_first_id=first_ids_bad, bad_second_id=second_ids_bad)" ] }, { @@ -2986,19 +2948,19 @@ "outputs": [], "source": [ "def clean_feature_name(feat):\n", - " new_feat = feat.replace(\"feature_politeness\", \"\").replace(\"==\", \"\").replace(\"_\", \" \")\n", + " new_feat = feat.replace('feature_politeness','').replace('==','').replace('_', ' ')\n", " split = new_feat.split()\n", - " first, rest = split[0], \" \".join(split[1:]).lower()\n", + " first, rest = split[0], ' '.join(split[1:]).lower()\n", " if first[0].isalpha():\n", " first = first.title()\n", - " if \"Hashedge\" in first:\n", - " return \"Hedge (lexicon)\"\n", - " if \"Hedges\" in first:\n", - " return \"Hedge (dep. 
tree)\"\n", - " if \"greeting\" in feat:\n", - " return \"Greetings\"\n", - " cleaner_str = first + \" \" + rest\n", - " # cleaner_str = cleaner_str.replace('2nd', '2$\\mathregular{^{nd}}$').replace('1st', '1$\\mathregular{^{st}}$')\n", + " if 'Hashedge' in first:\n", + " return 'Hedge (lexicon)'\n", + " if 'Hedges' in first:\n", + " return 'Hedge (dep. tree)'\n", + " if 'greeting' in feat:\n", + " return 'Greetings'\n", + " cleaner_str = first + ' ' + rest\n", + "# cleaner_str = cleaner_str.replace('2nd', '2$\\mathregular{^{nd}}$').replace('1st', '1$\\mathregular{^{st}}$')\n", " return cleaner_str" ] }, @@ -3010,16 +2972,9 @@ }, "outputs": [], "source": [ - "politeness_strategies_display = politeness_strategies[\n", - " [\n", - " col\n", - " for col in politeness_strategies.columns\n", - " if col not in [\"feature_politeness_==HASNEGATIVE==\", \"feature_politeness_==HASPOSITIVE==\"]\n", - " ]\n", - "].copy()\n", - "politeness_strategies_display.columns = [\n", - " clean_feature_name(col) for col in politeness_strategies_display.columns\n", - "]" + "politeness_strategies_display = politeness_strategies[[col for col in politeness_strategies.columns \n", + " if col not in ['feature_politeness_==HASNEGATIVE==', 'feature_politeness_==HASPOSITIVE==']]].copy()\n", + "politeness_strategies_display.columns = [clean_feature_name(col) for col in politeness_strategies_display.columns]" ] }, { @@ -3041,19 +2996,11 @@ }, "outputs": [], "source": [ - "tox_first_comment_features = pairs_df[[\"bad_first_id\"]].join(\n", - " all_features, how=\"left\", on=\"bad_first_id\"\n", - ")[all_features.columns]\n", - "ntox_first_comment_features = pairs_df[[\"first_id\"]].join(all_features, how=\"left\", on=\"first_id\")[\n", - " all_features.columns\n", - "]\n", + "tox_first_comment_features =pairs_df[['bad_first_id']].join(all_features, how='left', on='bad_first_id')[all_features.columns]\n", + "ntox_first_comment_features =pairs_df[['first_id']].join(all_features, how='left', 
on='first_id')[all_features.columns]\n", "\n", - "tox_second_comment_features = pairs_df[[\"bad_second_id\"]].join(\n", - " all_features, how=\"left\", on=\"bad_second_id\"\n", - ")[all_features.columns]\n", - "ntox_second_comment_features = pairs_df[[\"second_id\"]].join(\n", - " all_features, how=\"left\", on=\"second_id\"\n", - ")[all_features.columns]" + "tox_second_comment_features =pairs_df[['bad_second_id']].join(all_features, how='left', on='bad_second_id')[all_features.columns]\n", + "ntox_second_comment_features =pairs_df[['second_id']].join(all_features, how='left', on='second_id')[all_features.columns]" ] }, { @@ -3065,49 +3012,36 @@ "outputs": [], "source": [ "def get_p_stars(x):\n", - " if x < 0.001:\n", - " return \"***\"\n", - " elif x < 0.01:\n", - " return \"**\"\n", - " elif x < 0.05:\n", - " return \"*\"\n", - " else:\n", - " return \"\"\n", - "\n", - "\n", - "def compare_tox(df_ntox, df_tox, min_n=0):\n", + " if x < .001: return '***'\n", + " elif x < .01: return '**'\n", + " elif x < .05: return '*'\n", + " else: return ''\n", + "def compare_tox(df_ntox, df_tox, min_n=0):\n", " cols = df_ntox.columns\n", - " num_feats_in_tox = df_tox[cols].sum().astype(int).rename(\"num_feat_tox\")\n", - " num_nfeats_in_tox = (1 - df_tox[cols]).sum().astype(int).rename(\"num_nfeat_tox\")\n", - " num_feats_in_ntox = df_ntox[cols].sum().astype(int).rename(\"num_feat_ntox\")\n", - " num_nfeats_in_ntox = (1 - df_ntox[cols]).sum().astype(int).rename(\"num_nfeat_ntox\")\n", - " prop_tox = df_tox[cols].mean().rename(\"prop_tox\")\n", - " ref_prop_ntox = df_ntox[cols].mean().rename(\"prop_ntox\")\n", + " num_feats_in_tox = df_tox[cols].sum().astype(int).rename('num_feat_tox')\n", + " num_nfeats_in_tox = (1 - df_tox[cols]).sum().astype(int).rename('num_nfeat_tox')\n", + " num_feats_in_ntox = df_ntox[cols].sum().astype(int).rename('num_feat_ntox')\n", + " num_nfeats_in_ntox = (1 - df_ntox[cols]).sum().astype(int).rename('num_nfeat_ntox')\n", + " prop_tox = 
df_tox[cols].mean().rename('prop_tox')\n", + " ref_prop_ntox = df_ntox[cols].mean().rename('prop_ntox')\n", " n_tox = len(df_tox)\n", - " df = pd.concat(\n", - " [\n", - " num_feats_in_tox,\n", - " num_nfeats_in_tox,\n", - " num_feats_in_ntox,\n", - " num_nfeats_in_ntox,\n", - " prop_tox,\n", - " ref_prop_ntox,\n", - " ],\n", - " axis=1,\n", - " )\n", - " df[\"num_total\"] = df.num_feat_tox + df.num_feat_ntox\n", - " df[\"log_odds\"] = (\n", - " np.log(df.num_feat_tox)\n", - " - np.log(df.num_nfeat_tox)\n", - " + np.log(df.num_nfeat_ntox)\n", - " - np.log(df.num_feat_ntox)\n", - " )\n", - " df[\"abs_log_odds\"] = np.abs(df.log_odds)\n", - " df[\"binom_p\"] = df.apply(lambda x: stats.binom_test(x.num_feat_tox, n_tox, x.prop_ntox), axis=1)\n", + " df = pd.concat([\n", + " num_feats_in_tox, \n", + " num_nfeats_in_tox,\n", + " num_feats_in_ntox,\n", + " num_nfeats_in_ntox,\n", + " prop_tox,\n", + " ref_prop_ntox,\n", + " ], axis=1)\n", + " df['num_total'] = df.num_feat_tox + df.num_feat_ntox\n", + " df['log_odds'] = np.log(df.num_feat_tox) - np.log(df.num_nfeat_tox) \\\n", + " + np.log(df.num_nfeat_ntox) - np.log(df.num_feat_ntox)\n", + " df['abs_log_odds'] = np.abs(df.log_odds)\n", + " df['binom_p'] = df.apply(lambda x: stats.binom_test(x.num_feat_tox, n_tox, x.prop_ntox), axis=1)\n", " df = df[df.num_total >= min_n]\n", - " df[\"p\"] = df[\"binom_p\"].apply(lambda x: \"%.3f\" % x)\n", - " df[\"pstars\"] = df[\"binom_p\"].apply(get_p_stars)\n", - " return df.sort_values(\"log_odds\", ascending=False)" + " df['p'] = df['binom_p'].apply(lambda x: '%.3f' % x)\n", + " df['pstars'] = df['binom_p'].apply(get_p_stars)\n", + " return df.sort_values('log_odds', ascending=False)" ] }, { @@ -3130,110 +3064,85 @@ }, "outputs": [], "source": [ - "# we are now ready to plot these comparisons. the following (rather intimidating) helper function\n", + "# we are now ready to plot these comparisons. 
the following (rather intimidating) helper function \n", "# produces a nicely-formatted plot:\n", - "def draw_figure(\n", - " ax, first_cmp, second_cmp, title=\"\", prompt_types=6, min_log_odds=0.2, min_freq=50, xlim=0.85\n", - "):\n", + "def draw_figure(ax, first_cmp, second_cmp, title='', prompt_types=6, min_log_odds=.2, min_freq=50,xlim=.85):\n", + "\n", " # selecting and sorting the features to plot, given minimum effect sizes and statistical significance\n", - " frequent_feats = first_cmp[first_cmp.num_total >= min_freq].index.union(\n", - " second_cmp[second_cmp.num_total >= min_freq].index\n", - " )\n", - " lrg_effect_feats = first_cmp[\n", - " (first_cmp.abs_log_odds >= 0.2) & (first_cmp.binom_p < 0.05)\n", - " ].index.union(second_cmp[(second_cmp.abs_log_odds >= 0.2) & (second_cmp.binom_p < 0.05)].index)\n", + " frequent_feats = first_cmp[first_cmp.num_total >= min_freq].index.union(second_cmp[second_cmp.num_total >= min_freq].index)\n", + " lrg_effect_feats = first_cmp[(first_cmp.abs_log_odds >= .2)\n", + " & (first_cmp.binom_p < .05)].index.union(second_cmp[(second_cmp.abs_log_odds >= .2)\n", + " & (second_cmp.binom_p < .05)].index)\n", " feats_to_include = frequent_feats.intersection(lrg_effect_feats)\n", " feat_order = sorted(feats_to_include, key=lambda x: first_cmp.loc[x].log_odds, reverse=True)\n", "\n", " # parameters determining the look of the figure\n", - " colors = [\"darkorchid\", \"seagreen\"]\n", - " shapes = [\"d\", \"s\"]\n", - " eps = 0.02\n", - " star_eps = 0.035\n", + " colors = ['darkorchid', 'seagreen']\n", + " shapes = ['d', 's'] \n", + " eps = .02\n", + " star_eps = .035\n", " xlim = xlim\n", - " min_log = 0.2\n", + " min_log = .2\n", " gap_prop = 2\n", " label_size = 14\n", - " title_size = 18\n", + " title_size=18\n", " radius = 144\n", " features = feat_order\n", " ax.invert_yaxis()\n", - " ax.plot([0, 0], [0, len(features) / gap_prop], color=\"black\")\n", - "\n", - " # for each figure we plot the point according to effect size 
in the first and second comment,\n", + " ax.plot([0,0], [0, len(features)/gap_prop], color='black')\n", + " \n", + " # for each figure we plot the point according to effect size in the first and second comment, \n", " # and add axis labels denoting statistical significance\n", " yticks = []\n", " yticklabels = []\n", " for f_idx, feat in enumerate(features):\n", - " curr_y = (f_idx + 0.5) / gap_prop\n", + " curr_y = (f_idx + .5)/gap_prop\n", " yticks.append(curr_y)\n", " try:\n", + " \n", " first_p = first_cmp.loc[feat].binom_p\n", - " second_p = second_cmp.loc[feat].binom_p\n", + " second_p = second_cmp.loc[feat].binom_p \n", " if first_cmp.loc[feat].abs_log_odds < min_log:\n", " first_face = \"white\"\n", " elif first_p >= 0.05:\n", - " first_face = \"white\"\n", + " first_face = 'white'\n", " else:\n", " first_face = colors[0]\n", " if second_cmp.loc[feat].abs_log_odds < min_log:\n", " second_face = \"white\"\n", " elif second_p >= 0.05:\n", - " second_face = \"white\"\n", + " second_face = 'white'\n", " else:\n", " second_face = colors[1]\n", - " ax.plot(\n", - " [-1 * xlim, xlim], [curr_y, curr_y], \"--\", color=\"grey\", zorder=0, linewidth=0.5\n", - " )\n", - "\n", - " ax.scatter(\n", - " [first_cmp.loc[feat].log_odds],\n", - " [curr_y + eps],\n", - " s=radius,\n", - " edgecolor=colors[0],\n", - " marker=shapes[0],\n", - " zorder=20,\n", - " facecolors=first_face,\n", - " )\n", - " ax.scatter(\n", - " [second_cmp.loc[feat].log_odds],\n", - " [curr_y + eps],\n", - " s=radius,\n", - " edgecolor=colors[1],\n", - " marker=shapes[1],\n", - " zorder=10,\n", - " facecolors=second_face,\n", - " )\n", - "\n", + " ax.plot([-1 * xlim, xlim], [curr_y, curr_y], '--', color='grey', zorder=0, linewidth=.5)\n", + " \n", + " ax.scatter([first_cmp.loc[feat].log_odds], [curr_y + eps], s=radius, edgecolor=colors[0], marker=shapes[0],\n", + " zorder=20, facecolors=first_face)\n", + " ax.scatter([second_cmp.loc[feat].log_odds], [curr_y + eps], s=radius, edgecolor=colors[1], 
marker=shapes[1], \n", + " zorder=10, facecolors=second_face)\n", + " \n", " first_pstr_len = len(get_p_stars(first_p))\n", " second_pstr_len = len(get_p_stars(second_p))\n", - " p_str = np.array([\" \"] * 8)\n", + " p_str = np.array([' '] * 8)\n", " if first_pstr_len > 0:\n", - " p_str[:first_pstr_len] = \"*\"\n", + " p_str[:first_pstr_len] = '*'\n", " if second_pstr_len > 0:\n", - " p_str[-second_pstr_len:] = \"⁺\"\n", - "\n", - " feat_str = feat + \"\\n\" + \"\".join(p_str)\n", + " p_str[-second_pstr_len:] = '⁺'\n", + " \n", + " feat_str = feat + '\\n' + ''.join(p_str)\n", " yticklabels.append(feat_str)\n", " except Exception as e:\n", - " yticklabels.append(\"\")\n", - "\n", + " yticklabels.append('')\n", + " \n", " # add the axis labels\n", - " ax.set_xlabel(\"log-odds ratio\", fontsize=14, family=\"serif\")\n", - " ax.set_xticks([-xlim - 0.05, -0.5, 0, 0.5, xlim])\n", - " ax.set_xticklabels([\"on-track\", -0.5, 0, 0.5, \"awry\"], fontsize=14, family=\"serif\")\n", + " ax.set_xlabel('log-odds ratio', fontsize=14, family='serif')\n", + " ax.set_xticks([-xlim-.05, -.5, 0, .5, xlim])\n", + " ax.set_xticklabels(['on-track', -.5, 0, .5, 'awry'], fontsize=14, family='serif')\n", " ax.set_yticks(yticks)\n", - " ax.set_yticklabels(yticklabels, fontsize=16, family=\"serif\")\n", - " ax.tick_params(axis=\"both\", which=\"both\", bottom=\"off\", top=\"off\", left=\"off\")\n", - " if title != \"\":\n", - " ax.text(\n", - " 0,\n", - " (len(features) + 2.25) / gap_prop,\n", - " title,\n", - " fontsize=title_size,\n", - " family=\"serif\",\n", - " horizontalalignment=\"center\",\n", - " )\n", + " ax.set_yticklabels(yticklabels, fontsize=16, family='serif')\n", + " ax.tick_params(axis='both', which='both', bottom='off', top='off',left='off')\n", + " if title != '':\n", + " ax.text(0, (len(features) + 2.25)/ gap_prop, title, fontsize=title_size, family='serif',horizontalalignment='center',)\n", " return feat_order" ] }, @@ -3254,8 +3163,8 @@ } ], "source": [ - "f, ax = 
plt.subplots(1, 1, figsize=(5, 10))\n", - "_ = draw_figure(ax, first_comparisons, second_comparisons, \"First & second comment\")" + "f, ax = plt.subplots(1,1, figsize=(5,10))\n", + "_ = draw_figure(ax, first_comparisons, second_comparisons, 'First & second comment')" ] }, { @@ -3288,28 +3197,20 @@ "outputs": [], "source": [ "def features_for_convo(convo_id, first_comment_id, second_comment_id):\n", + "\n", " # get prompt type features\n", " try:\n", " first_prompts = prompt_dist_df.loc[first_comment_id]\n", " except:\n", - " first_prompts = pd.Series(\n", - " data=np.ones(len(prompt_dist_df.columns)), index=prompt_dist_df.columns\n", - " )\n", + " first_prompts = pd.Series(data=np.ones(len(prompt_dist_df.columns)), index=prompt_dist_df.columns)\n", " try:\n", - " second_prompts = prompt_dist_df.loc[second_comment_id].rename(\n", - " {c: c + \"_second\" for c in prompt_dist_df.columns}\n", - " )\n", + " second_prompts = prompt_dist_df.loc[second_comment_id].rename({c: c + \"_second\" for c in prompt_dist_df.columns})\n", " except:\n", - " second_prompts = pd.Series(\n", - " data=np.ones(len(prompt_dist_df.columns)),\n", - " index=[c + \"_second\" for c in prompt_dist_df.columns],\n", - " )\n", + " second_prompts = pd.Series(data=np.ones(len(prompt_dist_df.columns)), index=[c + \"_second\" for c in prompt_dist_df.columns])\n", " prompts = first_prompts.append(second_prompts)\n", " # get politeness strategies features\n", " first_politeness = politeness_strategies.loc[first_comment_id]\n", - " second_politeness = politeness_strategies.loc[second_comment_id].rename(\n", - " {c: c + \"_second\" for c in politeness_strategies.columns}\n", - " )\n", + " second_politeness = politeness_strategies.loc[second_comment_id].rename({c: c + \"_second\" for c in politeness_strategies.columns})\n", " politeness = first_politeness.append(second_politeness)\n", " return politeness.append(prompts)" ] @@ -3323,16 +3224,9 @@ "outputs": [], "source": [ "convo_ids = 
np.concatenate((pairs_df.conversation_id.values, pairs_df.bad_conversation_id.values))\n", - "feats = [\n", - " features_for_convo(row.conversation_id, row.first_id, row.second_id)\n", - " for row in pairs_df.itertuples()\n", - "] + [\n", - " features_for_convo(row.bad_conversation_id, row.bad_first_id, row.bad_second_id)\n", - " for row in pairs_df.itertuples()\n", - "]\n", - "feature_table = pd.DataFrame(\n", - " data=np.vstack([f.values for f in feats]), columns=feats[0].index, index=convo_ids\n", - ")" + "feats = [features_for_convo(row.conversation_id, row.first_id, row.second_id) for row in pairs_df.itertuples()] + \\\n", + " [features_for_convo(row.bad_conversation_id, row.bad_first_id, row.bad_second_id) for row in pairs_df.itertuples()]\n", + "feature_table = pd.DataFrame(data=np.vstack([f.values for f in feats]), columns=feats[0].index, index=convo_ids)" ] }, { @@ -3345,14 +3239,10 @@ "source": [ "# in the paper, we dropped the sentiment lexicon based features (HASPOSITIVE and HASNEGATIVE), opting\n", "# to instead use them as a baseline. 
We do this here as well to be consistent with the paper.\n", - "feature_table = feature_table.drop(\n", - " columns=[\n", - " \"feature_politeness_==HASPOSITIVE==\",\n", - " \"feature_politeness_==HASNEGATIVE==\",\n", - " \"feature_politeness_==HASPOSITIVE==_second\",\n", - " \"feature_politeness_==HASNEGATIVE==_second\",\n", - " ]\n", - ")" + "feature_table = feature_table.drop(columns=[\"feature_politeness_==HASPOSITIVE==\",\n", + " \"feature_politeness_==HASNEGATIVE==\",\n", + " \"feature_politeness_==HASPOSITIVE==_second\",\n", + " \"feature_politeness_==HASNEGATIVE==_second\"])" ] }, { @@ -3638,7 +3528,7 @@ } ], "source": [ - "feature_table.head(5)" + "feature_table.head(5)\n" ] }, { @@ -3674,42 +3564,27 @@ " vals, counts = np.unique(seq, return_counts=True)\n", " return vals[np.argmax(counts)]\n", "\n", - "\n", "def run_pred_single(inputs, X, y):\n", " f_idx, (train_idx, test_idx) = inputs\n", - "\n", + " \n", " X_train, X_test = X[train_idx], X[test_idx]\n", " y_train, y_test = y[train_idx], y[test_idx]\n", - "\n", - " base_clf = Pipeline(\n", - " [\n", - " (\"scaler\", StandardScaler()),\n", - " (\"featselect\", SelectPercentile(score_func=f_classif, percentile=10)),\n", - " (\"logreg\", LogisticRegression(solver=\"liblinear\")),\n", - " ]\n", - " )\n", - " clf = GridSearchCV(\n", - " base_clf,\n", - " {\n", - " \"logreg__C\": [10**i for i in range(-4, 4)],\n", - " \"featselect__percentile\": list(range(10, 110, 10)),\n", - " },\n", - " cv=3,\n", - " )\n", + " \n", + " base_clf = Pipeline([(\"scaler\", StandardScaler()), (\"featselect\", SelectPercentile(score_func=f_classif, percentile=10)), (\"logreg\", LogisticRegression(solver='liblinear'))])\n", + " clf = GridSearchCV(base_clf, {\"logreg__C\": [10**i for i in range(-4,4)], \"featselect__percentile\": list(range(10, 110, 10))}, cv=3)\n", "\n", " clf.fit(X_train, y_train)\n", - "\n", - " y_scores = clf.predict_proba(X_test)[:, 1]\n", + " \n", + " y_scores = clf.predict_proba(X_test)[:,1]\n", " y_pred = 
clf.predict(X_test)\n", - "\n", + " \n", " feature_weights = clf.best_estimator_.named_steps[\"logreg\"].coef_.flatten()\n", " feature_mask = clf.best_estimator_.named_steps[\"featselect\"].get_support()\n", - "\n", + " \n", " hyperparams = clf.best_params_\n", - "\n", + " \n", " return (y_pred, y_scores, feature_weights, hyperparams, feature_mask)\n", "\n", - "\n", "def run_pred(X, y, fnames, groups):\n", " feature_weights = {}\n", " scores = np.asarray([np.nan for i in range(len(y))])\n", @@ -3717,10 +3592,10 @@ " hyperparameters = defaultdict(list)\n", " splits = list(enumerate(LeaveOneGroupOut().split(X, y, groups)))\n", " accs = []\n", - "\n", + " \n", " with Pool(os.cpu_count()) as p:\n", " prediction_results = p.map(partial(run_pred_single, X=X, y=y), splits)\n", - "\n", + " \n", " fselect_pvals_all = []\n", " for i in range(len(splits)):\n", " f_idx, (train_idx, test_idx) = splits[i]\n", @@ -3730,16 +3605,15 @@ " feature_weights[f_idx] = np.asarray([np.nan for _ in range(len(fnames))])\n", " feature_weights[f_idx][mask_i] = weights_i\n", " for param in hyperparams_i:\n", - " hyperparameters[param].append(hyperparams_i[param])\n", - "\n", + " hyperparameters[param].append(hyperparams_i[param]) \n", + " \n", " acc = np.mean(y_pred == y)\n", " pvalue = stats.binom_test(sum(y_pred == y), n=len(y), alternative=\"greater\")\n", - "\n", + " \n", " coef_df = pd.DataFrame(feature_weights, index=fnames)\n", - " coef_df[\"mean_coef\"] = coef_df.apply(np.nanmean, axis=1)\n", - " coef_df[\"std_coef\"] = coef_df.apply(np.nanstd, axis=1)\n", - " return acc, coef_df[[\"mean_coef\", \"std_coef\"]], scores, pd.DataFrame(hyperparameters), pvalue\n", - "\n", + " coef_df['mean_coef'] = coef_df.apply(np.nanmean, axis=1)\n", + " coef_df['std_coef'] = coef_df.apply(np.nanstd, axis=1)\n", + " return acc, coef_df[['mean_coef', 'std_coef']], scores, pd.DataFrame(hyperparameters), pvalue\n", "\n", "def get_labeled_pairs(pairs_df):\n", " paired_labels = []\n", @@ -3753,33 +3627,27 @@ " 
else:\n", " c0s.append(row.bad_conversation_id)\n", " c1s.append(row.conversation_id)\n", - " paired_labels.append(i % 2)\n", + " paired_labels.append(i%2)\n", " page_ids.append(row.page_id)\n", - " return pd.DataFrame(\n", - " {\"c0\": c0s, \"c1\": c1s, \"first_convo_toxic\": paired_labels, \"page_id\": page_ids}\n", - " )\n", - "\n", + " return pd.DataFrame({\"c0\": c0s, \"c1\": c1s,\"first_convo_toxic\": paired_labels, \"page_id\": page_ids})\n", "\n", "def get_feature_subset(labeled_pairs_df, feature_list):\n", - " prompt_type_names = [\"type_%d_dist\" % i for i in range(6)] + [\n", - " \"type_%d_dist_second\" % i for i in range(6)\n", - " ]\n", + " prompt_type_names = [\"type_%d_dist\" % i for i in range(6)] + [\"type_%d_dist_second\" % i for i in range(6)]\n", " politeness_names = [f for f in feature_table.columns if f not in prompt_type_names]\n", - "\n", + " \n", " features_to_use = []\n", " if \"prompt_types\" in feature_list:\n", " features_to_use += prompt_type_names\n", " if \"politeness_strategies\" in feature_list:\n", " features_to_use += politeness_names\n", - "\n", + " \n", " feature_subset = feature_table[features_to_use]\n", - "\n", + " \n", " c0_feats = feature_subset.loc[labeled_pairs_df.c0].values\n", " c1_feats = feature_subset.loc[labeled_pairs_df.c1].values\n", - "\n", + " \n", " return c0_feats, c1_feats, features_to_use\n", "\n", - "\n", "def run_pipeline(feature_set):\n", " print(\"Running prediction task for feature set\", \"+\".join(feature_set))\n", " print(\"Generating labels...\")\n", @@ -3790,9 +3658,7 @@ " print(\"Using\", X.shape[1], \"features\")\n", " y = labeled_pairs_df.first_convo_toxic.values\n", " print(\"Running leave-one-page-out prediction...\")\n", - " accuracy, coefs, scores, hyperparams, pvalue = run_pred(\n", - " X, y, feature_names, labeled_pairs_df.page_id\n", - " )\n", + " accuracy, coefs, scores, hyperparams, pvalue = run_pred(X, y, feature_names, labeled_pairs_df.page_id)\n", " print(\"Accuracy:\", 
accuracy)\n", " print(\"p-value: %.4e\" % pvalue)\n", " print(\"C (mode):\", mode(hyperparams.logreg__C))\n", @@ -3957,11 +3823,7 @@ } ], "source": [ - "feature_combos = [\n", - " [\"politeness_strategies\"],\n", - " [\"prompt_types\"],\n", - " [\"politeness_strategies\", \"prompt_types\"],\n", - "]\n", + "feature_combos = [[\"politeness_strategies\"], [\"prompt_types\"], [\"politeness_strategies\", \"prompt_types\"]]\n", "combo_names = []\n", "accs = []\n", "for combo in feature_combos:\n", diff --git a/examples/converting_movie_corpus.ipynb b/examples/converting_movie_corpus.ipynb index 9eee2773..24275b5e 100644 --- a/examples/converting_movie_corpus.ipynb +++ b/examples/converting_movie_corpus.ipynb @@ -85,7 +85,7 @@ "metadata": {}, "outputs": [], "source": [ - "with open(data_dir + \"movie_characters_metadata.txt\", \"r\", encoding=\"utf-8\", errors=\"ignore\") as f:\n", + "with open(data_dir + \"movie_characters_metadata.txt\", \"r\", encoding='utf-8', errors='ignore') as f:\n", " speaker_data = f.readlines()" ] }, @@ -111,13 +111,11 @@ "speaker_meta = {}\n", "for speaker in speaker_data:\n", " speaker_info = [info.strip() for info in speaker.split(\"+++$+++\")]\n", - " speaker_meta[speaker_info[0]] = {\n", - " \"character_name\": speaker_info[1],\n", - " \"movie_idx\": speaker_info[2],\n", - " \"movie_name\": speaker_info[3],\n", - " \"gender\": speaker_info[4],\n", - " \"credit_pos\": speaker_info[5],\n", - " }" + " speaker_meta[speaker_info[0]] = {\"character_name\": speaker_info[1],\n", + " \"movie_idx\": speaker_info[2],\n", + " \"movie_name\": speaker_info[3],\n", + " \"gender\": speaker_info[4],\n", + " \"credit_pos\": speaker_info[5]}" ] }, { @@ -133,7 +131,7 @@ "metadata": {}, "outputs": [], "source": [ - "corpus_speakers = {k: Speaker(id=k, meta=v) for k, v in speaker_meta.items()}" + "corpus_speakers = {k: Speaker(id = k, meta = v) for k,v in speaker_meta.items()}" ] }, { @@ -181,7 +179,7 @@ } ], "source": [ - "corpus_speakers[\"u0\"].meta" + 
"corpus_speakers['u0'].meta" ] }, { @@ -198,7 +196,7 @@ "metadata": {}, "outputs": [], "source": [ - "with open(data_dir + \"movie_lines.txt\", \"r\", encoding=\"utf-8\", errors=\"ignore\") as f:\n", + "with open(data_dir + \"movie_lines.txt\", \"r\", encoding='utf-8', errors='ignore') as f:\n", " utterance_data = f.readlines()" ] }, @@ -251,27 +249,21 @@ "\n", "count = 0\n", "for utterance in tqdm(utterance_data):\n", + " \n", " utterance_info = [info.strip() for info in utterance.split(\"+++$+++\")]\n", - "\n", + " \n", " if len(utterance_info) < 4:\n", " print(utterance_info)\n", - "\n", + " \n", " try:\n", - " idx, speaker, movie_id, text = (\n", - " utterance_info[0],\n", - " utterance_info[1],\n", - " utterance_info[2],\n", - " utterance_info[4],\n", - " )\n", + " idx, speaker, movie_id, text = utterance_info[0], utterance_info[1], utterance_info[2], utterance_info[4]\n", " except:\n", " print(utterance_info)\n", - "\n", - " meta = {\"movie_id\": movie_id}\n", - "\n", - " # root & reply_to will be updated later, timestamp is not applicable\n", - " utterance_corpus[idx] = Utterance(\n", - " id=idx, speaker=corpus_speakers[speaker], text=text, meta=meta\n", - " )\n", + " \n", + " meta = {'movie_id': movie_id}\n", + " \n", + " # root & reply_to will be updated later, timestamp is not applicable \n", + " utterance_corpus[idx] = Utterance(id=idx, speaker=corpus_speakers[speaker], text=text, meta=meta)\n", "\n", "print(\"Total number of utterances = {}\".format(len(utterance_corpus)))" ] @@ -300,7 +292,7 @@ } ], "source": [ - "utterance_corpus[\"L1044\"]" + "utterance_corpus['L1044'] " ] }, { @@ -317,7 +309,7 @@ "metadata": {}, "outputs": [], "source": [ - "with open(data_dir + \"movie_conversations.txt\", \"r\", encoding=\"utf-8\", errors=\"ignore\") as f:\n", + "with open(data_dir + \"movie_conversations.txt\", \"r\", encoding='utf-8', errors='ignore') as f:\n", " convo_data = f.readlines()" ] }, @@ -345,25 +337,27 @@ ], "source": [ "for info in 
tqdm(convo_data):\n", + " \n", " speaker1, speaker2, m, convo = [info.strip() for info in info.split(\"+++$+++\")]\n", "\n", " convo_seq = ast.literal_eval(convo)\n", - "\n", + " \n", " # update utterance\n", " conversation_id = convo_seq[0]\n", - "\n", + " \n", " # convo_seq is a list of utterances ids, arranged in conversational order\n", " for i, line in enumerate(convo_seq):\n", + " \n", " # sanity checking: speaker giving the utterance is indeed in the pair of characters provided\n", " if utterance_corpus[line].speaker.id not in [speaker1, speaker2]:\n", " print(\"speaker mismatch in line {0}\".format(i))\n", - "\n", + " \n", " utterance_corpus[line].conversation_id = conversation_id\n", - "\n", + " \n", " if i == 0:\n", " utterance_corpus[line].reply_to = None\n", " else:\n", - " utterance_corpus[line].reply_to = convo_seq[i - 1]" + " utterance_corpus[line].reply_to = convo_seq[i-1]" ] }, { @@ -390,7 +384,7 @@ } ], "source": [ - "utterance_corpus[\"L666499\"]" + "utterance_corpus['L666499']" ] }, { @@ -416,7 +410,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Note that by default the version number is incremented\n", + "# Note that by default the version number is incremented \n", "movie_corpus = Corpus(utterances=utterance_list)" ] }, @@ -441,9 +435,7 @@ } ], "source": [ - "print(\n", - " \"number of conversations in the dataset = {}\".format(len(movie_corpus.get_conversation_ids()))\n", - ")" + "print(\"number of conversations in the dataset = {}\".format(len(movie_corpus.get_conversation_ids())))" ] }, { @@ -490,7 +482,7 @@ "metadata": {}, "outputs": [], "source": [ - "with open(data_dir + \"movie_titles_metadata.txt\", \"r\", encoding=\"utf-8\", errors=\"ignore\") as f:\n", + "with open(data_dir + \"movie_titles_metadata.txt\", \"r\", encoding='utf-8', errors='ignore') as f:\n", " movie_extra = f.readlines()" ] }, @@ -503,14 +495,12 @@ "movie_meta = defaultdict(dict)\n", "\n", "for movie in movie_extra:\n", - " movie_id, title, year, rating, votes, 
genre = [info.strip() for info in movie.split(\"+++$+++\")]\n", - " movie_meta[movie_id] = {\n", - " \"movie_name\": title,\n", - " \"release_year\": year,\n", - " \"rating\": rating,\n", - " \"votes\": votes,\n", - " \"genre\": genre,\n", - " }" + " movie_id, title, year, rating, votes, genre = [info.strip() for info in movie.split(\"+++$+++\")]\n", + " movie_meta[movie_id] = {\"movie_name\": title,\n", + " \"release_year\": year,\n", + " \"rating\": rating,\n", + " \"votes\": votes,\n", + " \"genre\": genre}" ] }, { @@ -527,12 +517,13 @@ "outputs": [], "source": [ "for convo in movie_corpus.iter_conversations():\n", + " \n", " # get the movie_id for the conversation by checking from utterance info\n", " convo_id = convo.get_id()\n", - " movie_idx = movie_corpus.get_utterance(convo_id).meta[\"movie_id\"]\n", - "\n", + " movie_idx = movie_corpus.get_utterance(convo_id).meta['movie_id']\n", + " \n", " # add movie idx as meta, and update meta with additional movie information\n", - " convo.meta[\"movie_idx\"] = movie_idx\n", + " convo.meta['movie_idx'] = movie_idx\n", " convo.meta.update(movie_meta[movie_idx])" ] }, @@ -588,7 +579,7 @@ "metadata": {}, "outputs": [], "source": [ - "with open(data_dir + \"raw_script_urls.txt\", \"r\", encoding=\"utf-8\", errors=\"ignore\") as f:\n", + "with open(data_dir + \"raw_script_urls.txt\", \"r\", encoding='utf-8', errors='ignore') as f:\n", " urls = f.readlines()" ] }, @@ -610,7 +601,7 @@ "metadata": {}, "outputs": [], "source": [ - "movie_corpus.meta[\"url\"] = movie2url" + "movie_corpus.meta['url'] = movie2url" ] }, { @@ -626,7 +617,7 @@ "metadata": {}, "outputs": [], "source": [ - "movie_corpus.meta[\"name\"] = \"Cornell Movie-Dialogs Corpus\"" + "movie_corpus.meta['name'] = \"Cornell Movie-Dialogs Corpus\"" ] }, { @@ -745,7 +736,7 @@ } ], "source": [ - "movie_corpus.get_utterance(\"L666499\").retrieve_meta(\"parsed\")" + "movie_corpus.get_utterance('L666499').retrieve_meta('parsed')" ] }, { @@ -816,7 +807,7 @@ } ], 
"source": [ - "meta_index(filename=os.path.join(os.path.expanduser(\"~\"), \".convokit/saved-corpora/movie-corpus\"))" + "meta_index(filename = os.path.join(os.path.expanduser(\"~\"), \".convokit/saved-corpora/movie-corpus\"))" ] }, { diff --git a/examples/coordination/examples.ipynb b/examples/coordination/examples.ipynb index e3c83734..a93ee950 100644 --- a/examples/coordination/examples.ipynb +++ b/examples/coordination/examples.ipynb @@ -21,7 +21,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 3, "metadata": { "collapsed": true }, @@ -38,11 +38,20 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": { "collapsed": true }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Downloading wiki-corpus to /Users/seanzhangkx/.convokit/saved-corpora/wiki-corpus\n", + "Downloading wiki-corpus from http://zissou.infosci.cornell.edu/convokit/datasets/wiki-corpus/wiki-corpus.zip (238.4MB)... 
Done\n" + ] + } + ], "source": [ "# OPTION 1: DOWNLOAD CORPUS\n", "# UNCOMMENT THESE LINES TO DOWNLOAD CORPUS\n", @@ -58,7 +67,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": { "collapsed": true }, @@ -78,9 +87,8 @@ " # the rest plots this data as a double bar graph\n", " a_data_points = sorted(a_score_by_marker.items())\n", " b_data_points = sorted(b_score_by_marker.items())\n", - " a_data_points, b_data_points = zip(\n", - " *sorted(zip(a_data_points, b_data_points), key=lambda x: x[0][1], reverse=True)\n", - " )\n", + " a_data_points, b_data_points = zip(*sorted(zip(a_data_points, b_data_points),\n", + " key=lambda x: x[0][1], reverse=True))\n", " labels, a_data_points = zip(*a_data_points)\n", " _, b_data_points = zip(*b_data_points)\n", "\n", @@ -95,24 +103,14 @@ " ax.bar(np.arange(len(a_data_points)), a_data_points, 0.35, color=a_color)\n", " ax.bar(np.arange(len(b_data_points)) + 0.35, b_data_points, 0.35, color=b_color)\n", "\n", - " b_patch = mpatches.Patch(\n", - " color=\"b\",\n", - " label=a_description\n", - " + \" (total: \"\n", - " + str(a_scores[\"count_agg1\"])\n", - " + \", \"\n", - " + str(a_scores[\"count_agg2\"])\n", - " + \")\",\n", - " )\n", - " g_patch = mpatches.Patch(\n", - " color=\"g\",\n", - " label=b_description\n", - " + \" (total: \"\n", - " + str(b_scores[\"count_agg1\"])\n", - " + \", \"\n", - " + str(b_scores[\"count_agg2\"])\n", - " + \")\",\n", - " )\n", + " b_patch = mpatches.Patch(color=\"b\",\n", + " label=a_description + \" (total: \" +\n", + " str(a_scores[\"count_agg1\"]) + \", \" +\n", + " str(a_scores[\"count_agg2\"]) + \")\")\n", + " g_patch = mpatches.Patch(color=\"g\",\n", + " label=b_description + \" (total: \" +\n", + " str(b_scores[\"count_agg1\"]) + \", \" +\n", + " str(b_scores[\"count_agg2\"]) + \")\")\n", " plt.legend(handles=[b_patch, g_patch])\n", "\n", " filename = str(a_description) + \" vs \" + str(b_description) + \".png\"\n", @@ -122,7 +120,7 @@ }, { 
"cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": { "collapsed": true }, @@ -169,49 +167,19 @@ "\n", "# do speakers on the whole coordinate more to admins or nonadmins?\n", "make_chart(\n", - " coord.summarize(\n", - " corpus,\n", - " everyone,\n", - " admins,\n", - " focus=\"targets\",\n", - " summary_report=True,\n", - " target_thresh=7,\n", - " split_by_attribs=split,\n", - " ),\n", - " coord.summarize(\n", - " corpus,\n", - " everyone,\n", - " nonadmins,\n", - " focus=\"targets\",\n", - " summary_report=True,\n", - " target_thresh=7,\n", - " split_by_attribs=split,\n", - " ),\n", - " \"Target-admins\",\n", - " \"Target-nonadmins\",\n", + " coord.summarize(corpus, everyone, admins, focus=\"targets\", summary_report=True,\n", + " target_thresh=7, split_by_attribs=split),\n", + " coord.summarize(corpus, everyone, nonadmins, focus=\"targets\", summary_report=True,\n", + " target_thresh=7, split_by_attribs=split),\n", + " \"Target-admins\", \"Target-nonadmins\"\n", ")\n", "# do admins coordinate to other people more than nonadmins do?\n", "make_chart(\n", - " coord.summarize(\n", - " corpus,\n", - " admins,\n", - " everyone,\n", - " summary_report=True,\n", - " speaker_thresh=7,\n", - " target_thresh=7,\n", - " split_by_attribs=split,\n", - " ),\n", - " coord.summarize(\n", - " corpus,\n", - " nonadmins,\n", - " everyone,\n", - " summary_report=True,\n", - " speaker_thresh=7,\n", - " target_thresh=7,\n", - " split_by_attribs=split,\n", - " ),\n", - " \"Speaker-admins\",\n", - " \"Speaker-nonadmins\",\n", + " coord.summarize(corpus, admins, everyone, summary_report=True,\n", + " speaker_thresh=7, target_thresh=7, split_by_attribs=split),\n", + " coord.summarize(corpus, nonadmins, everyone, summary_report=True,\n", + " speaker_thresh=7, target_thresh=7, split_by_attribs=split),\n", + " \"Speaker-admins\", \"Speaker-nonadmins\"\n", ")" ] }, @@ -233,7 +201,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, 
"metadata": { "collapsed": true }, @@ -246,17 +214,26 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": { "collapsed": true, "scrolled": true }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Downloading supreme-corpus to /Users/seanzhangkx/.convokit/saved-corpora/supreme-corpus\n", + "Downloading supreme-corpus from http://zissou.infosci.cornell.edu/convokit/datasets/supreme-corpus/supreme-corpus.zip (1255.8MB)... Done\n" + ] + } + ], "source": [ "# OPTION 1: DOWNLOAD CORPUS\n", "# UNCOMMENT THESE LINES TO DOWNLOAD CORPUS\n", "# DATA_DIR = ''\n", - "# ROOT_DIR = download('supreme-corpus', data_dir=DATA_DIR)\n", + "# ROOT_DIR = convokit.download('supreme-corpus', data_dir=DATA_DIR)\n", "\n", "# OPTION 2: READ PREVIOUSLY-DOWNLOADED CORPUS FROM DISK\n", "# UNCOMMENT THIS LINE AND REPLACE WITH THE DIRECTORY WHERE THE CORPUS IS LOCATED\n", @@ -267,7 +244,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "metadata": { "scrolled": true }, @@ -369,18 +346,16 @@ "# compute coordination from each justice to everyone\n", "print(\"Justices, ranked by how much they coordinate to others:\")\n", "justices_to_everyone = coord.summarize(corpus, justices, everyone)\n", - "for justice, score in sorted(\n", - " justices_to_everyone.averages_by_speaker().items(), key=lambda x: x[1], reverse=True\n", - "):\n", + "for justice, score in sorted(justices_to_everyone.averages_by_speaker().items(),\n", + " key=lambda x: x[1], reverse=True):\n", " print(justice.id, round(score, 5))\n", "print()\n", "\n", "# compute coordination from everyone to each justice\n", "print(\"Justices, ranked by how much others coordinate to them:\")\n", "everyone_to_justices = coord.summarize(corpus, everyone, justices, focus=\"targets\")\n", - "for justice, score in sorted(\n", - " everyone_to_justices.averages_by_speaker().items(), key=lambda x: x[1], reverse=True\n", - "):\n", + "for 
justice, score in sorted(everyone_to_justices.averages_by_speaker().items(), \n", + " key=lambda x: x[1], reverse=True):\n", " print(justice.id, round(score, 5))\n", "print()" ] @@ -405,225 +380,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": { "collapsed": true }, "outputs": [], "source": [ - "case_ids = {\n", - " \"03-1164\",\n", - " \"04-1332\",\n", - " \"04-1140\",\n", - " \"04-805\",\n", - " \"04-1495\",\n", - " \"05-352\",\n", - " \"04-1360b\",\n", - " \"06-5306\",\n", - " \"03-1388\",\n", - " \"04-473b\",\n", - " \"03-8661\",\n", - " \"03-1160\",\n", - " \"03-633\",\n", - " \"05-1508\",\n", - " \"05-746\",\n", - " \"05-547\",\n", - " \"05-502\",\n", - " \"04-759\",\n", - " \"03-1116\",\n", - " \"05-1240\",\n", - " \"03-287\",\n", - " \"04-607\",\n", - " \"05-1126\",\n", - " \"04-1477\",\n", - " \"04-8990\",\n", - " \"06-480\",\n", - " \"04-1152\",\n", - " \"05-1429\",\n", - " \"03-1488\",\n", - " \"04-10566\",\n", - " \"04-905\",\n", - " \"05-493\",\n", - " \"05-1575\",\n", - " \"04-848\",\n", - " \"05-983\",\n", - " \"03-1395\",\n", - " \"06-5754\",\n", - " \"04-52\",\n", - " \"05-9264\",\n", - " \"03-725\",\n", - " \"05-184\",\n", - " \"04-1131\",\n", - " \"04-698\",\n", - " \"05-381\",\n", - " \"06-593\",\n", - " \"02-1472\",\n", - " \"04-712\",\n", - " \"04-1376\",\n", - " \"03-184\",\n", - " \"06-116\",\n", - " \"04-1618\",\n", - " \"03-1500\",\n", - " \"03-9627\",\n", - " \"05-669\",\n", - " \"05-85\",\n", - " \"05-7058\",\n", - " \"06-313\",\n", - " \"05-1631\",\n", - " \"05-6551\",\n", - " \"04-1244\",\n", - " \"05-705\",\n", - " \"06-84\",\n", - " \"03-1693\",\n", - " \"04-593\",\n", - " \"04-1034\",\n", - " \"04-944\",\n", - " \"04-1186\",\n", - " \"05-1342\",\n", - " \"04-277\",\n", - " \"04-37\",\n", - " \"04-70\",\n", - " \"06-219\",\n", - " \"04-1329\",\n", - " \"05-465\",\n", - " \"05-595\",\n", - " \"04-631\",\n", - " \"03-1230\",\n", - " \"06-278\",\n", - " \"04-473\",\n", - " \"05-130\",\n", - 
" \"03-814\",\n", - " \"04-1414\",\n", - " \"04-433\",\n", - " \"05-83\",\n", - " \"04-637\",\n", - " \"04-1327\",\n", - " \"03-9685\",\n", - " \"02-1672\",\n", - " \"03-1696\",\n", - " \"04-1170b\",\n", - " \"03-636\",\n", - " \"04-1371\",\n", - " \"05-1272\",\n", - " \"04-6964\",\n", - " \"05-380\",\n", - " \"05-996\",\n", - " \"03-1407\",\n", - " \"05-1256\",\n", - " \"05-998\",\n", - " \"03-932\",\n", - " \"06-5247\",\n", - " \"04-1067\",\n", - " \"05-1157\",\n", - " \"03-923\",\n", - " \"05-1541\",\n", - " \"05-9222\",\n", - " \"05-5992\",\n", - " \"03-9168\",\n", - " \"05-200\",\n", - " \"05-260\",\n", - " \"04-368\",\n", - " \"04-603\",\n", - " \"05-204\",\n", - " \"04-480\",\n", - " \"04-1528\",\n", - " \"04-721\",\n", - " \"03-10198\",\n", - " \"04-495\",\n", - " \"03-878\",\n", - " \"03-9877\",\n", - " \"04-1527\",\n", - " \"05-593\",\n", - " \"04-1506\",\n", - " \"05-128\",\n", - " \"06-5618\",\n", - " \"05-1074\",\n", - " \"03-9560\",\n", - " \"03-892\",\n", - " \"04-1084\",\n", - " \"04-980\",\n", - " \"05-7053\",\n", - " \"04-881\",\n", - " \"03-1237\",\n", - " \"04-1324\",\n", - " \"05-416\",\n", - " \"04-5928\",\n", - " \"05-1629\",\n", - " \"04-5293\",\n", - " \"03-9046\",\n", - " \"04-163\",\n", - " \"05-5705\",\n", - " \"03-1293\",\n", - " \"04-1581\",\n", - " \"04-597\",\n", - " \"04-169\",\n", - " \"03-1423\",\n", - " \"03-407\",\n", - " \"03-750\",\n", - " \"05-1056\",\n", - " \"03-388\",\n", - " \"05-5224\",\n", - " \"03-931\",\n", - " \"03-1238\",\n", - " \"04-1203\",\n", - " \"03-1454\",\n", - " \"05-259\",\n", - " \"05-11284\",\n", - " \"05-8820\",\n", - " \"05-608\",\n", - " \"04-1739\",\n", - " \"06-102\",\n", - " \"04-5462\",\n", - " \"03-855\",\n", - " \"03-1039\",\n", - " \"04-514\",\n", - " \"04-563\",\n", - " \"05-11304\",\n", - " \"05-8794\",\n", - " \"04-623\",\n", - " \"04-885\",\n", - " \"04-1170\",\n", - " \"05-1589\",\n", - " \"04-9728\",\n", - " \"06-157\",\n", - " \"04-5286\",\n", - " \"04-1264\",\n", - " \"05-908\",\n", - " 
\"04-1704\",\n", - " \"05-848\",\n", - " \"04-1350\",\n", - " \"05-1120\",\n", - " \"03-409\",\n", - " \"06-484\",\n", - " \"04-1144\",\n", - " \"05-785\",\n", - " \"03-1601\",\n", - " \"04-6432\",\n", - " \"04-373\",\n", - " \"04-1544\",\n", - " \"04-278\",\n", - " \"05-409\",\n", - " \"05-5966\",\n", - " \"04-928\",\n", - " \"05-1382\",\n", - " \"05-915\",\n", - " \"05-1345\",\n", - " \"128orig\",\n", - " \"04-340\",\n", - " \"03-1566\",\n", - " \"05-18\",\n", - " \"105original\",\n", - " \"03-9659\",\n", - " \"04-1360\",\n", - " \"03-710\",\n", - "}\n", + "case_ids = {'03-1164', '04-1332', '04-1140', '04-805', '04-1495', '05-352', '04-1360b', '06-5306', '03-1388', '04-473b', '03-8661', '03-1160', '03-633', '05-1508', '05-746', '05-547', '05-502', '04-759', '03-1116', '05-1240', '03-287', '04-607', '05-1126', '04-1477', '04-8990', '06-480', '04-1152', '05-1429', '03-1488', '04-10566', '04-905', '05-493', '05-1575', '04-848', '05-983', '03-1395', '06-5754', '04-52', '05-9264', '03-725', '05-184', '04-1131', '04-698', '05-381', '06-593', '02-1472', '04-712', '04-1376', '03-184', '06-116', '04-1618', '03-1500', '03-9627', '05-669', '05-85', '05-7058', '06-313', '05-1631', '05-6551', '04-1244', '05-705', '06-84', '03-1693', '04-593', '04-1034', '04-944', '04-1186', '05-1342', '04-277', '04-37', '04-70', '06-219', '04-1329', '05-465', '05-595', '04-631', '03-1230', '06-278', '04-473', '05-130', '03-814', '04-1414', '04-433', '05-83', '04-637', '04-1327', '03-9685', '02-1672', '03-1696', '04-1170b', '03-636', '04-1371', '05-1272', '04-6964', '05-380', '05-996', '03-1407', '05-1256', '05-998', '03-932', '06-5247', '04-1067', '05-1157', '03-923', '05-1541', '05-9222', '05-5992', '03-9168', '05-200', '05-260', '04-368', '04-603', '05-204', '04-480', '04-1528', '04-721', '03-10198', '04-495', '03-878', '03-9877', '04-1527', '05-593', '04-1506', '05-128', '06-5618', '05-1074', '03-9560', '03-892', '04-1084', '04-980', '05-7053', '04-881', '03-1237', '04-1324', '05-416', 
'04-5928', '05-1629', '04-5293', '03-9046', '04-163', '05-5705', '03-1293', '04-1581', '04-597', '04-169', '03-1423', '03-407', '03-750', '05-1056', '03-388', '05-5224', '03-931', '03-1238', '04-1203', '03-1454', '05-259', '05-11284', '05-8820', '05-608', '04-1739', '06-102', '04-5462', '03-855', '03-1039', '04-514', '04-563', '05-11304', '05-8794', '04-623', '04-885', '04-1170', '05-1589', '04-9728', '06-157', '04-5286', '04-1264', '05-908', '04-1704', '05-848', '04-1350', '05-1120', '03-409', '06-484', '04-1144', '05-785', '03-1601', '04-6432', '04-373', '04-1544', '04-278', '05-409', '05-5966', '04-928', '05-1382', '05-915', '05-1345', '128orig', '04-340', '03-1566', '05-18', '105original', '03-9659', '04-1360', '03-710'}\n", "\n", - "corpus = corpus.filter_utterances(corpus, lambda u: u.meta[\"case_id\"][5:] in case_ids)" + "corpus = Corpus.filter_utterances(corpus, lambda u: u.meta[\"case_id\"][5:] in case_ids)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": {}, "outputs": [ { @@ -669,109 +439,64 @@ "justices = lambda speaker: speaker.meta[\"type\"] == \"J\"\n", "lawyers = lambda speaker: speaker.meta[\"type\"] == \"A\"\n", "\n", - "\n", "# filter out justice utterances, leaving only justices who voted on the same side as the advocates speaking to them\n", "def target_justice_is_favorable(speaker_utt, target_utt):\n", " convo = speaker_utt.get_conversation()\n", - " if convo.meta[\"votes_side\"] is None:\n", - " return False\n", - " if speaker_utt.speaker.id not in convo.meta[\"advocates\"]:\n", - " return False\n", + " if convo.meta[\"votes_side\"] is None: return False\n", + " if speaker_utt.speaker.id not in convo.meta[\"advocates\"]: return False\n", " lawyer_side = convo.meta[\"advocates\"][speaker_utt.speaker.id][\"side\"]\n", - " if target_utt.speaker.id not in convo.meta[\"votes_side\"]:\n", - " return False\n", + " if target_utt.speaker.id not in convo.meta[\"votes_side\"]: return False\n", " justice_side = 
convo.meta[\"votes_side\"][target_utt.speaker.id]\n", " return justice_side == lawyer_side\n", "\n", - "\n", "def target_justice_is_unfavorable(speaker_utt, target_utt):\n", " convo = speaker_utt.get_conversation()\n", - " if convo.meta[\"votes_side\"] is None:\n", - " return False\n", - " if speaker_utt.speaker.id not in convo.meta[\"advocates\"]:\n", - " return False\n", + " if convo.meta[\"votes_side\"] is None: return False\n", + " if speaker_utt.speaker.id not in convo.meta[\"advocates\"]: return False\n", " lawyer_side = convo.meta[\"advocates\"][speaker_utt.speaker.id][\"side\"]\n", - " if target_utt.speaker.id not in convo.meta[\"votes_side\"]:\n", - " return False\n", + " if target_utt.speaker.id not in convo.meta[\"votes_side\"]: return False\n", " justice_side = convo.meta[\"votes_side\"][target_utt.speaker.id]\n", " return justice_side != lawyer_side\n", "\n", - "\n", "# filter out justice utterances, leaving only justices who voted on the same side as the advocates they spoke to\n", "def speaker_justice_is_favorable(speaker_utt, target_utt):\n", " convo = speaker_utt.get_conversation()\n", - " if convo.meta[\"votes_side\"] is None:\n", - " return False\n", - " if target_utt.speaker.id not in convo.meta[\"advocates\"]:\n", - " return False\n", + " if convo.meta[\"votes_side\"] is None: return False\n", + " if target_utt.speaker.id not in convo.meta[\"advocates\"]: return False\n", " lawyer_side = convo.meta[\"advocates\"][target_utt.speaker.id][\"side\"]\n", - " if speaker_utt.speaker.id not in convo.meta[\"votes_side\"]:\n", - " return False\n", + " if speaker_utt.speaker.id not in convo.meta[\"votes_side\"]: return False\n", " justice_side = convo.meta[\"votes_side\"][speaker_utt.speaker.id]\n", " return justice_side == lawyer_side\n", "\n", - "\n", "def speaker_justice_is_unfavorable(speaker_utt, target_utt):\n", " convo = speaker_utt.get_conversation()\n", - " if convo.meta[\"votes_side\"] is None:\n", - " return False\n", - " if 
target_utt.speaker.id not in convo.meta[\"advocates\"]:\n", - " return False\n", + " if convo.meta[\"votes_side\"] is None: return False\n", + " if target_utt.speaker.id not in convo.meta[\"advocates\"]: return False\n", " lawyer_side = convo.meta[\"advocates\"][target_utt.speaker.id][\"side\"]\n", - " if speaker_utt.speaker.id not in convo.meta[\"votes_side\"]:\n", - " return False\n", + " if speaker_utt.speaker.id not in convo.meta[\"votes_side\"]: return False\n", " justice_side = convo.meta[\"votes_side\"][speaker_utt.speaker.id]\n", " return justice_side != lawyer_side\n", "\n", - "\n", "# do lawyers coordinate more to unfavorable or favorable justices?\n", "make_chart(\n", - " coord.summarize(\n", - " corpus,\n", - " lawyers,\n", - " justices,\n", - " summary_report=True,\n", - " target_thresh=3,\n", - " speaker_thresh=3,\n", - " split_by_attribs=split,\n", - " target_utterance_selector=target_justice_is_unfavorable,\n", - " ),\n", - " coord.summarize(\n", - " corpus,\n", - " lawyers,\n", - " justices,\n", - " summary_report=True,\n", - " target_thresh=3,\n", - " speaker_thresh=3,\n", - " split_by_attribs=split,\n", - " target_utterance_selector=target_justice_is_favorable,\n", - " ),\n", - " \"Target-unfavorable justice\",\n", - " \"Target-favorable justice\",\n", + " coord.summarize(corpus, lawyers, justices, summary_report=True,\n", + " target_thresh=3, speaker_thresh=3, split_by_attribs=split,\n", + " target_utterance_selector=target_justice_is_unfavorable),\n", + " coord.summarize(corpus, lawyers, justices, summary_report=True,\n", + " target_thresh=3, speaker_thresh=3, split_by_attribs=split,\n", + " target_utterance_selector=target_justice_is_favorable),\n", + " \"Target-unfavorable justice\", \"Target-favorable justice\"\n", ")\n", "# do unfavorable justices coordinate to lawyers more than favorable justices, or\n", "# vice versa?\n", "make_chart(\n", - " coord.summarize(\n", - " corpus,\n", - " justices,\n", - " lawyers,\n", - " 
summary_report=True,\n", - " target_thresh=6,\n", - " split_by_attribs=split,\n", - " speaker_utterance_selector=speaker_justice_is_unfavorable,\n", - " ),\n", - " coord.summarize(\n", - " corpus,\n", - " justices,\n", - " lawyers,\n", - " summary_report=True,\n", - " target_thresh=6,\n", - " split_by_attribs=split,\n", - " speaker_utterance_selector=speaker_justice_is_favorable,\n", - " ),\n", - " \"Speaker-unfavorable justice\",\n", - " \"Speaker-favorable justice\",\n", + " coord.summarize(corpus, justices, lawyers, summary_report=True,\n", + " target_thresh=6, split_by_attribs=split,\n", + " speaker_utterance_selector=speaker_justice_is_unfavorable),\n", + " coord.summarize(corpus, justices, lawyers, summary_report=True,\n", + " target_thresh=6, split_by_attribs=split,\n", + " speaker_utterance_selector=speaker_justice_is_favorable),\n", + " \"Speaker-unfavorable justice\", \"Speaker-favorable justice\"\n", ")" ] }, @@ -801,7 +526,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.0" + "version": "3.9.7" } }, "nbformat": 4, diff --git a/examples/corpus_from_pandas.ipynb b/examples/corpus_from_pandas.ipynb index 15610b42..37d3e8a6 100644 --- a/examples/corpus_from_pandas.ipynb +++ b/examples/corpus_from_pandas.ipynb @@ -48,7 +48,7 @@ ], "source": [ "# using an existing Corpus of the subreddit named 'hey'\n", - "corpus = Corpus(download(\"subreddit-hey\"))" + "corpus = Corpus(download('subreddit-hey')) " ] }, { @@ -78,9 +78,9 @@ "outputs": [], "source": [ "# you can ignore this\n", - "utt_df = corpus.get_utterances_dataframe().drop(columns=[\"vectors\"])\n", - "convo_df = corpus.get_conversations_dataframe().drop(columns=[\"vectors\"])\n", - "speaker_df = corpus.get_speakers_dataframe().drop(columns=[\"vectors\"])" + "utt_df = corpus.get_utterances_dataframe().drop(columns=['vectors'])\n", + "convo_df = corpus.get_conversations_dataframe().drop(columns=['vectors'])\n", + "speaker_df = 
corpus.get_speakers_dataframe().drop(columns=['vectors'])" ] }, { @@ -992,9 +992,7 @@ } ], "source": [ - "new_corpus = Corpus.from_pandas(\n", - " utterances_df=utt_df, speakers_df=speaker_df, conversations_df=convo_df\n", - ")" + "new_corpus = Corpus.from_pandas(utterances_df=utt_df, speakers_df=speaker_df, conversations_df=convo_df)" ] }, { @@ -1032,10 +1030,10 @@ "outputs": [], "source": [ "# constructing simple utterance dataframe, you can ignore this\n", - "simple_utt_df = utt_df[[\"timestamp\", \"text\", \"speaker\", \"reply_to\", \"conversation_id\"]]\n", + "simple_utt_df = utt_df[['timestamp', 'text', 'speaker', 'reply_to', 'conversation_id']]\n", "ids = list(simple_utt_df.index)\n", "simple_utt_df = simple_utt_df.reset_index()\n", - "simple_utt_df[\"id\"] = ids" + "simple_utt_df['id'] = ids" ] }, { @@ -1536,7 +1534,7 @@ ], "source": [ "# before\n", - "corpus.get_conversations_dataframe().drop(columns=[\"vectors\"]).head()" + "corpus.get_conversations_dataframe().drop(columns=['vectors']).head()" ] }, { @@ -1603,7 +1601,7 @@ ], "source": [ "# after\n", - "new_corpus.get_conversations_dataframe().drop(columns=[\"vectors\"]).head()" + "new_corpus.get_conversations_dataframe().drop(columns=['vectors']).head()" ] }, { @@ -1642,4 +1640,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/examples/dataset-examples/wikiconv/Create_Conversations_Script.ipynb b/examples/dataset-examples/wikiconv/Create_Conversations_Script.ipynb index 164a93b3..3264372b 100644 --- a/examples/dataset-examples/wikiconv/Create_Conversations_Script.ipynb +++ b/examples/dataset-examples/wikiconv/Create_Conversations_Script.ipynb @@ -15,7 +15,7 @@ "metadata": {}, "outputs": [], "source": [ - "# import relevant modules\n", + "#import relevant modules\n", "from datetime import datetime, timedelta\n", "from convokit import Corpus, download\n", "import re\n", @@ -31,13 +31,14 @@ "name": "stdout", "output_type": "stream", "text": [ - "Dataset already exists 
at /Users/seanzhangkx/.convokit/downloads/wikiconv-2003\n" + "Downloading wikiconv-2003 to /kitchen/convokit_corpora_jpc/wikiconv-2003\n", + "Downloading wikiconv-2003 from http://zissou.infosci.cornell.edu/convokit/datasets/wikiconv-corpus/corpus-zipped/2003/full.corpus.zip (38.7MB)... Done\n" ] } ], "source": [ "# Load the 2003 wikiconv corpus (feel free to change this to a year of your preference)\n", - "wikiconv_corpus = Corpus(filename=download(\"wikiconv-2003\"))" + "wikiconv_corpus = Corpus(filename=download('wikiconv-2003'))" ] }, { @@ -102,21 +103,18 @@ "metadata": {}, "outputs": [], "source": [ - "# Randomly chooses the set number of conversations to print from the entire conversaton set\n", - "def print_random_conversations(\n", - " conversation_list, number_of_conversations, conversation_min_length, conversation_corpus\n", - "):\n", + "#Randomly chooses the set number of conversations to print from the entire conversaton set\n", + "def print_random_conversations(conversation_list, number_of_conversations, conversation_min_length, conversation_corpus): \n", " randomly_generated_conversation_list = []\n", - " while len(randomly_generated_conversation_list) != number_of_conversations:\n", - " new_conversation = random.randint(0, (len(conversation_list) - 1))\n", + " while (len(randomly_generated_conversation_list) != number_of_conversations):\n", + " new_conversation = random.randint(0, (len(conversation_list)-1))\n", " new_conversation_id = conversation_list[new_conversation]\n", " conversation_ids_list = new_conversation_id.get_utterance_ids()\n", - " if new_conversation not in randomly_generated_conversation_list and (\n", - " len(conversation_ids_list) >= conversation_min_length\n", - " ):\n", + " if (new_conversation not in randomly_generated_conversation_list \n", + " and (len(conversation_ids_list) >= conversation_min_length)):\n", " randomly_generated_conversation_list.append(new_conversation_id)\n", - "\n", - " return 
randomly_generated_conversation_list" + " \n", + " return randomly_generated_conversation_list\n" ] }, { @@ -135,7 +133,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "[Conversation({'obj_type': 'conversation', 'vectors': [], 'tree': None, 'owner': , 'id': '1172638.641.641', 'meta': ConvoKitMeta({'page_id': '253576', 'page_title': 'Roman Catholic Archdiocese of Quebec', 'page_type': 'talk'})}), Conversation({'obj_type': 'conversation', 'vectors': [], 'tree': None, 'owner': , 'id': '691102.8439.8439', 'meta': ConvoKitMeta({'page_id': '178863', 'page_title': 'Jimfbleak', 'page_type': 'user_talk'})}), Conversation({'obj_type': 'conversation', 'vectors': [], 'tree': None, 'owner': , 'id': '490071.18706.18706', 'meta': ConvoKitMeta({'page_id': '14218', 'page_title': 'Homophobia/Archive 6', 'page_type': 'talk'})})]\n" + "[Conversation({'obj_type': 'conversation', 'meta': {'page_id': '383784', 'page_title': 'Matty j', 'page_type': 'user_talk'}, 'vectors': [], 'tree': None, 'owner': , 'id': '2081953.389.389'}), Conversation({'obj_type': 'conversation', 'meta': {'page_id': '187772', 'page_title': 'Large numbers', 'page_type': 'talk'}, 'vectors': [], 'tree': None, 'owner': , 'id': '1053969.145.145'}), Conversation({'obj_type': 'conversation', 'meta': {'page_id': '269015', 'page_title': 'Creationism/Archive 6', 'page_type': 'talk'}, 'vectors': [], 'tree': None, 'owner': , 'id': '1324714.25382.25382'})]\n" ] } ], @@ -144,10 +142,9 @@ "number_of_conversations_to_print = 3\n", "conversation_min_length = 2\n", "\n", - "random_conversations = print_random_conversations(\n", - " conversation_list, number_of_conversations_to_print, conversation_min_length, wikiconv_corpus\n", - ")\n", - "print(random_conversations)" + "random_conversations = print_random_conversations(conversation_list, number_of_conversations_to_print,\n", + " conversation_min_length, wikiconv_corpus)\n", + "print (random_conversations)" ] }, { @@ -167,22 +164,21 @@ "name": "stdout", "output_type": 
"stream", "text": [ - "https://en.wikipedia.org/w/index.php?title=talk:Roman_Catholic_Archdiocese_of_Quebec\n", - "https://en.wikipedia.org/w/index.php?title=user_talk:Jimfbleak\n", - "https://en.wikipedia.org/w/index.php?title=talk:Homophobia/Archive_6\n" + "https://en.wikipedia.org/w/index.php?title=user_talk:Matty_j\n", + "https://en.wikipedia.org/w/index.php?title=talk:Large_numbers\n", + "https://en.wikipedia.org/w/index.php?title=talk:Creationism/Archive_6\n" ] } ], "source": [ "def wikipedia_link_info(conversation):\n", - " page_title = conversation.meta[\"page_title\"]\n", - " page_title = re.sub(\"\\s+\", \"_\", page_title)\n", - " page_type = conversation.meta[\"page_type\"]\n", - " link_value = \"https://en.wikipedia.org/w/index.php?title=\" + page_type + \":\" + page_title\n", - "\n", + " page_title = conversation.meta['page_title']\n", + " page_title = re.sub('\\s+', '_', page_title)\n", + " page_type = conversation.meta['page_type']\n", + " link_value = \"https://en.wikipedia.org/w/index.php?title=\"+page_type+\":\"+page_title\n", + " \n", " return link_value\n", "\n", - "\n", "for conversation in random_conversations:\n", " print(wikipedia_link_info(conversation))\n", " conversation_ids_list = conversation.get_utterance_ids()" @@ -203,7 +199,7 @@ "metadata": {}, "outputs": [], "source": [ - "# For any comments that do not have matching reply to ids, sort these comments in order of recency\n", + "#For any comments that do not have matching reply to ids, sort these comments in order of recency \n", "def sort_by_timestamp(conversation_ids_list, conversation_corpus):\n", " list_of_utterances = []\n", " for id_val in conversation_ids_list:\n", @@ -212,10 +208,10 @@ " tuple_val = (id_val, timestamp_val)\n", " list_of_utterances.append(tuple_val)\n", "\n", - " sorted_utterance_list = sorted(list_of_utterances, key=lambda x: x[1])\n", + " sorted_utterance_list = sorted(list_of_utterances, key = lambda x:x[1])\n", " sorted_utterance_list.reverse()\n", " 
id_list = [i[0] for i in sorted_utterance_list]\n", - " return id_list" + " return (id_list)" ] }, { @@ -224,25 +220,23 @@ "metadata": {}, "outputs": [], "source": [ - "# Find cases in which an utterance's reply to is to a comment in the chain that has been modified, deleted or restored\n", - "def check_lists_for_match(\n", - " x, conversation_ids_list, utterance, next_utterance_value, conversation_corpus\n", - "):\n", - " modification_list = utterance.meta[\"modification\"]\n", - " deletion_list = utterance.meta[\"deletion\"]\n", - " restoration_list = utterance.meta[\"restoration\"]\n", - " if len(modification_list) > 0:\n", + "#Find cases in which an utterance's reply to is to a comment in the chain that has been modified, deleted or restored\n", + "def check_lists_for_match(x, conversation_ids_list, utterance, next_utterance_value, conversation_corpus):\n", + " modification_list = utterance.meta['modification']\n", + " deletion_list = utterance.meta['deletion']\n", + " restoration_list = utterance.meta['restoration']\n", + " if (len(modification_list)>0):\n", " for utterance_val in modification_list:\n", - " if utterance_val[\"id\"] == next_utterance_value.reply_to:\n", - " conversation_ids_list.insert(x + 1, next_utterance_value.id)\n", - " if len(deletion_list) > 0:\n", + " if (utterance_val['id'] == next_utterance_value.reply_to):\n", + " conversation_ids_list.insert(x+1, next_utterance_value.id)\n", + " if (len(deletion_list)>0):\n", " for utterance_val in deletion_list:\n", - " if utterance_val[\"id\"] == next_utterance_value.reply_to:\n", - " conversation_ids_list.insert(x + 1, next_utterance_value.id)\n", - " if len(restoration_list) > 0:\n", + " if (utterance_val['id'] == next_utterance_value.reply_to):\n", + " conversation_ids_list.insert(x+1, next_utterance_value.id)\n", + " if (len(restoration_list)>0):\n", " for utterance_val in restoration_list:\n", - " if utterance_val[\"id\"] == next_utterance_value.reply_to:\n", - " 
conversation_ids_list.insert(x + 1, next_utterance_value.id)" + " if (utterance_val['id'] == next_utterance_value.reply_to):\n", + " conversation_ids_list.insert(x+1, next_utterance_value.id)" ] }, { @@ -255,21 +249,15 @@ "def add_utterance(conversation_ids_list, next_utterance_value, conversation_corpus):\n", " if next_utterance_value.id in conversation_ids_list:\n", " return conversation_ids_list\n", - " elif next_utterance_value.reply_to is None:\n", + " elif (next_utterance_value.reply_to is None):\n", " conversation_ids_list.append(next_utterance_value.id)\n", " else:\n", - " for x in range(0, len(conversation_ids_list)):\n", + " for x in range(0,len(conversation_ids_list)):\n", " utterance_id = conversation_ids_list[x]\n", - " if utterance_id == next_utterance_value.reply_to:\n", - " conversation_ids_list.insert(x + 1, next_utterance_value.id)\n", + " if (utterance_id == next_utterance_value.reply_to):\n", + " conversation_ids_list.insert(x+1, next_utterance_value.id)\n", " else:\n", - " check_lists_for_match(\n", - " x,\n", - " conversation_ids_list,\n", - " conversation_corpus.get_utterance(utterance_id),\n", - " next_utterance_value,\n", - " conversation_corpus,\n", - " )\n", + " check_lists_for_match(x, conversation_ids_list, conversation_corpus.get_utterance(utterance_id), next_utterance_value, conversation_corpus)\n", "\n", " return conversation_ids_list" ] @@ -280,35 +268,29 @@ "metadata": {}, "outputs": [], "source": [ - "# The order of the returned conversation ids is not guaranteed; compute the correct ordering\n", + "#The order of the returned conversation ids is not guaranteed; compute the correct ordering \n", "def find_correct_order(conversation_ids_list, conversation_corpus):\n", " correct_list_order = []\n", - " # if the conversation has only one comment, return the conversation list\n", - " if len(conversation_ids_list) == 1:\n", + " #if the conversation has only one comment, return the conversation list\n", + " if (len(conversation_ids_list) 
== 1 ):\n", " return conversation_ids_list\n", "\n", - " # When the conversation has more than one comment, find the correct order of the comments\n", - " if len(conversation_ids_list) > 1:\n", - " # Implement a fail safe to efficiently sort\n", + " #When the conversation has more than one comment, find the correct order of the comments\n", + " if (len(conversation_ids_list) >1):\n", + " #Implement a fail safe to efficiently sort \n", " number_of_iterations = 0\n", - " while number_of_iterations < 20 and len(correct_list_order) != len(conversation_ids_list):\n", + " while (number_of_iterations <20 and len(correct_list_order) != len(conversation_ids_list)):\n", " for utterance_id in conversation_ids_list:\n", - " correct_list_order = add_utterance(\n", - " correct_list_order,\n", - " conversation_corpus.get_utterance(utterance_id),\n", - " conversation_corpus,\n", - " )\n", - " number_of_iterations += 1\n", + " correct_list_order = add_utterance(correct_list_order, conversation_corpus.get_utterance(utterance_id), conversation_corpus)\n", + " number_of_iterations+=1\n", "\n", - " # In some of the conversations, new utterances will be added that don't reply directly to the current conversation\n", - " # Instead, these new utterances are part of the topic at hand (under the same conversation root) and are sorted by recency\n", - " if len(correct_list_order) != len(conversation_ids_list):\n", + " #In some of the conversations, new utterances will be added that don't reply directly to the current conversation\n", + " #Instead, these new utterances are part of the topic at hand (under the same conversation root) and are sorted by recency\n", + " if (len(correct_list_order) != len(conversation_ids_list)):\n", " difference_in_sets = set(conversation_ids_list).difference(correct_list_order)\n", - " timestamp_sorted_difference = sort_by_timestamp(\n", - " list(difference_in_sets), conversation_corpus\n", - " )\n", + " timestamp_sorted_difference = 
sort_by_timestamp(list(difference_in_sets), conversation_corpus)\n", " correct_list_order.extend(timestamp_sorted_difference)\n", - " return correct_list_order" + " return correct_list_order\n" ] }, { @@ -327,16 +309,16 @@ "name": "stdout", "output_type": "stream", "text": [ - "Original Order of IDs:['1172638.641.641', '1173000.1045.1045']\n", - "Correct Order of IDs:['1172638.641.641', '1173000.1045.1045']\n", + "Original Order of IDs:['2081953.413.389', '2081953.389.389']\n", + "Correct Order of IDs:['2081953.389.389', '2081953.413.389']\n", "\n", "\n", - "Original Order of IDs:['811685.0.8439', '811685.0.8612']\n", - "Correct Order of IDs:['811685.0.8439', '811685.0.8612']\n", + "Original Order of IDs:['1053969.145.145', '1054046.1046.1046', '1054707.1680.1680', '3744612.1995.1995']\n", + "Correct Order of IDs:['1053969.145.145', '1054707.1680.1680', '3744612.1995.1995', '1054046.1046.1046']\n", "\n", "\n", - "Original Order of IDs:['709021.8491.9368', '709021.8654.11099']\n", - "Correct Order of IDs:['709021.8491.9368', '709021.8654.11099']\n", + "Original Order of IDs:['1344757.132.30543', '1344757.132.30573', '1344757.132.30760', '1344757.132.31076', '1344757.132.30860', '1344757.132.31731', '1344757.132.32775', '1344757.132.32926', '1329814.35766.35766']\n", + "Correct Order of IDs:['1344757.132.30543', '1344757.132.30573', '1344757.132.30760', '1344757.132.31076', '1344757.132.30860', '1344757.132.31731', '1344757.132.32775', '1344757.132.32926', '1329814.35766.35766']\n", "\n", "\n" ] @@ -345,9 +327,9 @@ "source": [ "for conversation in random_conversations:\n", " conversation_ids_list = conversation.get_utterance_ids()\n", - " print(\"Original Order of IDs:\" + str(conversation_ids_list))\n", - " print(\"Correct Order of IDs:\" + str(find_correct_order(conversation_ids_list, wikiconv_corpus)))\n", - " print(\"\\n\")" + " print ('Original Order of IDs:' + str(conversation_ids_list))\n", + " print('Correct Order of IDs:' + 
str(find_correct_order(conversation_ids_list, wikiconv_corpus)))\n", + " print ('\\n')" ] }, { @@ -363,26 +345,22 @@ "metadata": {}, "outputs": [], "source": [ - "# Print the conversation text from the list of conversation ids\n", + "#Print the conversation text from the list of conversation ids\n", "def print_final_conversation(random_conversations, conversation_corpus):\n", " for conversation in random_conversations:\n", " print(wikipedia_link_info(conversation))\n", " conversation_ids_list = conversation.get_utterance_ids()\n", - " # First correctly reorder the comments\n", + " #First correctly reorder the comments\n", " ordered_list = find_correct_order(conversation_ids_list, conversation_corpus)\n", - " # For each utterance, print the text present if the utterance has not been deleted\n", + " #For each utterance, print the text present if the utterance has not been deleted\n", " for utterance_id in ordered_list:\n", " utterance_value = conversation_corpus.get_utterance(utterance_id)\n", - " if utterance_value.text != \" \":\n", - " print(utterance_value.text)\n", - " date_time_val = datetime.fromtimestamp(utterance_value.timestamp).strftime(\n", - " \"%H:%M %d-%m-%Y\"\n", - " )\n", - " formatted_user_name = (\n", - " \"--\" + str(utterance_value.speaker.id) + \" \" + str(date_time_val)\n", - " )\n", - " print(formatted_user_name)\n", - " print(\"\\n\\n\")" + " if (utterance_value.text != \" \"):\n", + " print (utterance_value.text)\n", + " date_time_val = datetime.fromtimestamp(utterance_value.timestamp).strftime('%H:%M %d-%m-%Y')\n", + " formatted_user_name = \"--\" + str(utterance_value.user.name) + \" \" + str(date_time_val)\n", + " print (formatted_user_name)\n", + " print ('\\n\\n')" ] }, { @@ -394,32 +372,46 @@ "name": "stdout", "output_type": "stream", "text": [ - "https://en.wikipedia.org/w/index.php?title=talk:Roman_Catholic_Archdiocese_of_Quebec\n", - "Two problems with the title:\n", - "1) it should not be capitalized;\n", - "2) all the archbishops 
are also bishops (for example, the list at Diocese de Montreal lists a particular one as \"third bishop and first archbishop\").\n", - "I had moved this to List of Roman Catholic bishops of Quebec, and likewise for the Montreal list, but efghij moved them back. May I ask why? - \n", - "--Montrealais 11:41 20-07-2003\n", - "1) It should be capitalized. \"Bishop of Quebec\" is a title, just like \"Prime Minister of Canada\" or \"King of Spain\".\n", - "2) It's technically correct that all archbishops are also bishops, however it is somewhat counter-intuatve to list them all under \"Bishops of Quebec\".\n", - "- 19:00 20 Jul 2003 (UTC)\n", - "--Efghij 15:00 20-07-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:Matty_j\n", + " Baseball/temp \n", + "--Ktsquare 20:56 23-12-2003\n", + "Hi Matty, I noticed you have contributed to many bios of MLB players, I'd like to ask for your opinion whether it's time to move the rewritten Baseball/temp to Baseball. Please comment at Talk:Baseball/temp. Thanks. \n", + "--Ktsquare 20:56 23-12-2003\n", "\n", "\n", "\n", - "https://en.wikipedia.org/w/index.php?title=user_talk:Jimfbleak\n", + "https://en.wikipedia.org/w/index.php?title=talk:Large_numbers\n", + "Can I suggest that we include only pure numbers in this article, not distances and other measurements? Would anyone object if I deleted the astronomical distances, since they are only large numbers when expressed in small units? I suppose I should go further and say that Avogradro's number is also just an arbitrary unit, but I shan't, because I feel I'm on a slippery slope towards excluding everything! \n", + "--Heron 05:16 18-06-2003\n", + "Let me put this another way. I think the present article should be, as it mostly is, about the mathematics of large numbers. Other large quantities, such as astronomical distances, already have a place on the orders of magnitude pages (1e10 m etc.) Perhaps we should just link to them. 
\n", + "--Heron 05:56 18-06-2003\n", + "Yes, you're quite right. Well, about most things. I could argue that physically distance is dimensionless but that would just be arrogant pedantry. The page title is \"large number\" not just \"large\", and the order of magnitude pages are pretty good for comparing distances. BTW did you see my reply for you on Wikipedia:Reference desk? 13:53 18 Jun 2003 (UTC)\n", + "--Tim Starling 09:53 18-06-2003\n", + "I agree with you about 1010. I wouldn't call the number of bits on a hard disk particularly large, either. It is certainly subjective. My point was that measurements of distance etc. are different from pure numbers. Measurements are, by definition, relative, whereas at least pure numbers are absolute. Largeness is another thing. Perhaps one definition would be \"a number considered as large at a particular time by a particular culture\". For example, I seem to remember that the Old Testament uses the number 40 as a generic large number in several places (e.g. \"40 days and 40 nights\"). \n", + "--Heron 05:45 18-06-2003\n", "\n", "\n", "\n", - "https://en.wikipedia.org/w/index.php?title=talk:Homophobia/Archive_6\n", + "https://en.wikipedia.org/w/index.php?title=talk:Creationism/Archive_6\n", + "Luckily my university has an institutional subscription to OED online. Here it is:\n", + "'''Creationism''' A system or theory of creation: ''spec.'' ''a.'' The theory that God immediately creates a soul for every human being born (opposed to traducianism); ''b.'' The theory which attributes the origin of matter, the different species of animals and plants, etc., to `special creation' (opposed to evolutionism).\n", + " \n", + "--Netesq 12:29 22-08-2003\n", "\n", "\n", "\n" ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/sauna/conda-envs/zissou-env/lib/python3.7/site-packages/ipykernel_launcher.py:14: FutureWarning: speaker.name is deprecated and will be removed in a future release. 
Use speaker.id instead.\n" + ] } ], "source": [ - "print_final_conversation(random_conversations, wikiconv_corpus)" + "print_final_conversation(random_conversations, wikiconv_corpus)" ] }, { @@ -435,15 +427,10 @@ "metadata": {}, "outputs": [], "source": [ - "def change_defaults_print_final(\n", - " conversation_list, number_of_conversations, conversation_min_length, conversation_corpus\n", - "):\n", - " random_conversations = print_random_conversations(\n", - " conversation_list,\n", - " number_of_conversations_to_print,\n", - " conversation_min_length,\n", - " wikiconv_corpus,\n", - " )\n", + "def change_defaults_print_final(conversation_list, number_of_conversations, conversation_min_length, \n", + " conversation_corpus):\n", + " random_conversations = print_random_conversations(conversation_list, number_of_conversations_to_print,\n", + " conversation_min_length, wikiconv_corpus)\n", " print_final_conversation(random_conversations, conversation_corpus)" ] }, @@ -456,32 +443,41 @@ "name": "stdout", "output_type": "stream", "text": [ - "https://en.wikipedia.org/w/index.php?title=user_talk:Daniel_C._Boyer/archive_1\n", - "Have you written this article? or do you have any idea about this article? (If you don't understand Korean, the title means \"unmarried girl backdoor\" or something.) Is it one of your work? \n", - "--217.0.84.251 15:36 07-04-2003\n", - "Yes; it should be The Tailgating Spinster (title of my book of poetry). I apologise if my Korean is not good enough; perhaps you could provide a better translation of the title. \n", - "--Daniel C. Boyer 10:47 09-04-2003\n", - "OK. I'll try to find a better translation. But due to my poor english, I can't understand the title. Does Tailgating mean ''chasing closely''? And does Spinster mean ''unmarried old woman''? \n", - "--Xaos~enwiki 23:02 09-04-2003\n", - "Yes; \"tailgating\" means (when one is driving) to follow too closely behind the car (or truck) in front of you. 
A spinster is usually used to mean an ''unmaried old woman'' but it can mean an unmarried woman of any age (probably she would have to be old enough to be able to get married to qualify as a spinster). 19:43 Apr 10, 2003 (UTC)\n", - "--Daniel C. Boyer 15:43 10-04-2003\n", + "https://en.wikipedia.org/w/index.php?title=talk:Main_Page\n", + "age pump|Village pump]]. See talk:Wikipedia category schemes for general discussion of the category scheme on Wikipedia's Main Page.'''\n", + "'''See Wikipedia talk:Selected Articles on the Main Page for discussion of (and recommendations for) the Selected Articles on the Main Page. See below for more discussion of particular issues regarding the Main Page (e.g., whether to include a particular category on the page). Please add your additions at the bottom.'''\n", + "\n", + "Some older talk has been archived to\n", + "talk:Main Page/Archive 1\n", + "talk:Main Page/Archive 2\n", + "talk:Main Page/Archive 3\n", + "talk:Main Page/Archive 4\n", + "talk:Main Page/Archive 5\n", + "talk:Main Page/Archive 6\n", + "--Schneelocke 08:26 05-09-2003\n", "\n", "\n", "\n" ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/sauna/conda-envs/zissou-env/lib/python3.7/site-packages/ipykernel_launcher.py:14: FutureWarning: speaker.name is deprecated and will be removed in a future release. 
Use speaker.id instead.\n" + ] } ], "source": [ "conversation_list = list(wikiconv_corpus.iter_conversations())\n", "number_of_conversations_to_print = 1\n", "conversation_min_length = 2\n", - "# Refresher on where the wikiconv_corpus is defined\n", + "#Refresher on where the wikiconv_corpus is defined\n", "# corpus_path = \"/Users/adityajha/Desktop/ConvoKit-master/second_set/conversation_corpus_year_2015\"\n", "# wikiconv_corpus = Corpus(filename=corpus_path)\n", "\n", - "change_defaults_print_final(\n", - " conversation_list, number_of_conversations_to_print, conversation_min_length, wikiconv_corpus\n", - ")" + "change_defaults_print_final(conversation_list, number_of_conversations_to_print, conversation_min_length,\n", + " wikiconv_corpus)" ] }, { @@ -497,45 +493,37 @@ "metadata": {}, "outputs": [], "source": [ - "def sort_changes_by_timestamp(\n", - " modification_list, deletion_list, restoration_list, original_utterance\n", - "):\n", + "def sort_changes_by_timestamp(modification_list, deletion_list, restoration_list, original_utterance):\n", " text_time_tuple_list = []\n", - " if original_utterance is not None:\n", - " text_time_original = (\n", - " original_utterance[\"text\"],\n", - " original_utterance[\"timestamp\"],\n", - " original_utterance[\"speaker.id\"],\n", - " \"original\",\n", - " )\n", + " if (original_utterance is not None):\n", + " text_time_original = (original_utterance['text'],original_utterance['timestamp'],\n", + " original_utterance['speaker.id'], 'original')\n", " text_time_tuple_list.append(text_time_original)\n", + " \n", "\n", " for utterance in modification_list:\n", - " text_time = (\n", - " utterance[\"text\"],\n", - " utterance[\"timestamp\"],\n", - " utterance[\"speaker.id\"],\n", - " \"modification\",\n", - " )\n", + " text_time= (utterance['text'], utterance['timestamp'],\n", + " utterance['speaker.id'], 'modification')\n", " text_time_tuple_list.append(text_time)\n", - "\n", + " \n", " for utterance in deletion_list:\n", - " 
text_time = (\"\", utterance[\"timestamp\"], utterance[\"speaker.id\"], \"deletion\")\n", + " text_time= ('', utterance['timestamp'],\n", + " utterance['speaker.id'], 'deletion')\n", " text_time_tuple_list.append(text_time)\n", - "\n", + " \n", " for utterance in restoration_list:\n", - " text_time = (\n", - " utterance[\"text\"],\n", - " utterance[\"timestamp\"],\n", - " utterance[\"speaker.id\"],\n", - " \"restoration\",\n", - " )\n", + " text_time= (utterance['text'], utterance['timestamp'],\n", + " utterance['speaker.id'], 'restoration')\n", " text_time_tuple_list.append(text_time)\n", - "\n", + " \n", " text_time_tuple_list.sort(key=lambda x: x[1])\n", - " # text_time_tuple_list.reverse()\n", - "\n", - " return text_time_tuple_list" + " #text_time_tuple_list.reverse()\n", + " \n", + " \n", + " \n", + " return text_time_tuple_list\n", + " \n", + " " ] }, { @@ -547,52 +535,40 @@ "def print_intermediate_conversation(random_conversations, conversation_corpus):\n", " for conversation in random_conversations:\n", " conversation_ids_list = conversation.get_utterance_ids()\n", - " # First correctly reorder the comments\n", + " #First correctly reorder the comments\n", " ordered_list = find_correct_order(conversation_ids_list, conversation_corpus)\n", - " # For each utterance, print the text present if the utterance has not been deleted\n", + " #For each utterance, print the text present if the utterance has not been deleted\n", " for utterance_id in ordered_list:\n", " utterance_value = conversation_corpus.get_utterance(utterance_id)\n", - " if utterance_value.text != \" \":\n", - " final_comment = utterance_value.text\n", - " date_time_val = datetime.fromtimestamp(utterance_value.timestamp).strftime(\n", - " \"%H:%M %d-%m-%Y\"\n", - " )\n", - " formatted_user_name = (\n", - " \"--\" + str(utterance_value.speaker.id) + \" \" + str(date_time_val)\n", - " )\n", - "\n", + " if (utterance_value.text != \" \"):\n", + " final_comment = utterance_value.text\n", + " 
date_time_val = datetime.fromtimestamp(utterance_value.timestamp).strftime('%H:%M %d-%m-%Y')\n", + " formatted_user_name = \"--\" + str(utterance_value.user.name) + \" \" + str(date_time_val)\n", + " \n", + " \n", " final_timestamp = utterance_value.timestamp\n", - " modification_list = utterance_value.meta[\"modification\"]\n", - " deletion_list = utterance_value.meta[\"deletion\"]\n", - " restoration_list = utterance_value.meta[\"restoration\"]\n", - "\n", - " sorted_timestamps = sort_changes_by_timestamp(\n", - " modification_list,\n", - " deletion_list,\n", - " restoration_list,\n", - " utterance_value.meta[\"original\"],\n", - " )\n", - "\n", - " if len(sorted_timestamps) > 0:\n", + " modification_list = utterance_value.meta['modification']\n", + " deletion_list = utterance_value.meta['deletion']\n", + " restoration_list = utterance_value.meta['restoration']\n", + " \n", + " sorted_timestamps = sort_changes_by_timestamp(modification_list, deletion_list, restoration_list,\n", + " utterance_value.meta['original'])\n", + " \n", + " if (len(sorted_timestamps)>0):\n", " print(wikipedia_link_info(conversation))\n", - " print(\"Final Comment\")\n", - " print(final_comment)\n", - " print(formatted_user_name)\n", - "\n", + " print ('Final Comment')\n", + " print (final_comment)\n", + " print (formatted_user_name)\n", + " \n", " for value in sorted_timestamps:\n", - " print(\"\\n\")\n", - " print(value[3])\n", - " print(value[0])\n", - " formatted_user_name = (\n", - " \"--\"\n", - " + str(value[2])\n", - " + \" \"\n", - " + str(\n", - " datetime.fromtimestamp(float(value[1])).strftime(\"%H:%M %d-%m-%Y\")\n", - " )\n", - " )\n", - " # str(datetime.fromtimestamp(value[1]).strftime('%H:%M %d-%m-%Y'))\n", - " print(formatted_user_name)" + " print ('\\n')\n", + " print (value[3])\n", + " print (value[0])\n", + " formatted_user_name = \"--\" + str(value[2]) + \" \" + str(datetime.fromtimestamp(float(value[1])).strftime('%H:%M %d-%m-%Y'))\n", + " 
#str(datetime.fromtimestamp(value[1]).strftime('%H:%M %d-%m-%Y'))\n", + " print (formatted_user_name)\n", + "\n", + " " ] }, { @@ -608,15 +584,10 @@ "metadata": {}, "outputs": [], "source": [ - "def change_defaults_print_intermediate(\n", - " conversation_list, number_of_conversations, conversation_min_length, conversation_corpus\n", - "):\n", - " random_conversations = print_random_conversations(\n", - " conversation_list,\n", - " number_of_conversations_to_print,\n", - " conversation_min_length,\n", - " wikiconv_corpus,\n", - " )\n", + "def change_defaults_print_intermediate(conversation_list, number_of_conversations, conversation_min_length, \n", + " conversation_corpus):\n", + " random_conversations = print_random_conversations(conversation_list, number_of_conversations_to_print,\n", + " conversation_min_length, wikiconv_corpus)\n", " print_intermediate_conversation(random_conversations, conversation_corpus)" ] }, @@ -636,25 +607,499 @@ "name": "stdout", "output_type": "stream", "text": [ - "https://en.wikipedia.org/w/index.php?title=user_talk:Alex756/Archive\n", + "https://en.wikipedia.org/w/index.php?title=talk:Book_of_Revelation/Archive_1\n", + "Final Comment\n", + "OK, I can go look up some info. I am a little unsure of which part is considered sweeping. I assume that we agree that this historical interpretation is common among non-Christians and secular Bible scholars. It will be easy for me to get references to the Catholic part of this claim; this view is what their new publications have taught, for well over 20 years. References will be forthcoming. As for the viewpoint of liberal protestants, this will take a bit more work! \n", + "--RK 21:39 24-04-2003\n", + "\n", + "\n", + "original\n", + "OK, I can go look up some info. I am a little unsure of which part is considered sweeping. I assume that we agree that this historical interpretation is common among non-Christians and secular Bible scholars. 
It will be easy for me to get references to the Catholic part of this claim; this view is what their new publications have taught, for well over 20 years. References will be forthcoming. As for the viewpoint of liberal protestants, this will take a bit more work! \n", + "--RK 11:41 11-12-2002\n", + "\n", + "\n", + "deletion\n", + "\n", + "--RK 21:33 24-04-2003\n", + "\n", + "\n", + "restoration\n", + "OK, I can go look up some info. I am a little unsure of which part is considered sweeping. I assume that we agree that this historical interpretation is common among non-Christians and secular Bible scholars. It will be easy for me to get references to the Catholic part of this claim; this view is what their new publications have taught, for well over 20 years. References will be forthcoming. As for the viewpoint of liberal protestants, this will take a bit more work! \n", + "--RK 21:39 24-04-2003\n", + "https://en.wikipedia.org/w/index.php?title=talk:Book_of_Revelation/Archive_1\n", + "Final Comment\n", + "RK, I believe you are confusing ''interpretation'' with ''criticism''. I remodeled the contentious paragraph, and included a link to an article on apocalyptic literature. If I understand what you mean by \"historical\", you are regarding the book as entirely concerned with events in the recent past as of the date of authorship, fused with vague predictions of Christ coming in fire and vengeance, etc.. If so, you are not discussing an interpretation of the book qua prophecy, but an understanding held by higher critics who treat the book as an example of apocalyptic literature. Please take it there, and feel free to go crazy! Don't forget the book of Enoch, and the many apocalyptic works of the Maccabean era. .\n", + "--RK 21:39 24-04-2003\n", + "\n", + "\n", + "original\n", + "RK, I believe you are confusing ''interpretation'' with ''criticism''. I remodeled the contentious paragraph, and included a link to an article on apocalyptic literature. 
If I understand what you mean by \"historical\", you are regarding the book as entirely concerned with events in the recent past as of the date of authorship, fused with vague predictions of Christ coming in fire and vengeance, etc.. If so, you are not discussing an interpretation of the book qua prophecy, but an understanding held by higher critics who treat the book as an example of apocalyptic literature. Please take it there, and feel free to go crazy! Don't forget the book of Enoch, and the many apocalyptic works of the Maccabean era. .\n", + "--LenBudney 11:49 11-12-2002\n", + "\n", + "\n", + "deletion\n", + "\n", + "--RK 21:33 24-04-2003\n", + "\n", + "\n", + "restoration\n", + "RK, I believe you are confusing ''interpretation'' with ''criticism''. I remodeled the contentious paragraph, and included a link to an article on apocalyptic literature. If I understand what you mean by \"historical\", you are regarding the book as entirely concerned with events in the recent past as of the date of authorship, fused with vague predictions of Christ coming in fire and vengeance, etc.. If so, you are not discussing an interpretation of the book qua prophecy, but an understanding held by higher critics who treat the book as an example of apocalyptic literature. Please take it there, and feel free to go crazy! Don't forget the book of Enoch, and the many apocalyptic works of the Maccabean era. .\n", + "--RK 21:39 24-04-2003\n", + "https://en.wikipedia.org/w/index.php?title=talk:Book_of_Revelation/Archive_1\n", + "Final Comment\n", + "Yes, I think we've achieved understanding! I think the distinction I make is useful for purposes of clarity, since the \"liberal Christians\" who agree with what you called the \"historical school\" are, in doing so, taking a controversial (to Christians) stance on the inspiration of scripture. They are rejecting the book's own claim to be foretelling the future, and agreeing with the criticism which classifies it as a pseudo-prophecy. 
It would be hard to include that as a \"school of interpretation\" without violating NPOV as to questions of authority, inspiration, etc.\n", + "--RK 21:39 24-04-2003\n", + "\n", + "\n", + "original\n", + "Yes, I think we've achieved understanding! I think the distinction I make is useful for purposes of clarity, since the \"liberal Christians\" who agree with what you called the \"historical school\" are, in doing so, taking a controversial (to Christians) stance on the inspiration of scripture. They are rejecting the book's own claim to be foretelling the future, and agreeing with the criticism which classifies it as a pseudo-prophecy. It would be hard to include that as a \"school of interpretation\" without violating NPOV as to questions of authority, inspiration, etc.\n", + "--LenBudney 12:37 11-12-2002\n", + "\n", + "\n", + "deletion\n", + "\n", + "--RK 21:33 24-04-2003\n", + "\n", + "\n", + "restoration\n", + "Yes, I think we've achieved understanding! I think the distinction I make is useful for purposes of clarity, since the \"liberal Christians\" who agree with what you called the \"historical school\" are, in doing so, taking a controversial (to Christians) stance on the inspiration of scripture. They are rejecting the book's own claim to be foretelling the future, and agreeing with the criticism which classifies it as a pseudo-prophecy. 
It would be hard to include that as a \"school of interpretation\" without violating NPOV as to questions of authority, inspiration, etc.\n", + "--RK 21:39 24-04-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:BL~enwiki\n", + "Final Comment\n", + " King of Wikipedia \n", + "--BL~enwiki 13:28 25-09-2003\n", + "\n", + "\n", + "original\n", + " King of Wikipedia \n", + "--Oliver Pereira 23:01 21-08-2003\n", + "\n", + "\n", + "deletion\n", + "\n", + "--142.177.74.112 13:06 25-09-2003\n", + "\n", + "\n", + "restoration\n", + " King of Wikipedia \n", + "--BL~enwiki 13:28 25-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:BL~enwiki\n", + "Final Comment\n", + "I don't think you are. P \n", + "--BL~enwiki 13:28 25-09-2003\n", + "\n", + "\n", + "original\n", + "I don't think you are. P \n", + "--Oliver Pereira 23:01 21-08-2003\n", + "\n", + "\n", + "deletion\n", + "\n", + "--142.177.74.112 13:06 25-09-2003\n", + "\n", + "\n", + "restoration\n", + "I don't think you are. P \n", + "--BL~enwiki 13:28 25-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:BL~enwiki\n", + "Final Comment\n", + "Ofcourse I am! Look at Wikipedia:King of Wikipedia\n", + "--BL~enwiki 13:28 25-09-2003\n", + "\n", + "\n", + "original\n", + "Ofcourse I am! Look at Wikipedia:King of Wikipedia\n", + "--BL~enwiki 23:02 21-08-2003\n", + "\n", + "\n", + "deletion\n", + "\n", + "--142.177.74.112 13:06 25-09-2003\n", + "\n", + "\n", + "restoration\n", + "Ofcourse I am! Look at Wikipedia:King of Wikipedia\n", + "--BL~enwiki 13:28 25-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:BL~enwiki\n", + "Final Comment\n", + "I will defeat you! \n", + "--BL~enwiki 13:28 25-09-2003\n", + "\n", + "\n", + "original\n", + "I will defeat you! \n", + "--Oliver Pereira 23:05 21-08-2003\n", + "\n", + "\n", + "deletion\n", + "\n", + "--142.177.74.112 13:06 25-09-2003\n", + "\n", + "\n", + "restoration\n", + "I will defeat you! 
\n", + "--BL~enwiki 13:28 25-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:BL~enwiki\n", + "Final Comment\n", + "Hi BL, I'm curious why you re-instated the troll comment about jews ruling non-jews on the talk:Israel talk page. Going by the edits of that user on other pages, it wasn't a serious comment but something put on to deliberately provoke a reaction. Various pages linking to Israel or Jews have been targeted by a series of IPs in the 67 and 68 range, some provocatively hostile to Israel, some so sychophantically (may not be the right spelling but fuck it, I'm too knackered to care! -) ) pro-Israel they are unambiguously phoney. As you know, the Israel page and one or two others are like tinderboxes just waiting for someone to cast a match onto them. Whatever about rows over real comments, I thought it unwise to leave a crude piss-take that some people might believe was real and get angry over. We have had enough trouble on that page without phoney wind-up comments triggering off rows. -) \n", + "--BL~enwiki 13:28 25-09-2003\n", + "\n", + "\n", + "original\n", + "Hi BL, I'm curious why you re-instated the troll comment about jews ruling non-jews on the talk:Israel talk page. Going by the edits of that user on other pages, it wasn't a serious comment but something put on to deliberately provoke a reaction. Various pages linking to Israel or Jews have been targeted by a series of IPs in the 67 and 68 range, some provocatively hostile to Israel, some so sychophantically (may not be the right spelling but fuck it, I'm too knackered to care! -) ) pro-Israel they are unambiguously phoney. As you know, the Israel page and one or two others are like tinderboxes just waiting for someone to cast a match onto them. Whatever about rows over real comments, I thought it unwise to leave a crude piss-take that some people might believe was real and get angry over. 
We have had enough trouble on that page without phoney wind-up comments triggering off rows. -) \n", + "--Jtdirl 01:34 23-08-2003\n", + "\n", + "\n", + "deletion\n", + "\n", + "--142.177.74.112 13:06 25-09-2003\n", + "\n", + "\n", + "restoration\n", + "Hi BL, I'm curious why you re-instated the troll comment about jews ruling non-jews on the talk:Israel talk page. Going by the edits of that user on other pages, it wasn't a serious comment but something put on to deliberately provoke a reaction. Various pages linking to Israel or Jews have been targeted by a series of IPs in the 67 and 68 range, some provocatively hostile to Israel, some so sychophantically (may not be the right spelling but fuck it, I'm too knackered to care! -) ) pro-Israel they are unambiguously phoney. As you know, the Israel page and one or two others are like tinderboxes just waiting for someone to cast a match onto them. Whatever about rows over real comments, I thought it unwise to leave a crude piss-take that some people might believe was real and get angry over. We have had enough trouble on that page without phoney wind-up comments triggering off rows. -) \n", + "--BL~enwiki 13:28 25-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:BL~enwiki\n", + "Final Comment\n", + "By the way, would you stop yelling KEEEEP on the votes for deletion page? It's very annoying \n", + "--BL~enwiki 13:28 25-09-2003\n", + "\n", + "\n", + "original\n", + "By the way, would you stop yelling KEEEEP on the votes for deletion page? It's very annoying \n", + "--Robert Merkel 07:20 23-08-2003\n", + "\n", + "\n", + "deletion\n", + "\n", + "--142.177.74.112 13:06 25-09-2003\n", + "\n", + "\n", + "restoration\n", + "By the way, would you stop yelling KEEEEP on the votes for deletion page? 
It's very annoying \n", + "--BL~enwiki 13:28 25-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:BL~enwiki\n", + "Final Comment\n", + "There's no need to go around shouting \"KEEEP IT!\" at people on Wikipedia:Votes for deletion. This being a text medium, people will \"hear\" you just as clearly if you simply say \"Keep it.\"\n", + "In fact, this being a text medium, people are ''more'' likely to pay attention to what you're trying to say if you demonstrate that you do know the correct use of lowercase letters, and the correct spelling of \"keep\".\n", + "—\n", + "--BL~enwiki 13:28 25-09-2003\n", + "\n", + "\n", + "original\n", + "There's no need to go around shouting \"KEEEP IT!\" at people on Wikipedia:Votes for deletion. This being a text medium, people will \"hear\" you just as clearly if you simply say \"Keep it.\"\n", + "In fact, this being a text medium, people are *more* likely to pay attention to what you're trying to say if you demonstrate that you do know the correct use of lowercase letters, and the correct spelling of \"keep\".\n", + "—\n", + "--Paul A 22:46 24-08-2003\n", + "\n", + "\n", + "modification\n", + "There's no need to go around shouting \"KEEEP IT!\" at people on Wikipedia:Votes for deletion. This being a text medium, people will \"hear\" you just as clearly if you simply say \"Keep it.\"\n", + "In fact, this being a text medium, people are ''more'' likely to pay attention to what you're trying to say if you demonstrate that you do know the correct use of lowercase letters, and the correct spelling of \"keep\".\n", + "—\n", + "--Paul A 22:48 24-08-2003\n", + "\n", + "\n", + "deletion\n", + "\n", + "--142.177.74.112 13:06 25-09-2003\n", + "\n", + "\n", + "restoration\n", + "There's no need to go around shouting \"KEEEP IT!\" at people on Wikipedia:Votes for deletion. 
This being a text medium, people will \"hear\" you just as clearly if you simply say \"Keep it.\"\n", + "In fact, this being a text medium, people are ''more'' likely to pay attention to what you're trying to say if you demonstrate that you do know the correct use of lowercase letters, and the correct spelling of \"keep\".\n", + "—\n", + "--BL~enwiki 13:28 25-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:BL~enwiki\n", + "Final Comment\n", + ")- 0717, Sep 8, 2003 (UTC)\n", + "--BL~enwiki 13:28 25-09-2003\n", + "\n", + "\n", + "original\n", + ") - 0717, Sep 8, 2003 (UTC)\n", + "--Stevertigo 03:17 08-09-2003\n", + "\n", + "\n", + "modification\n", + ")- 0717, Sep 8, 2003 (UTC)\n", + "--Stevertigo 03:17 08-09-2003\n", + "\n", + "\n", + "deletion\n", + "\n", + "--142.177.74.112 13:06 25-09-2003\n", + "\n", + "\n", + "restoration\n", + ")- 0717, Sep 8, 2003 (UTC)\n", + "--BL~enwiki 13:28 25-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:BL~enwiki\n", + "Final Comment\n", + "English spellings should not be changed to American without good reason. Why did you change Kilometre to Kilometer in Km/h? The page is located at Kilometre anyway, so you are now causing a redirect to occur. See Wikipedia:Manual of Style. It is generally accepted that the version used by the original author is kept. 23:04, Sep 17, 2003 (UTC)\n", + "--BL~enwiki 13:28 25-09-2003\n", + "\n", + "\n", + "original\n", + "English spellings should not be changed to American without good reason. Why did you change Kilometre to Kilometer in Km/h? The page is located at Kilometre anyway, so you are now causing a redirect to occur. See Wikipedia:Manual of Style. It is generally accepted that the version used by the original author is kept. 23:04, Sep 17, 2003 (UTC)\n", + "--Angela 19:04 17-09-2003\n", + "\n", + "\n", + "modification\n", + "English spellings should not be changed to American without good reason. Why did you change Kilometre to Kilometer in Km/h? 
The page is located at Kilometre anyway, so you are now causing a redirect to occur. See Wikipedia:Manual of Style. It is generally accepted that the version used by the original author is kept. 23:04, Sep 17, 2003 (UTC)\n", + "--MartinHarper 20:46 20-09-2003\n", + "\n", + "\n", + "deletion\n", + "\n", + "--142.177.74.112 13:06 25-09-2003\n", + "\n", + "\n", + "restoration\n", + "English spellings should not be changed to American without good reason. Why did you change Kilometre to Kilometer in Km/h? The page is located at Kilometre anyway, so you are now causing a redirect to occur. See Wikipedia:Manual of Style. It is generally accepted that the version used by the original author is kept. 23:04, Sep 17, 2003 (UTC)\n", + "--BL~enwiki 13:28 25-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:BL~enwiki\n", + "Final Comment\n", + "Ok, you might find American and British English Differences and http://www.onelook.com/ useful. 23:28, Sep 17, 2003 (UTC)\n", + "--BL~enwiki 13:28 25-09-2003\n", + "\n", + "\n", + "original\n", + "Ok, you might find American and British English Differences and http://www.onelook.com/ useful. 23:28, Sep 17, 2003 (UTC)\n", + "--Angela 19:28 17-09-2003\n", + "\n", + "\n", + "deletion\n", + "\n", + "--142.177.74.112 13:06 25-09-2003\n", + "\n", + "\n", + "restoration\n", + "Ok, you might find American and British English Differences and http://www.onelook.com/ useful. 23:28, Sep 17, 2003 (UTC)\n", + "--BL~enwiki 13:28 25-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:BL~enwiki\n", + "Final Comment\n", + "Could you read and respond to Talk:The_Chronicles_of_George? 12:40, 17 Sep 2003 (UTC)\n", + " Hello? Is this thing on? *tap* *tap* *tap*. 00:46, 21 Sep 2003 (UTC)\n", + "--BL~enwiki 13:28 25-09-2003\n", + "\n", + "\n", + "original\n", + "Could you read and respond to Talk:The_Chronicles_of_George? 12:40, 17 Sep 2003 (UTC)\n", + " Hello? Is this thing on? *tap* *tap* *tap*. 
00:46, 21 Sep 2003 (UTC)\n", + "--MartinHarper 20:46 20-09-2003\n", + "\n", + "\n", + "deletion\n", + "\n", + "--142.177.74.112 13:06 25-09-2003\n", + "\n", + "\n", + "restoration\n", + "Could you read and respond to Talk:The_Chronicles_of_George? 12:40, 17 Sep 2003 (UTC)\n", + " Hello? Is this thing on? *tap* *tap* *tap*. 00:46, 21 Sep 2003 (UTC)\n", + "--BL~enwiki 13:28 25-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:BL~enwiki\n", + "Final Comment\n", + " Uh, I dropped it earlier... \n", + "--BL~enwiki 13:28 25-09-2003\n", + "\n", + "\n", + "original\n", + " Uh, I dropped it earlier... \n", + "--Pizza Puzzle 20:56 20-09-2003\n", + "\n", + "\n", + "deletion\n", + "\n", + "--142.177.74.112 13:06 25-09-2003\n", + "\n", + "\n", + "restoration\n", + " Uh, I dropped it earlier... \n", + "--BL~enwiki 13:28 25-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:BL~enwiki\n", "Final Comment\n", - " Points of order \n", - "--142.177.103.185 19:51 13-10-2003\n", + "How is stating that a consensus is not required an \"impractical\" statement? 17:57, 24 Sep 2003 (UTC)\n", + "--BL~enwiki 13:28 25-09-2003\n", "\n", "\n", "original\n", - " Points of order \n", - "--142.177.78.145 19:38 13-10-2003\n", + "How is stating that a consensus is not required an \"impractical\" statement? 17:57, 24 Sep 2003 (UTC)\n", + "--Angela 13:57 24-09-2003\n", "\n", "\n", "deletion\n", "\n", - "--MartinHarper 19:40 13-10-2003\n", + "--142.177.74.112 13:06 25-09-2003\n", "\n", "\n", "restoration\n", - " Points of order \n", - "--142.177.103.185 19:51 13-10-2003\n" + "How is stating that a consensus is not required an \"impractical\" statement? 17:57, 24 Sep 2003 (UTC)\n", + "--BL~enwiki 13:28 25-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:BL~enwiki\n", + "Final Comment\n", + "I enjoyed reading your astute comments on the privatization page. A while ago, I added content to the privatization page, specifically the theory behind it. 
While I agree that it's generally valid theoretically, your points are often correct in practice.\n", + "Since it seems that you're also interested in economic policy, you might want to take a look at some content that I added a while ago to the History of post-communist Russia article. It's the best example on Wikipedia, of which I'm aware, of an in depth look at privatization in practice. Specifically, you might be interested in the sections on the three stages of privatization in Russia and the next section on the so-called \"virtual economy\" (a term coined by economists Clifford Gaddy and Barry Ickes in reference to post-Communist Russia). Since it's a long article, you'd be able to find these sections by clicking on the link in the pop up table of contents box at the top; the sections of privatization are under the broader heading of \"democratization and its setbacks.\" (Just to make a brief note about the structure of the article, the content doesn't really separate sections on economic restructuring and democratization, since they're so intertwined in a country remaking both its economic and political institutions.)\n", + "The sections that I'm recommending bring up all the sharp insights that you made (i.e. that design is more important than the mere transfer of ownership). Here, I'll just summarize the points made in the Russia article. State monopolies were rarely restructured before privatization. Thus, there was little competition over price and quality for consumer demand. In turn, there were few incentives to invest capital to salvage inefficient value-losing enterprises. Thus, efficiency (in terms of minimizing costs per unit of output) rarely improved. Instead of channeling investment into privatized enterprises, asset stripping (facilitated by capital market liberalization) was the common result. Moreover, despite sweeping openings, capital markets (i.e. 
the mechanisms to channel private savings into Russian companies) remain week.\n", + "It would be great if your interest in the articles on post-Communist Russia were aroused since I cannot find anyone interested in this subject. This is baffling since it is such a contentious topic. In academic literature (and especially within Russia), reform strategies, the sequencing of reform, and the pace of reform in post-Communist Russia remain contentious topics, argued often with great personal bitterness. After all, many prominent former Soviet specialists in the West were very active in the design of new economic and political institutions in Russia. Subject to harsh retrospective criticism, many economists at the US Treasury Department, IMF, World Bank, and top US universities have become very defensive. It's still a heated subject in the top journals, academic literature, and major periodicals on Russia and economics.\n", + "Anyway, sorry if my comments were a bit on the lengthy side. Don't feel compelled to redirect your attention to post-Communist Russia if you're uninterested. It's only my guess that you would be from your insightful comments. 06:03, 29 Sep 2003 (UTC)\n", + "BTW, since you are frequent contributor to Palestinian-related articles, I'm sure that the loss of the great scholar Edward Said isn't news to you. I don't know your feelings about him, but I think that his passing couldn't have come at a worse time. I'd argue that the popular writings on the Middle East in the US, exemplified by Daniel Pipes, are simply bigoted and hateful. It's sad to see such an effective counterweight to their venom gone. 06:03, 29 Sep 2003 (UTC)\n", + "--172 02:36 29-09-2003\n", + "\n", + "\n", + "original\n", + "I enjoyed reading your astute comments on the privatization page. A while ago, I added content to the privatization page, specifically the theory behind it. 
While I agree that it's generally valid theoretically, your points are often correct in practice.\n", + "Since it seems that you're also interested in economic policy, you might want to take a look at some content that I added a while ago to the History of post-communist Russia article. It's the best example on Wikipedia, of which I'm aware, of an in depth look at privatization in practice. Specifically, you might be interested in the sections on the three stages of privatization in Russia and the next section on the so-called \"virtual economy\" (a term coined by economists Clifford Gaddy and Barry Ickes in reference to post-Communist Russia). Since it's a long article, you'd be able to find these sections by clicking on the link in the pop up table of contents box at the top; the sections of privatization are under the broader heading of \"democratization and its setbacks.\" (Just to make a brief note about the structure of the article, the content doesn't really separate sections on economic restructuring and democratization, since they're so intertwined in a country remaking both its economic and political institutions.)\n", + "The sections that I'm recommending bring up all the sharp insights that you made (that design is more important than the mere transfer of ownership). Here, I'll just summarize the points made in the Russia article. State monopolies were rarely restructured before privatization. Thus, there was little competition over price and quality for consumer demand. In turn, there were few incentives to invest capital to salvage inefficient value-losing enterprises. Thus, efficiency (minimizing costs per unit of output) rarely improved. Instead of channeling investment into privatized enterprises, asset stripping (facilitated by capital market liberalization) was the common result. 
Moreover, despite sweeping openings, capital markets (the mechanisms to channel private savings into Russian companies) remain week.\n", + "It would be great if your interest in the articles on post-Communist Russia were aroused since I cannot find anyone interested in this subject. This is baffling since it is such a contentious topic. In academic literature (and especially within Russia), reform strategies, the sequencing of reform, and the pace of reform in post-Communist Russia remain contentious topics, argued often with great personal bitterness. After all, many prominent former Soviet specialists in the West were very active in the design of new economic and political institutions in Russia. Subject to harsh retrospective criticism, many economists at the US Treasury Department, IMF, World Bank, and top US universities have become very defensive. It's still a heated subject in the top journals, academic literature, and major periodicals on Russia and economics.\n", + "Anyway, sorry if my comments were a bit on the lengthy side. Don't feel compelled to redirect your attention to post-Communist Russia if you're uninterested. It's only my guess that you would be from your insightful comments. 06:03, 29 Sep 2003 (UTC)\n", + "BTW, since you are frequently contributor to Palestinian-related articles, I'm sure that the loss of the great scholar Edward Said isn't news to you. I don't know your feelings about him, but I think that his passing couldn't have come at a worse time. I'd argue that the popular writings on the Middle East in the US, exemplified by Daniel Pipes, are simply bigoted and hateful. It's sad to see such an effective counterweight to their venom gone. 06:03, 29 Sep 2003 (UTC)\n", + "--172 02:03 29-09-2003\n", + "\n", + "\n", + "modification\n", + "I enjoyed reading your astute comments on the privatization page. A while ago, I added content to the privatization page, specifically the theory behind it. 
While I agree that it's generally valid theoretically, your points are often correct in practice.\n", + "Since it seems that you're also interested in economic policy, you might want to take a look at some content that I added a while ago to the History of post-communist Russia article. It's the best example on Wikipedia, of which I'm aware, of an in depth look at privatization in practice. Specifically, you might be interested in the sections on the three stages of privatization in Russia and the next section on the so-called \"virtual economy\" (a term coined by economists Clifford Gaddy and Barry Ickes in reference to post-Communist Russia). Since it's a long article, you'd be able to find these sections by clicking on the link in the pop up table of contents box at the top; the sections of privatization are under the broader heading of \"democratization and its setbacks.\" (Just to make a brief note about the structure of the article, the content doesn't really separate sections on economic restructuring and democratization, since they're so intertwined in a country remaking both its economic and political institutions.)\n", + "The sections that I'm recommending bring up all the sharp insights that you made (that design is more important than the mere transfer of ownership). Here, I'll just summarize the points made in the Russia article. State monopolies were rarely restructured before privatization. Thus, there was little competition over price and quality for consumer demand. In turn, there were few incentives to invest capital to salvage inefficient value-losing enterprises. Thus, efficiency (minimizing costs per unit of output) rarely improved. Instead of channeling investment into privatized enterprises, asset stripping (facilitated by capital market liberalization) was the common result. 
Moreover, despite sweeping openings, capital markets (the mechanisms to channel private savings into Russian companies) remain week.\n", + "It would be great if your interest in the articles on post-Communist Russia were aroused since I cannot find anyone interested in this subject. This is baffling since it is such a contentious topic. In academic literature (and especially within Russia), reform strategies, the sequencing of reform, and the pace of reform in post-Communist Russia remain contentious topics, argued often with great personal bitterness. After all, many prominent former Soviet specialists in the West were very active in the design of new economic and political institutions in Russia. Subject to harsh retrospective criticism, many economists at the US Treasury Department, IMF, World Bank, and top US universities have become very defensive. It's still a heated subject in the top journals, academic literature, and major periodicals on Russia and economics.\n", + "Anyway, sorry if my comments were a bit on the lengthy side. Don't feel compelled to redirect your attention to post-Communist Russia if you're uninterested. It's only my guess that you would be from your insightful comments. 06:03, 29 Sep 2003 (UTC)\n", + "BTW, since you are frequent contributor to Palestinian-related articles, I'm sure that the loss of the great scholar Edward Said isn't news to you. I don't know your feelings about him, but I think that his passing couldn't have come at a worse time. I'd argue that the popular writings on the Middle East in the US, exemplified by Daniel Pipes, are simply bigoted and hateful. It's sad to see such an effective counterweight to their venom gone. 06:03, 29 Sep 2003 (UTC)\n", + "--172 02:07 29-09-2003\n", + "\n", + "\n", + "modification\n", + "I enjoyed reading your astute comments on the privatization page. A while ago, I added content to the privatization page, specifically the theory behind it. 
While I agree that it's generally valid theoretically, your points are often correct in practice.\n", + "Since it seems that you're also interested in economic policy, you might want to take a look at some content that I added a while ago to the History of post-communist Russia article. It's the best example on Wikipedia, of which I'm aware, of an in depth look at privatization in practice. Specifically, you might be interested in the sections on the three stages of privatization in Russia and the next section on the so-called \"virtual economy\" (a term coined by economists Clifford Gaddy and Barry Ickes in reference to post-Communist Russia). Since it's a long article, you'd be able to find these sections by clicking on the link in the pop up table of contents box at the top; the sections of privatization are under the broader heading of \"democratization and its setbacks.\" (Just to make a brief note about the structure of the article, the content doesn't really separate sections on economic restructuring and democratization, since they're so intertwined in a country remaking both its economic and political institutions.)\n", + "The sections that I'm recommending bring up all the sharp insights that you made (i.e. that design is more important than the mere transfer of ownership). Here, I'll just summarize the points made in the Russia article. State monopolies were rarely restructured before privatization. Thus, there was little competition over price and quality for consumer demand. In turn, there were few incentives to invest capital to salvage inefficient value-losing enterprises. Thus, efficiency (in terms of minimizing costs per unit of output) rarely improved. Instead of channeling investment into privatized enterprises, asset stripping (facilitated by capital market liberalization) was the common result. Moreover, despite sweeping openings, capital markets (i.e. 
the mechanisms to channel private savings into Russian companies) remain week.\n", + "It would be great if your interest in the articles on post-Communist Russia were aroused since I cannot find anyone interested in this subject. This is baffling since it is such a contentious topic. In academic literature (and especially within Russia), reform strategies, the sequencing of reform, and the pace of reform in post-Communist Russia remain contentious topics, argued often with great personal bitterness. After all, many prominent former Soviet specialists in the West were very active in the design of new economic and political institutions in Russia. Subject to harsh retrospective criticism, many economists at the US Treasury Department, IMF, World Bank, and top US universities have become very defensive. It's still a heated subject in the top journals, academic literature, and major periodicals on Russia and economics.\n", + "Anyway, sorry if my comments were a bit on the lengthy side. Don't feel compelled to redirect your attention to post-Communist Russia if you're uninterested. It's only my guess that you would be from your insightful comments. 06:03, 29 Sep 2003 (UTC)\n", + "BTW, since you are frequent contributor to Palestinian-related articles, I'm sure that the loss of the great scholar Edward Said isn't news to you. I don't know your feelings about him, but I think that his passing couldn't have come at a worse time. I'd argue that the popular writings on the Middle East in the US, exemplified by Daniel Pipes, are simply bigoted and hateful. It's sad to see such an effective counterweight to their venom gone. 
06:03, 29 Sep 2003 (UTC)\n", + "--172 02:36 29-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=user_talk:Angela/Archive1\n", + "Final Comment\n", + "Industrial Waste\n", + "--Angela 19:10 10-12-2003\n", + "\n", + "\n", + "original\n", + "Industrial Waste\n", + "--Ed Poor 14:30 09-12-2003\n", + "\n", + "\n", + "deletion\n", + "\n", + "--Angela 15:05 09-12-2003\n", + "\n", + "\n", + "restoration\n", + "Industrial Waste\n", + "--Angela 19:10 10-12-2003\n", + "https://en.wikipedia.org/w/index.php?title=talk:List_of_one-hit_wonders_on_the_UK_Singles_Chart\n", + "Final Comment\n", + "Just wondering on what grounds some of these songs are termed one hit wonders... I mean I just removed \"A-Ha - Take On Me\" because A-Ha were one of the bigger acts of the 80s, with a string of top 10 hits. Kajagoogoo, while being best known for Too Shy, had two other Top 10 hits, and a further top 20 hit. Let alone the top 40.\n", + "The Guinness Book of Hit Singles would tell us that a one hit wonder has to get to number one. I disagree with that, but would insist that a one hit wonder really has to have had only ONE HIT. Whether that's a top 40 hit or a top 75 hit is open to debate perhaps... But a LOT of these acts had more than one hit. -\n", + "--Mintguy 11:11 12-09-2003\n", + "\n", + "\n", + "original\n", + "Just wondering on what grounds some of these songs are termed one hit wonders... I mean I just removed \"A-Ha - Take On Me\" because A-Ha were one of the bigger acts of the 80s, with a string of top 10 hits. Kajagoogoo, while being best known for Too Shy, had two other Top 10 hits, and a further top 20 hit. Let alone the top 40.\n", + "The Guinness Book of Hit Singles would tell us that a one hit wonder has to get to number one. I disagree with that, but would insist that a one hit wonder really has to have had only ONE HIT. Whether that's a top 40 hit or a top 75 hit is open to debate perhaps... But a LOT of these acts had more than one hit. 
-\n", + "--Nommonomanac 21:30 03-12-2002\n", + "\n", + "\n", + "deletion\n", + "\n", + "--213.122.51.46 11:06 12-09-2003\n", + "\n", + "\n", + "restoration\n", + "Just wondering on what grounds some of these songs are termed one hit wonders... I mean I just removed \"A-Ha - Take On Me\" because A-Ha were one of the bigger acts of the 80s, with a string of top 10 hits. Kajagoogoo, while being best known for Too Shy, had two other Top 10 hits, and a further top 20 hit. Let alone the top 40.\n", + "The Guinness Book of Hit Singles would tell us that a one hit wonder has to get to number one. I disagree with that, but would insist that a one hit wonder really has to have had only ONE HIT. Whether that's a top 40 hit or a top 75 hit is open to debate perhaps... But a LOT of these acts had more than one hit. -\n", + "--Mintguy 11:11 12-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=talk:List_of_one-hit_wonders_on_the_UK_Singles_Chart\n", + "Final Comment\n", + "I think if a band had a top 10 hit and had a few bubling under the top ten it's kinda borderline. What do you think? 02:55 Dec 4, 2002 (UTC)\n", + "--Mintguy 11:11 12-09-2003\n", + "\n", + "\n", + "original\n", + "I think if a band had a top 10 hit and had a few bubling under the top ten it's kinda borderline. What do you think? 02:55 Dec 4, 2002 (UTC)\n", + "--Mintguy 21:55 03-12-2002\n", + "\n", + "\n", + "deletion\n", + "\n", + "--213.122.51.46 11:06 12-09-2003\n", + "\n", + "\n", + "restoration\n", + "I think if a band had a top 10 hit and had a few bubling under the top ten it's kinda borderline. What do you think? 02:55 Dec 4, 2002 (UTC)\n", + "--Mintguy 11:11 12-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=talk:List_of_one-hit_wonders_on_the_UK_Singles_Chart\n", + "Final Comment\n", + " Well... the way I see it, I'd say that a seperate top 40 hit would negate a top 10 hit's OHW status. 
I mean, if a band gets one record in the top ten and no others in the top 40, that seems to me to work out as a OHW. But a top ten and another in the top 40 doesn't... It means we don't have to worry about a massive amount of records - just a ''lot'' of records. More than one record in the top 40 doesn't seem like a OHW to me. -\n", + "--Mintguy 11:11 12-09-2003\n", + "\n", + "\n", + "original\n", + " Well... the way I see it, I'd say that a seperate top 40 hit would negate a top 10 hit's OHW status. I mean, if a band gets one record in the top ten and no others in the top 40, that seems to me to work out as a OHW. But a top ten and another in the top 40 doesn't... It means we don't have to worry about a massive amount of records - just a ''lot'' of records. More than one record in the top 40 doesn't seem like a OHW to me. -\n", + "--Nommonomanac 22:02 03-12-2002\n", + "\n", + "\n", + "deletion\n", + "\n", + "--213.122.51.46 11:06 12-09-2003\n", + "\n", + "\n", + "restoration\n", + " Well... the way I see it, I'd say that a seperate top 40 hit would negate a top 10 hit's OHW status. I mean, if a band gets one record in the top ten and no others in the top 40, that seems to me to work out as a OHW. But a top ten and another in the top 40 doesn't... It means we don't have to worry about a massive amount of records - just a ''lot'' of records. More than one record in the top 40 doesn't seem like a OHW to me. -\n", + "--Mintguy 11:11 12-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=talk:List_of_one-hit_wonders_on_the_UK_Singles_Chart\n", + "Final Comment\n", + " Hmmm... It's difficult Billy Ray Curtis got another song (\"\"Could've been me\"\") to 24, but I doubt if anyone (but a fan) could remember it. It would be a shame to remove him. Channel 4 had a top 10 one hit wonder thing a while ago. I wonder what criteria they used.\n", + "--Mintguy 11:11 12-09-2003\n", + "\n", + "\n", + "original\n", + " Hmmm... 
It's difficult Billy Ray Curtis got another song (\"\"Could've been me\"\") to 24, but I doubt if anyone (but a fan) could remember it. It would be a shame to remove him. Channel 4 had a top 10 one hit wonder thing a while ago. I wonder what criteria they used.\n", + "--Mintguy 22:10 03-12-2002\n", + "\n", + "\n", + "deletion\n", + "\n", + "--213.122.51.46 11:06 12-09-2003\n", + "\n", + "\n", + "restoration\n", + " Hmmm... It's difficult Billy Ray Curtis got another song (\"\"Could've been me\"\") to 24, but I doubt if anyone (but a fan) could remember it. It would be a shame to remove him. Channel 4 had a top 10 one hit wonder thing a while ago. I wonder what criteria they used.\n", + "--Mintguy 11:11 12-09-2003\n", + "https://en.wikipedia.org/w/index.php?title=talk:List_of_one-hit_wonders_on_the_UK_Singles_Chart\n", + "Final Comment\n", + " Weren't they all number ones? I seem to remember a second Billy Ray Cyrus record coming out. I remember a small amount of associated pain. Though I don't remember the song. Reneé and Renato were in that C4 thing... They had a number 1 and a number 48, so that would fit my criteria. Which seems right. -\n", + "--Mintguy 11:11 12-09-2003\n", + "\n", + "\n", + "original\n", + " Weren't they all number ones? I seem to remember a second Billy Ray Cyrus record coming out. I remember a small amount of associated pain. Though I don't remember the song. Reneé and Renato were in that C4 thing... They had a number 1 and a number 48, so that would fit my criteria. Which seems right. -\n", + "--Nommonomanac 08:28 05-12-2002\n", + "\n", + "\n", + "deletion\n", + "\n", + "--213.122.51.46 11:06 12-09-2003\n", + "\n", + "\n", + "restoration\n", + " Weren't they all number ones? I seem to remember a second Billy Ray Cyrus record coming out. I remember a small amount of associated pain. Though I don't remember the song. Reneé and Renato were in that C4 thing... They had a number 1 and a number 48, so that would fit my criteria. 
Which seems right. -\n", + "--Mintguy 11:11 12-09-2003\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/sauna/conda-envs/zissou-env/lib/python3.7/site-packages/ipykernel_launcher.py:12: FutureWarning: speaker.name is deprecated and will be removed in a future release. Use speaker.id instead.\n" ] } ], @@ -663,9 +1108,8 @@ "number_of_conversations_to_print = 10\n", "conversation_min_length = 3\n", "\n", - "change_defaults_print_intermediate(\n", - " conversation_list, number_of_conversations_to_print, conversation_min_length, wikiconv_corpus\n", - ")" + "change_defaults_print_intermediate(conversation_list, number_of_conversations_to_print, conversation_min_length,\n", + " wikiconv_corpus)" ] }, { @@ -695,7 +1139,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.0" + "version": "3.7.7" } }, "nbformat": 4, diff --git a/examples/dataset-examples/wikiconv/corpus_deletion_demo.ipynb b/examples/dataset-examples/wikiconv/corpus_deletion_demo.ipynb index c8f0a5f3..49a552da 100644 --- a/examples/dataset-examples/wikiconv/corpus_deletion_demo.ipynb +++ b/examples/dataset-examples/wikiconv/corpus_deletion_demo.ipynb @@ -27,9 +27,9 @@ "metadata": {}, "outputs": [], "source": [ - "# import relevant modules\n", + "#import relevant modules\n", "from datetime import datetime, timedelta\n", - "from convokit import Corpus, Utterance, Conversation, download" + "from convokit import Corpus, User, Utterance, Conversation, download" ] }, { @@ -41,13 +41,13 @@ "name": "stdout", "output_type": "stream", "text": [ - "Dataset already exists at /Users/seanzhangkx/.convokit/downloads/wikiconv-2003\n" + "Dataset already exists at /home/jonathan/.convokit/downloads/wikiconv-2003\n" ] } ], "source": [ "# Load the 2003 wikiconv corpus (feel free to change this to a year of your preference)\n", - "wikiconv_corpus = Corpus(filename=download(\"wikiconv-2003\"))" + "wikiconv_corpus = 
Corpus(filename=download('wikiconv-2003'))" ] }, { @@ -113,7 +113,7 @@ { "data": { "text/plain": [ - "Utterance({'obj_type': 'utterance', 'vectors': [], 'speaker_': Speaker({'obj_type': 'speaker', 'vectors': [], 'owner': , 'id': 'Jay', 'meta': ConvoKitMeta({'user_id': '14784'})}), 'owner': , 'id': '5021479.2081.2077', 'meta': ConvoKitMeta({'is_section_header': True, 'indentation': '2', 'toxicity': 0.1219038, 'sever_toxicity': 0.06112729, 'ancestor_id': '5021479.2081.2077', 'rev_id': '5021479', 'parent_id': None, 'original': None, 'modification': [], 'deletion': [], 'restoration': []})})" + "Utterance({'id': '5021479.2081.2077', 'user': User([('name', 'Jay')]), 'root': '5021479.1277.1272', 'reply_to': '5021479.1277.1272', 'timestamp': 1070614595.0, 'text': \"You're right about separating the sandwich of war names and MiG names. Each plane should be sorted chronologically and have its own sentence detailing its importance. \", 'meta': {'is_section_header': True, 'indentation': '2', 'toxicity': 0.1219038, 'sever_toxicity': 0.06112729, 'ancestor_id': '5021479.2081.2077', 'rev_id': '5021479', 'parent_id': None, 'original': None, 'modification': [], 'deletion': [], 'restoration': []}})" ] }, "execution_count": 5, @@ -139,34 +139,35 @@ "outputs": [], "source": [ "def check_deletion_list_data(list_of_deletion_utterances, original_posting_time, timedelta_value):\n", - " # Count the total number of deleted utterances of each type\n", + " #Count the total number of deleted utterances of each type\n", " count_normal = 0\n", - " count_toxic = 0\n", + " count_toxic= 0\n", " count_sever_toxic = 0\n", - "\n", + " \n", " for deletion_utt in list_of_deletion_utterances:\n", - " toxicity_val = deletion_utt[\"meta\"][\"toxicity\"]\n", - " sever_toxicity_val = deletion_utt[\"meta\"][\"sever_toxicity\"]\n", - " timestamp_value = deletion_utt[\"timestamp\"]\n", + " toxicity_val = deletion_utt.meta['toxicity']\n", + " sever_toxicity_val = deletion_utt.meta['sever_toxicity']\n", + " 
timestamp_value = deletion_utt.timestamp\n", " deletion_datetime_val = datetime.fromtimestamp(timestamp_value)\n", - "\n", - " # delta_value is the time delta between when the deletion utt happened and the original utt's posting\n", - " if original_posting_time is None:\n", + " \n", + " #delta_value is the time delta between when the deletion utt happened and the original utt's posting \n", + " if (original_posting_time is None):\n", " delta_value = 0\n", - " else:\n", - " delta_value = deletion_datetime_val - original_posting_time\n", - "\n", - " # If the delta value is less than the provided time delta, consider its type\n", - " if delta_value <= timedelta(days=timedelta_value):\n", - " if toxicity_val < 0.5 and sever_toxicity_val < 0.5:\n", - " count_normal += 1\n", - " if toxicity_val > 0.5:\n", - " count_toxic += 1\n", - " if sever_toxicity_val > 0.5:\n", - " count_sever_toxic += 1\n", - "\n", - " # Return in tuple form the number of each type of affected comment\n", - " return (count_normal, count_toxic, count_sever_toxic)" + " else: \n", + " delta_value = deletion_datetime_val - original_posting_time \n", + " \n", + " #If the delta value is less than the provided time delta, consider its type\n", + " if (delta_value <= timedelta(days = timedelta_value)):\n", + " if (toxicity_val < 0.5 and sever_toxicity_val < 0.5):\n", + " count_normal +=1\n", + " if (toxicity_val > 0.5):\n", + " count_toxic +=1\n", + " if (sever_toxicity_val > 0.5):\n", + " count_sever_toxic +=1 \n", + " \n", + " #Return in tuple form the number of each type of affected comment\n", + " return (count_normal, count_toxic, count_sever_toxic)\n", + " " ] }, { @@ -183,68 +184,67 @@ "outputs": [], "source": [ "def get_deletion_counts(individual_utterance_list, timedelta_value):\n", - " # Normal Data count\n", + " #Normal Data count\n", " count_normal_deleted = 0\n", " count_normal_total = 0\n", " set_of_normal_comments = set()\n", + " \n", "\n", - " # Toxic data count\n", + " #Toxic data 
count\n", " count_toxic_deleted = 0\n", " count_toxic_total = 0\n", " set_of_toxic_comments = set()\n", "\n", - " # Sever Toxic Data count\n", + " #Sever Toxic Data count\n", " count_sever_deleted = 0\n", " count_sever_total = 0\n", " set_of_sever_comments = set()\n", - "\n", - " # Check each utterance\n", + " \n", + " #Check each utterance\n", " for utterance_value in individual_utterance_list:\n", - " toxicity_val = utterance_value.meta[\"toxicity\"]\n", - " sever_toxicity_val = utterance_value.meta[\"sever_toxicity\"]\n", - "\n", - " # Find the total number of comments of each type\n", - " if toxicity_val < 0.5 and sever_toxicity_val < 0.5:\n", - " if utterance_value.id not in set_of_normal_comments:\n", - " count_normal_total += 1\n", - " set_of_normal_comments.add(utterance_value.id)\n", - "\n", - " if toxicity_val > 0.5:\n", - " if utterance_value.id not in set_of_toxic_comments:\n", - " count_toxic_total += 1\n", + " toxicity_val = utterance_value.meta['toxicity']\n", + " sever_toxicity_val = utterance_value.meta['sever_toxicity']\n", + " \n", + " #Find the total number of comments of each type\n", + " if (toxicity_val < 0.5 and sever_toxicity_val < 0.5):\n", + " if (utterance_value.id not in set_of_normal_comments):\n", + " count_normal_total +=1 \n", + " set_of_normal_comments.add(utterance_value.id) \n", + " \n", + " if (toxicity_val > 0.5):\n", + " if (utterance_value.id not in set_of_toxic_comments):\n", + " count_toxic_total +=1\n", " set_of_toxic_comments.add(utterance_value.id)\n", - "\n", - " if sever_toxicity_val > 0.5:\n", - " if utterance_value.id not in set_of_sever_comments:\n", - " count_sever_total += 1\n", + " \n", + " if (sever_toxicity_val > 0.5):\n", + " if (utterance_value.id not in set_of_sever_comments):\n", + " count_sever_total +=1\n", " set_of_sever_comments.add(utterance_value.id)\n", - "\n", - " # Find the time that the original utterance is posted\n", - " original_utterance = utterance_value.meta[\"original\"]\n", - " if 
original_utterance is not None:\n", - " original_time = original_utterance[\"timestamp\"]\n", + " \n", + " #Find the time that the original utterance is posted\n", + " original_utterance = utterance_value.meta['original']\n", + " if (original_utterance is not None):\n", + " original_time = original_utterance.timestamp\n", " original_date_time = datetime.fromtimestamp(original_time)\n", " else:\n", - " original_date_time = datetime.fromtimestamp(utterance_value.timestamp)\n", - "\n", - " # Count the number of deleted comments\n", - " if len(utterance_value.meta[\"deletion\"]) > 0:\n", - " deletion_list = utterance_value.meta[\"deletion\"]\n", - " ind_normal, ind_toxic, ind_sever = check_deletion_list_data(\n", - " deletion_list, original_date_time, timedelta_value\n", - " )\n", + " original_date_time = datetime.fromtimestamp(utterance_value.timestamp)\n", + " \n", + " \n", + " #Count the number of deleted comments \n", + " if (len(utterance_value.meta['deletion']) >0):\n", + " deletion_list = utterance_value.meta['deletion']\n", + " ind_normal, ind_toxic, ind_sever = check_deletion_list_data(deletion_list, original_date_time, timedelta_value)\n", " count_normal_deleted += ind_normal\n", " count_toxic_deleted += ind_toxic\n", " count_sever_deleted += ind_sever\n", + " \n", + " return (count_normal_deleted, count_toxic_deleted, count_sever_deleted, \n", + " count_normal_total, count_toxic_total, count_sever_total)\n", + "\n", + "\n", + "\n", "\n", - " return (\n", - " count_normal_deleted,\n", - " count_toxic_deleted,\n", - " count_sever_deleted,\n", - " count_normal_total,\n", - " count_toxic_total,\n", - " count_sever_total,\n", - " )" + "\n" ] }, { @@ -260,21 +260,14 @@ "metadata": {}, "outputs": [], "source": [ - "def print_statistics(\n", - " count_normal_deleted,\n", - " count_toxic_deleted,\n", - " count_sever_deleted,\n", - " total_normal,\n", - " total_toxic,\n", - " total_sever,\n", - "):\n", - " prop_normal = count_normal_deleted / float(total_normal)\n", - 
" prop_toxic = count_toxic_deleted / float(total_toxic)\n", - " prop_sever = count_sever_deleted / float(total_sever)\n", + "def print_statistics(count_normal_deleted, count_toxic_deleted, count_sever_deleted, total_normal, total_toxic, total_sever):\n", + " prop_normal = count_normal_deleted/float(total_normal)\n", + " prop_toxic = count_toxic_deleted/float(total_toxic)\n", + " prop_sever = count_sever_deleted/float(total_sever)\n", "\n", - " print(\"Proportion of normal comments deleted: \" + str(prop_normal))\n", - " print(\"Proportion of toxic comments deleted: \" + str(prop_toxic))\n", - " print(\"Proportion of sever toxic comments deleted: \" + str(prop_sever))" + " print ('Proportion of normal comments deleted: ' + str(prop_normal)) \n", + " print ('Proportion of toxic comments deleted: ' + str(prop_toxic))\n", + " print ('Proportion of sever toxic comments deleted: ' + str(prop_sever)) " ] }, { @@ -300,28 +293,17 @@ } ], "source": [ - "# Set the default values we will need to compute the corpus statistics\n", + "#Set the default values we will need to compute the corpus statistics\n", "individual_utterance_list = list(wikiconv_corpus.iter_utterances())\n", "len_utterances = len(individual_utterance_list)\n", "timedelta_value = 1\n", "\n", - "# Find the counts of deleted comments and print statistics with a time delta of One Day\n", - "(\n", - " count_normal_deleted,\n", - " count_toxic_deleted,\n", - " count_sever_deleted,\n", - " total_normal,\n", - " total_toxic,\n", - " total_sever,\n", - ") = get_deletion_counts(individual_utterance_list, timedelta_value)\n", - "print_statistics(\n", - " count_normal_deleted,\n", - " count_toxic_deleted,\n", - " count_sever_deleted,\n", - " total_normal,\n", - " total_toxic,\n", - " total_sever,\n", - ")" + "#Find the counts of deleted comments and print statistics with a time delta of One Day\n", + "(count_normal_deleted, count_toxic_deleted, count_sever_deleted,\n", + " total_normal, total_toxic, total_sever) = 
get_deletion_counts(individual_utterance_list, timedelta_value)\n", + "print_statistics(count_normal_deleted, count_toxic_deleted, count_sever_deleted,\n", + " total_normal, total_toxic, total_sever)\n", + "\n" ] }, { @@ -348,22 +330,11 @@ ], "source": [ "timedelta_value = 7\n", - "(\n", - " count_normal_deleted,\n", - " count_toxic_deleted,\n", - " count_sever_deleted,\n", - " total_normal,\n", - " total_toxic,\n", - " total_sever,\n", - ") = get_deletion_counts(individual_utterance_list, timedelta_value)\n", - "print_statistics(\n", - " count_normal_deleted,\n", - " count_toxic_deleted,\n", - " count_sever_deleted,\n", - " total_normal,\n", - " total_toxic,\n", - " total_sever,\n", - ")" + "(count_normal_deleted, count_toxic_deleted, count_sever_deleted, \n", + " total_normal, total_toxic, total_sever) = get_deletion_counts(individual_utterance_list, timedelta_value)\n", + "print_statistics(count_normal_deleted, count_toxic_deleted, count_sever_deleted,\n", + " total_normal, total_toxic, total_sever)\n", + "\n" ] }, { @@ -390,22 +361,10 @@ ], "source": [ "timedelta_value = 30\n", - "(\n", - " count_normal_deleted,\n", - " count_toxic_deleted,\n", - " count_sever_deleted,\n", - " total_normal,\n", - " total_toxic,\n", - " total_sever,\n", - ") = get_deletion_counts(individual_utterance_list, timedelta_value)\n", - "print_statistics(\n", - " count_normal_deleted,\n", - " count_toxic_deleted,\n", - " count_sever_deleted,\n", - " total_normal,\n", - " total_toxic,\n", - " total_sever,\n", - ")" + "(count_normal_deleted, count_toxic_deleted, count_sever_deleted, \n", + " total_normal, total_toxic, total_sever) = get_deletion_counts(individual_utterance_list, timedelta_value)\n", + "print_statistics(count_normal_deleted, count_toxic_deleted, count_sever_deleted,\n", + " total_normal, total_toxic, total_sever)\n" ] }, { @@ -432,22 +391,10 @@ ], "source": [ "timedelta_value = 365\n", - "(\n", - " count_normal_deleted,\n", - " count_toxic_deleted,\n", - " 
count_sever_deleted,\n", - " total_normal,\n", - " total_toxic,\n", - " total_sever,\n", - ") = get_deletion_counts(individual_utterance_list, timedelta_value)\n", - "print_statistics(\n", - " count_normal_deleted,\n", - " count_toxic_deleted,\n", - " count_sever_deleted,\n", - " total_normal,\n", - " total_toxic,\n", - " total_sever,\n", - ")" + "(count_normal_deleted, count_toxic_deleted, count_sever_deleted,\n", + " total_normal, total_toxic, total_sever) = get_deletion_counts(individual_utterance_list, timedelta_value)\n", + "print_statistics(count_normal_deleted, count_toxic_deleted, count_sever_deleted,\n", + " total_normal, total_toxic, total_sever)" ] } ], @@ -467,7 +414,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.0" + "version": "3.7.2" } }, "nbformat": 4, diff --git a/examples/hyperconvo/hyperconvo_demo.ipynb b/examples/hyperconvo/hyperconvo_demo.ipynb index ee8a38ce..d46ef45d 100644 --- a/examples/hyperconvo/hyperconvo_demo.ipynb +++ b/examples/hyperconvo/hyperconvo_demo.ipynb @@ -14,7 +14,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -24,7 +24,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ @@ -43,14 +43,14 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Dataset already exists at /Users/seanzhangkx/.convokit/downloads/reddit-corpus-small\n" + "Dataset already exists at /Users/calebchiam/Documents/GitHub/ConvoKit/convokit/tensors/reddit-corpus-small\n" ] } ], @@ -60,16 +60,19 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "ConvoKitMeta({'subreddit': 'reddit-corpus-small', 'num_posts': 8286, 'num_comments': 288846, 'num_user': 119889})" + 
"{'subreddit': 'reddit-corpus-small',\n", + " 'num_posts': 8286,\n", + " 'num_comments': 288846,\n", + " 'num_user': 119889}" ] }, - "execution_count": 4, + "execution_count": 5, "metadata": {}, "output_type": "execute_result" } @@ -80,7 +83,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -115,18 +118,16 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ - "top_level_utterance_ids = [\n", - " utt.id for utt in corpus.iter_utterances() if utt.id == utt.meta[\"top_level_comment\"]\n", - "]" + "top_level_utterance_ids = [utt.id for utt in corpus.iter_utterances() if utt.id == utt.meta['top_level_comment']]" ] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 8, "metadata": {}, "outputs": [ { @@ -135,7 +136,7 @@ "10000" ] }, - "execution_count": 7, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } @@ -146,16 +147,13 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 9, "metadata": {}, "outputs": [], "source": [ - "threads_corpus = corpus.reindex_conversations(\n", - " source_corpus=corpus,\n", - " new_convo_roots=top_level_utterance_ids,\n", - " preserve_convo_meta=True,\n", - " preserve_corpus_meta=False,\n", - ")" + "threads_corpus = corpus.reindex_conversations(new_convo_roots=top_level_utterance_ids, \n", + " preserve_convo_meta=True,\n", + " preserve_corpus_meta=False)" ] }, { @@ -167,27 +165,27 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 10, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Utterance({'obj_type': 'utterance', 'vectors': [], 'speaker_': Speaker({'obj_type': 'speaker', 'vectors': [], 'owner': , 'id': 'Prestonurvagi', 'meta': ConvoKitMeta({'num_posts': 0, 'num_comments': 1})}), 'owner': , 'id': 'e63uqhy', 'meta': ConvoKitMeta({'score': 1, 'top_level_comment': 'e63uqhy', 'retrieved_on': 1539133743, 'gilded': 0, 
'gildings': {'gid_1': 0, 'gid_2': 0, 'gid_3': 0}, 'subreddit': 'sex', 'stickied': False, 'permalink': '/r/sex/comments/9gfh93/first_day_of_my_menstrual_cycle_and_im_wanting/e63uqhy/', 'author_flair_text': ''})})" + "Utterance({'obj_type': 'utterance', '_owner': , 'meta': {'score': 4091, 'top_level_comment': None, 'retrieved_on': 1540057333, 'gilded': 0, 'gildings': {'gid_1': 0, 'gid_2': 0, 'gid_3': 0}, 'subreddit': 'tifu', 'stickied': False, 'permalink': '/r/tifu/comments/9bzh9g/tifu_by_masturbating_with_my_dads_penis/', 'author_flair_text': ''}, '_id': '9bzh9g', 'vectors': [], 'speaker': Speaker({'obj_type': 'speaker', '_owner': , 'meta': {'num_posts': 1, 'num_comments': 0}, '_id': 'gerbalt', 'vectors': []}), 'conversation_id': '9bzh9g', '_root': '9bzh9g', 'reply_to': None, 'timestamp': 1535767318, 'text': '[removed]'})" ] }, - "execution_count": 9, + "execution_count": 10, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "corpus.random_utterance()" + "corpus.get_utterance('9bzh9g')" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 11, "metadata": {}, "outputs": [ { @@ -206,7 +204,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 12, "metadata": {}, "outputs": [ { @@ -240,16 +238,16 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 13, "metadata": {}, "outputs": [], "source": [ - "demo_threads = [\"e57u6ft\", \"e56rtrx\"]" + "demo_threads = ['e57u6ft', 'e56rtrx']" ] }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 14, "metadata": {}, "outputs": [ { @@ -305,12 +303,12 @@ } ], "source": [ - "threads_corpus.get_conversation(\"e57u6ft\").print_conversation_structure()" + "threads_corpus.get_conversation('e57u6ft').print_conversation_structure()" ] }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 15, "metadata": {}, "outputs": [ { @@ -335,12 +333,12 @@ } ], "source": [ - 
"threads_corpus.get_conversation(\"e56rtrx\").print_conversation_structure()" + "threads_corpus.get_conversation('e56rtrx').print_conversation_structure()" ] }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 16, "metadata": {}, "outputs": [ { @@ -365,7 +363,7 @@ } ], "source": [ - "threads_corpus.get_conversation(\"e56rtrx\").print_conversation_structure(lambda utt: utt.text)" + "threads_corpus.get_conversation('e56rtrx').print_conversation_structure(lambda utt: utt.text)" ] }, { @@ -384,16 +382,16 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 17, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 16, + "execution_count": 17, "metadata": {}, "output_type": "execute_result" } @@ -401,7 +399,7 @@ "source": [ "# create a hyperconvo object and use it to extract features\n", "# Limit our analysis to the first 10 comments of threads with at least 10 comments\n", - "hc = HyperConvo(prefix_len=10, min_convo_len=10)\n", + "hc = HyperConvo(prefix_len=10, min_convo_len=10) \n", "hc.transform(threads_corpus)" ] }, @@ -414,7 +412,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 18, "metadata": {}, "outputs": [ { @@ -423,7 +421,7 @@ "{'hyperconvo'}" ] }, - "execution_count": 17, + "execution_count": 18, "metadata": {}, "output_type": "execute_result" } @@ -434,47 +432,47 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 19, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "e5hm9mp\n" + "e6p7yrp\n" ] } ], "source": [ "# Let's look at a (valid) Conversation that has a HyperConvo vector computed for it\n", - "convo1 = next(threads_corpus.iter_conversations(lambda convo: convo.has_vector(\"hyperconvo\")))\n", + "convo1 = next(threads_corpus.iter_conversations(lambda convo: convo.has_vector('hyperconvo')))\n", "print(convo1.id)" ] }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 
21, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "<1x140 sparse matrix of type ''\n", - "\twith 132 stored elements in Compressed Sparse Row format>" + "\twith 130 stored elements in Compressed Sparse Row format>" ] }, - "execution_count": 19, + "execution_count": 21, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "convo1.get_vector(\"hyperconvo\")" + "convo1.get_vector('hyperconvo')" ] }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 23, "metadata": {}, "outputs": [ { @@ -498,714 +496,717 @@ " \n", " \n", " \n", - " max[indegree over c->c responses]\n", - " argmax[indegree over c->c responses]\n", - " norm.max[indegree over c->c responses]\n", - " 2nd-largest[indegree over c->c responses]\n", + " 2nd-argmax[indegree over C->C mid-thread responses]\n", + " 2nd-argmax[indegree over C->C responses]\n", + " 2nd-argmax[indegree over C->c mid-thread responses]\n", + " 2nd-argmax[indegree over C->c responses]\n", + " 2nd-argmax[indegree over c->c mid-thread responses]\n", " 2nd-argmax[indegree over c->c responses]\n", - " norm.2nd-largest[indegree over c->c responses]\n", - " mean[indegree over c->c responses]\n", - " mean-nonzero[indegree over c->c responses]\n", - " prop-nonzero[indegree over c->c responses]\n", - " prop-multiple[indegree over c->c responses]\n", - " entropy[indegree over c->c responses]\n", - " 2nd-largest / max[indegree over c->c responses]\n", - " max[outdegree over C->c responses]\n", - " max[indegree over C->c responses]\n", - " argmax[outdegree over C->c responses]\n", - " argmax[indegree over C->c responses]\n", - " norm.max[outdegree over C->c responses]\n", - " norm.max[indegree over C->c responses]\n", - " 2nd-largest[outdegree over C->c responses]\n", - " 2nd-largest[indegree over C->c responses]\n", + " 2nd-argmax[outdegree over C->C mid-thread responses]\n", + " 2nd-argmax[outdegree over C->C responses]\n", + " 2nd-argmax[outdegree over C->c mid-thread responses]\n", " 
2nd-argmax[outdegree over C->c responses]\n", - " 2nd-argmax[indegree over C->c responses]\n", - " norm.2nd-largest[outdegree over C->c responses]\n", - " norm.2nd-largest[indegree over C->c responses]\n", - " mean[outdegree over C->c responses]\n", - " mean[indegree over C->c responses]\n", - " mean-nonzero[outdegree over C->c responses]\n", - " mean-nonzero[indegree over C->c responses]\n", - " prop-nonzero[outdegree over C->c responses]\n", - " prop-nonzero[indegree over C->c responses]\n", - " prop-multiple[outdegree over C->c responses]\n", - " prop-multiple[indegree over C->c responses]\n", - " entropy[outdegree over C->c responses]\n", - " entropy[indegree over C->c responses]\n", - " 2nd-largest / max[outdegree over C->c responses]\n", + " 2nd-largest / max[indegree over C->C mid-thread responses]\n", + " 2nd-largest / max[indegree over C->C responses]\n", + " 2nd-largest / max[indegree over C->c mid-thread responses]\n", " 2nd-largest / max[indegree over C->c responses]\n", - " max[outdegree over C->C responses]\n", - " max[indegree over C->C responses]\n", - " argmax[outdegree over C->C responses]\n", - " argmax[indegree over C->C responses]\n", - " norm.max[outdegree over C->C responses]\n", - " norm.max[indegree over C->C responses]\n", - " 2nd-largest[outdegree over C->C responses]\n", - " 2nd-largest[indegree over C->C responses]\n", - " 2nd-argmax[outdegree over C->C responses]\n", - " 2nd-argmax[indegree over C->C responses]\n", - " norm.2nd-largest[outdegree over C->C responses]\n", - " norm.2nd-largest[indegree over C->C responses]\n", - " mean[outdegree over C->C responses]\n", - " mean[indegree over C->C responses]\n", - " mean-nonzero[outdegree over C->C responses]\n", - " mean-nonzero[indegree over C->C responses]\n", - " prop-nonzero[outdegree over C->C responses]\n", - " prop-nonzero[indegree over C->C responses]\n", - " prop-multiple[outdegree over C->C responses]\n", - " prop-multiple[indegree over C->C responses]\n", - " entropy[outdegree 
over C->C responses]\n", - " entropy[indegree over C->C responses]\n", + " 2nd-largest / max[indegree over c->c mid-thread responses]\n", + " 2nd-largest / max[indegree over c->c responses]\n", + " 2nd-largest / max[outdegree over C->C mid-thread responses]\n", " 2nd-largest / max[outdegree over C->C responses]\n", - " 2nd-largest / max[indegree over C->C responses]\n", - " is-present[reciprocity motif]\n", - " count[reciprocity motif]\n", - " is-present[external reciprocity motif]\n", - " count[external reciprocity motif]\n", - " is-present[dyadic interaction motif]\n", + " 2nd-largest / max[outdegree over C->c mid-thread responses]\n", + " 2nd-largest / max[outdegree over C->c responses]\n", + " 2nd-largest[indegree over C->C mid-thread responses]\n", + " 2nd-largest[indegree over C->C responses]\n", + " 2nd-largest[indegree over C->c mid-thread responses]\n", + " 2nd-largest[indegree over C->c responses]\n", + " 2nd-largest[indegree over c->c mid-thread responses]\n", + " 2nd-largest[indegree over c->c responses]\n", + " 2nd-largest[outdegree over C->C mid-thread responses]\n", + " 2nd-largest[outdegree over C->C responses]\n", + " 2nd-largest[outdegree over C->c mid-thread responses]\n", + " 2nd-largest[outdegree over C->c responses]\n", + " argmax[indegree over C->C mid-thread responses]\n", + " argmax[indegree over C->C responses]\n", + " argmax[indegree over C->c mid-thread responses]\n", + " argmax[indegree over C->c responses]\n", + " argmax[indegree over c->c mid-thread responses]\n", + " argmax[indegree over c->c responses]\n", + " argmax[outdegree over C->C mid-thread responses]\n", + " argmax[outdegree over C->C responses]\n", + " argmax[outdegree over C->c mid-thread responses]\n", + " argmax[outdegree over C->c responses]\n", + " count[dyadic interaction motif over mid-thread]\n", " count[dyadic interaction motif]\n", - " is-present[incoming triads]\n", + " count[external reciprocity motif over mid-thread]\n", + " count[external reciprocity 
motif]\n", + " count[incoming triads over mid-thread]\n", " count[incoming triads]\n", - " is-present[outgoing triads]\n", + " count[outgoing triads over mid-thread]\n", " count[outgoing triads]\n", - " max[indegree over c->c mid-thread responses]\n", - " argmax[indegree over c->c mid-thread responses]\n", - " norm.max[indegree over c->c mid-thread responses]\n", - " 2nd-largest[indegree over c->c mid-thread responses]\n", - " 2nd-argmax[indegree over c->c mid-thread responses]\n", - " norm.2nd-largest[indegree over c->c mid-thread responses]\n", - " mean[indegree over c->c mid-thread responses]\n", - " mean-nonzero[indegree over c->c mid-thread responses]\n", - " prop-nonzero[indegree over c->c mid-thread responses]\n", - " prop-multiple[indegree over c->c mid-thread responses]\n", - " entropy[indegree over c->c mid-thread responses]\n", - " 2nd-largest / max[indegree over c->c mid-thread responses]\n", - " max[outdegree over C->c mid-thread responses]\n", - " max[indegree over C->c mid-thread responses]\n", - " argmax[outdegree over C->c mid-thread responses]\n", - " argmax[indegree over C->c mid-thread responses]\n", - " norm.max[outdegree over C->c mid-thread responses]\n", - " norm.max[indegree over C->c mid-thread responses]\n", - " 2nd-largest[outdegree over C->c mid-thread responses]\n", - " 2nd-largest[indegree over C->c mid-thread responses]\n", - " 2nd-argmax[outdegree over C->c mid-thread responses]\n", - " 2nd-argmax[indegree over C->c mid-thread responses]\n", - " norm.2nd-largest[outdegree over C->c mid-thread responses]\n", - " norm.2nd-largest[indegree over C->c mid-thread responses]\n", - " mean[outdegree over C->c mid-thread responses]\n", - " mean[indegree over C->c mid-thread responses]\n", - " mean-nonzero[outdegree over C->c mid-thread responses]\n", - " mean-nonzero[indegree over C->c mid-thread responses]\n", - " prop-nonzero[outdegree over C->c mid-thread responses]\n", - " prop-nonzero[indegree over C->c mid-thread responses]\n", - " 
prop-multiple[outdegree over C->c mid-thread responses]\n", - " prop-multiple[indegree over C->c mid-thread responses]\n", - " entropy[outdegree over C->c mid-thread responses]\n", + " count[reciprocity motif over mid-thread]\n", + " count[reciprocity motif]\n", + " entropy[indegree over C->C mid-thread responses]\n", + " entropy[indegree over C->C responses]\n", " entropy[indegree over C->c mid-thread responses]\n", - " 2nd-largest / max[outdegree over C->c mid-thread responses]\n", - " 2nd-largest / max[indegree over C->c mid-thread responses]\n", - " max[outdegree over C->C mid-thread responses]\n", - " max[indegree over C->C mid-thread responses]\n", - " argmax[outdegree over C->C mid-thread responses]\n", - " argmax[indegree over C->C mid-thread responses]\n", - " norm.max[outdegree over C->C mid-thread responses]\n", - " norm.max[indegree over C->C mid-thread responses]\n", - " 2nd-largest[outdegree over C->C mid-thread responses]\n", - " 2nd-largest[indegree over C->C mid-thread responses]\n", - " 2nd-argmax[outdegree over C->C mid-thread responses]\n", - " 2nd-argmax[indegree over C->C mid-thread responses]\n", - " norm.2nd-largest[outdegree over C->C mid-thread responses]\n", - " norm.2nd-largest[indegree over C->C mid-thread responses]\n", - " mean[outdegree over C->C mid-thread responses]\n", - " mean[indegree over C->C mid-thread responses]\n", - " mean-nonzero[outdegree over C->C mid-thread responses]\n", - " mean-nonzero[indegree over C->C mid-thread responses]\n", - " prop-nonzero[outdegree over C->C mid-thread responses]\n", - " prop-nonzero[indegree over C->C mid-thread responses]\n", - " prop-multiple[outdegree over C->C mid-thread responses]\n", - " prop-multiple[indegree over C->C mid-thread responses]\n", + " entropy[indegree over C->c responses]\n", + " entropy[indegree over c->c mid-thread responses]\n", + " entropy[indegree over c->c responses]\n", " entropy[outdegree over C->C mid-thread responses]\n", - " entropy[indegree over C->C 
mid-thread responses]\n", - " 2nd-largest / max[outdegree over C->C mid-thread responses]\n", - " 2nd-largest / max[indegree over C->C mid-thread responses]\n", - " is-present[reciprocity motif over mid-thread]\n", - " count[reciprocity motif over mid-thread]\n", - " is-present[external reciprocity motif over mid-thread]\n", - " count[external reciprocity motif over mid-thread]\n", + " entropy[outdegree over C->C responses]\n", + " entropy[outdegree over C->c mid-thread responses]\n", + " entropy[outdegree over C->c responses]\n", " is-present[dyadic interaction motif over mid-thread]\n", - " count[dyadic interaction motif over mid-thread]\n", + " is-present[dyadic interaction motif]\n", + " is-present[external reciprocity motif over mid-thread]\n", + " is-present[external reciprocity motif]\n", " is-present[incoming triads over mid-thread]\n", - " count[incoming triads over mid-thread]\n", + " is-present[incoming triads]\n", " is-present[outgoing triads over mid-thread]\n", - " count[outgoing triads over mid-thread]\n", - " \n", - " \n", - " \n", - " \n", - " e5hm9mp\n", - " 3.0\n", - " 1.0\n", - " 0.333333\n", - " 1.0\n", - " 0.0\n", - " 0.111111\n", - " 0.9\n", - " 1.285714\n", - " 0.7\n", - " 0.142857\n", - " 1.83102\n", - " 0.333333\n", - " 2.0\n", - " 3.0\n", - " 0.0\n", - " 1.0\n", - " 0.222222\n", - " 0.333333\n", - " 2.0\n", - " 1.0\n", - " 2.0\n", - " 0.0\n", - " 0.222222\n", - " 0.111111\n", - " 1.8\n", - " 0.9\n", - " 1.8\n", - " 1.285714\n", - " 1.0\n", - " 0.7\n", - " 0.8\n", - " 0.142857\n", - " 1.581094\n", - " 1.83102\n", - " 1.0\n", - " 0.333333\n", - " 2.0\n", - " 3.0\n", + " is-present[outgoing triads]\n", + " is-present[reciprocity motif over mid-thread]\n", + " is-present[reciprocity motif]\n", + " max[indegree over C->C mid-thread responses]\n", + " max[indegree over C->C responses]\n", + " max[indegree over C->c mid-thread responses]\n", + " max[indegree over C->c responses]\n", + " max[indegree over c->c mid-thread responses]\n", + " 
max[indegree over c->c responses]\n", + " max[outdegree over C->C mid-thread responses]\n", + " max[outdegree over C->C responses]\n", + " max[outdegree over C->c mid-thread responses]\n", + " max[outdegree over C->c responses]\n", + " mean-nonzero[indegree over C->C mid-thread responses]\n", + " mean-nonzero[indegree over C->C responses]\n", + " mean-nonzero[indegree over C->c mid-thread responses]\n", + " mean-nonzero[indegree over C->c responses]\n", + " mean-nonzero[indegree over c->c mid-thread responses]\n", + " mean-nonzero[indegree over c->c responses]\n", + " mean-nonzero[outdegree over C->C mid-thread responses]\n", + " mean-nonzero[outdegree over C->C responses]\n", + " mean-nonzero[outdegree over C->c mid-thread responses]\n", + " mean-nonzero[outdegree over C->c responses]\n", + " mean[indegree over C->C mid-thread responses]\n", + " mean[indegree over C->C responses]\n", + " mean[indegree over C->c mid-thread responses]\n", + " mean[indegree over C->c responses]\n", + " mean[indegree over c->c mid-thread responses]\n", + " mean[indegree over c->c responses]\n", + " mean[outdegree over C->C mid-thread responses]\n", + " mean[outdegree over C->C responses]\n", + " mean[outdegree over C->c mid-thread responses]\n", + " mean[outdegree over C->c responses]\n", + " norm.2nd-largest[indegree over C->C mid-thread responses]\n", + " norm.2nd-largest[indegree over C->C responses]\n", + " norm.2nd-largest[indegree over C->c mid-thread responses]\n", + " norm.2nd-largest[indegree over C->c responses]\n", + " norm.2nd-largest[indegree over c->c mid-thread responses]\n", + " norm.2nd-largest[indegree over c->c responses]\n", + " norm.2nd-largest[outdegree over C->C mid-thread responses]\n", + " norm.2nd-largest[outdegree over C->C responses]\n", + " norm.2nd-largest[outdegree over C->c mid-thread responses]\n", + " norm.2nd-largest[outdegree over C->c responses]\n", + " norm.max[indegree over C->C mid-thread responses]\n", + " norm.max[indegree over C->C 
responses]\n", + " norm.max[indegree over C->c mid-thread responses]\n", + " norm.max[indegree over C->c responses]\n", + " norm.max[indegree over c->c mid-thread responses]\n", + " norm.max[indegree over c->c responses]\n", + " norm.max[outdegree over C->C mid-thread responses]\n", + " norm.max[outdegree over C->C responses]\n", + " norm.max[outdegree over C->c mid-thread responses]\n", + " norm.max[outdegree over C->c responses]\n", + " prop-multiple[indegree over C->C mid-thread responses]\n", + " prop-multiple[indegree over C->C responses]\n", + " prop-multiple[indegree over C->c mid-thread responses]\n", + " prop-multiple[indegree over C->c responses]\n", + " prop-multiple[indegree over c->c mid-thread responses]\n", + " prop-multiple[indegree over c->c responses]\n", + " prop-multiple[outdegree over C->C mid-thread responses]\n", + " prop-multiple[outdegree over C->C responses]\n", + " prop-multiple[outdegree over C->c mid-thread responses]\n", + " prop-multiple[outdegree over C->c responses]\n", + " prop-nonzero[indegree over C->C mid-thread responses]\n", + " prop-nonzero[indegree over C->C responses]\n", + " prop-nonzero[indegree over C->c mid-thread responses]\n", + " prop-nonzero[indegree over C->c responses]\n", + " prop-nonzero[indegree over c->c mid-thread responses]\n", + " prop-nonzero[indegree over c->c responses]\n", + " prop-nonzero[outdegree over C->C mid-thread responses]\n", + " prop-nonzero[outdegree over C->C responses]\n", + " prop-nonzero[outdegree over C->c mid-thread responses]\n", + " prop-nonzero[outdegree over C->c responses]\n", + " \n", + " \n", + " \n", + " \n", + " e6p7yrp\n", " 0.0\n", " 1.0\n", - " 0.25\n", - " 0.375\n", - " 2.0\n", - " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", " 2.0\n", - " 0.0\n", - " 0.25\n", - " 0.25\n", - " 1.6\n", - " 1.6\n", - " 1.6\n", - " 1.6\n", " 1.0\n", + " 2.0\n", + " 3.0\n", + " 0.333333\n", + " 0.2\n", " 1.0\n", - " 0.6\n", - " 0.4\n", - " 1.559581\n", - " 1.494175\n", + " 
0.333333\n", " 1.0\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", " 0.666667\n", " 1.0\n", - " 4.0\n", " 1.0\n", - " 4.0\n", " 1.0\n", - " 3.0\n", " 1.0\n", - " 4.0\n", " 1.0\n", - " 3.0\n", - " 3.0\n", - " 0.0\n", - " 0.375\n", " 1.0\n", " 1.0\n", - " 0.125\n", - " 0.888889\n", - " 1.333333\n", - " 0.666667\n", - " 0.166667\n", - " 1.667462\n", - " 0.333333\n", - " 2.0\n", - " 3.0\n", " 1.0\n", - " 0.0\n", - " 0.25\n", - " 0.375\n", - " 2.0\n", " 1.0\n", " 2.0\n", " 1.0\n", - " 0.25\n", - " 0.125\n", - " 1.6\n", - " 0.888889\n", - " 2.0\n", - " 1.333333\n", - " 0.8\n", - " 0.666667\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", " 1.0\n", - " 0.166667\n", - " 1.386294\n", - " 1.667462\n", + " 0.0\n", " 1.0\n", - " 0.333333\n", + " 0.0\n", " 2.0\n", " 3.0\n", - " 1.0\n", - " 0.0\n", - " 0.285714\n", - " 0.428571\n", " 2.0\n", - " 1.0\n", " 2.0\n", + " 3.0\n", + " 10.0\n", + " 3.0\n", + " 3.0\n", + " 2.0\n", + " 4.0\n", + " 1.242453\n", + " 1.073543\n", + " 1.791759\n", + " 1.83102\n", + " 1.791759\n", + " 1.83102\n", + " 1.242453\n", + " 1.667462\n", + " 1.242453\n", + " 1.676988\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", " 1.0\n", - " 0.285714\n", - " 0.142857\n", - " 1.4\n", - " 1.4\n", - " 1.75\n", - " 1.4\n", - " 0.8\n", " 1.0\n", - " 0.75\n", - " 0.2\n", - " 1.351784\n", - " 1.475076\n", " 1.0\n", - " 0.333333\n", " 1.0\n", - " 3.0\n", " 1.0\n", - " 2.0\n", " 1.0\n", - " 2.0\n", + " 1.0\n", + " 3.0\n", + " 5.0\n", " 1.0\n", " 3.0\n", " 1.0\n", " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 1.5\n", + " 2.0\n", + " 1.0\n", + " 1.285714\n", + " 1.0\n", + " 1.285714\n", + " 1.5\n", + " 1.333333\n", + " 1.5\n", + " 1.5\n", + " 1.0\n", + " 1.333333\n", + " 0.666667\n", + " 0.9\n", + " 0.666667\n", + " 0.9\n", + " 1.0\n", + " 1.333333\n", + " 1.0\n", + " 1.5\n", + " 0.166667\n", + " 0.125\n", + " 0.166667\n", + " 0.111111\n", + " 0.166667\n", + " 0.111111\n", + " 0.166667\n", + " 0.125\n", + " 0.166667\n", + " 
0.222222\n", + " 0.5\n", + " 0.625\n", + " 0.166667\n", + " 0.333333\n", + " 0.166667\n", + " 0.333333\n", + " 0.5\n", + " 0.375\n", + " 0.5\n", + " 0.333333\n", + " 0.25\n", + " 0.25\n", + " 0.0\n", + " 0.142857\n", + " 0.0\n", + " 0.142857\n", + " 0.25\n", + " 0.166667\n", + " 0.25\n", + " 0.333333\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.7\n", + " 0.666667\n", + " 0.7\n", + " 0.666667\n", + " 1.0\n", + " 0.666667\n", + " 1.0\n", " \n", " \n", "\n", "" ], "text/plain": [ - " max[indegree over c->c responses] \\\n", - "e5hm9mp 3.0 \n", + " 2nd-argmax[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.0 \n", "\n", - " argmax[indegree over c->c responses] \\\n", - "e5hm9mp 1.0 \n", + " 2nd-argmax[indegree over C->C responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " norm.max[indegree over c->c responses] \\\n", - "e5hm9mp 0.333333 \n", + " 2nd-argmax[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " 2nd-largest[indegree over c->c responses] \\\n", - "e5hm9mp 1.0 \n", + " 2nd-argmax[indegree over C->c responses] \\\n", + "e6p7yrp 1.0 \n", + "\n", + " 2nd-argmax[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", " 2nd-argmax[indegree over c->c responses] \\\n", - "e5hm9mp 0.0 \n", + "e6p7yrp 1.0 \n", "\n", - " norm.2nd-largest[indegree over c->c responses] \\\n", - "e5hm9mp 0.111111 \n", + " 2nd-argmax[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 2.0 \n", "\n", - " mean[indegree over c->c responses] \\\n", - "e5hm9mp 0.9 \n", + " 2nd-argmax[outdegree over C->C responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " mean-nonzero[indegree over c->c responses] \\\n", - "e5hm9mp 1.285714 \n", + " 2nd-argmax[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 2.0 \n", "\n", - " prop-nonzero[indegree over c->c responses] \\\n", - "e5hm9mp 0.7 \n", + " 2nd-argmax[outdegree over C->c responses] \\\n", + "e6p7yrp 3.0 \n", "\n", - " prop-multiple[indegree over c->c responses] \\\n", - 
"e5hm9mp 0.142857 \n", - "\n", - " entropy[indegree over c->c responses] \\\n", - "e5hm9mp 1.83102 \n", - "\n", - " 2nd-largest / max[indegree over c->c responses] \\\n", - "e5hm9mp 0.333333 \n", + " 2nd-largest / max[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.333333 \n", "\n", - " max[outdegree over C->c responses] \\\n", - "e5hm9mp 2.0 \n", + " 2nd-largest / max[indegree over C->C responses] \\\n", + "e6p7yrp 0.2 \n", "\n", - " max[indegree over C->c responses] \\\n", - "e5hm9mp 3.0 \n", + " 2nd-largest / max[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " argmax[outdegree over C->c responses] \\\n", - "e5hm9mp 0.0 \n", + " 2nd-largest / max[indegree over C->c responses] \\\n", + "e6p7yrp 0.333333 \n", "\n", - " argmax[indegree over C->c responses] \\\n", - "e5hm9mp 1.0 \n", + " 2nd-largest / max[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " norm.max[outdegree over C->c responses] \\\n", - "e5hm9mp 0.222222 \n", + " 2nd-largest / max[indegree over c->c responses] \\\n", + "e6p7yrp 0.333333 \n", "\n", - " norm.max[indegree over C->c responses] \\\n", - "e5hm9mp 0.333333 \n", + " 2nd-largest / max[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.333333 \n", "\n", - " 2nd-largest[outdegree over C->c responses] \\\n", - "e5hm9mp 2.0 \n", + " 2nd-largest / max[outdegree over C->C responses] \\\n", + "e6p7yrp 0.333333 \n", "\n", - " 2nd-largest[indegree over C->c responses] \\\n", - "e5hm9mp 1.0 \n", + " 2nd-largest / max[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.333333 \n", "\n", - " 2nd-argmax[outdegree over C->c responses] \\\n", - "e5hm9mp 2.0 \n", + " 2nd-largest / max[outdegree over C->c responses] \\\n", + "e6p7yrp 0.666667 \n", "\n", - " 2nd-argmax[indegree over C->c responses] \\\n", - "e5hm9mp 0.0 \n", + " 2nd-largest[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " norm.2nd-largest[outdegree over C->c responses] \\\n", 
- "e5hm9mp 0.222222 \n", + " 2nd-largest[indegree over C->C responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " norm.2nd-largest[indegree over C->c responses] \\\n", - "e5hm9mp 0.111111 \n", + " 2nd-largest[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " mean[outdegree over C->c responses] \\\n", - "e5hm9mp 1.8 \n", + " 2nd-largest[indegree over C->c responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " mean[indegree over C->c responses] \\\n", - "e5hm9mp 0.9 \n", + " 2nd-largest[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " mean-nonzero[outdegree over C->c responses] \\\n", - "e5hm9mp 1.8 \n", + " 2nd-largest[indegree over c->c responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " mean-nonzero[indegree over C->c responses] \\\n", - "e5hm9mp 1.285714 \n", + " 2nd-largest[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " prop-nonzero[outdegree over C->c responses] \\\n", - "e5hm9mp 1.0 \n", + " 2nd-largest[outdegree over C->C responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " prop-nonzero[indegree over C->c responses] \\\n", - "e5hm9mp 0.7 \n", + " 2nd-largest[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " prop-multiple[outdegree over C->c responses] \\\n", - "e5hm9mp 0.8 \n", + " 2nd-largest[outdegree over C->c responses] \\\n", + "e6p7yrp 2.0 \n", "\n", - " prop-multiple[indegree over C->c responses] \\\n", - "e5hm9mp 0.142857 \n", + " argmax[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " entropy[outdegree over C->c responses] \\\n", - "e5hm9mp 1.581094 \n", + " argmax[indegree over C->C responses] \\\n", + "e6p7yrp 0.0 \n", "\n", - " entropy[indegree over C->c responses] \\\n", - "e5hm9mp 1.83102 \n", + " argmax[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.0 \n", "\n", - " 2nd-largest / max[outdegree over C->c responses] \\\n", - "e5hm9mp 1.0 \n", + " argmax[indegree over C->c responses] \\\n", + "e6p7yrp 0.0 \n", 
"\n", - " 2nd-largest / max[indegree over C->c responses] \\\n", - "e5hm9mp 0.333333 \n", + " argmax[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 0.0 \n", "\n", - " max[outdegree over C->C responses] \\\n", - "e5hm9mp 2.0 \n", + " argmax[indegree over c->c responses] \\\n", + "e6p7yrp 0.0 \n", "\n", - " max[indegree over C->C responses] \\\n", - "e5hm9mp 3.0 \n", + " argmax[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", " argmax[outdegree over C->C responses] \\\n", - "e5hm9mp 0.0 \n", + "e6p7yrp 0.0 \n", "\n", - " argmax[indegree over C->C responses] \\\n", - "e5hm9mp 1.0 \n", - "\n", - " norm.max[outdegree over C->C responses] \\\n", - "e5hm9mp 0.25 \n", + " argmax[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " norm.max[indegree over C->C responses] \\\n", - "e5hm9mp 0.375 \n", + " argmax[outdegree over C->c responses] \\\n", + "e6p7yrp 0.0 \n", "\n", - " 2nd-largest[outdegree over C->C responses] \\\n", - "e5hm9mp 2.0 \n", + " count[dyadic interaction motif over mid-thread] \\\n", + "e6p7yrp 2.0 \n", "\n", - " 2nd-largest[indegree over C->C responses] \\\n", - "e5hm9mp 2.0 \n", + " count[dyadic interaction motif] \\\n", + "e6p7yrp 3.0 \n", "\n", - " 2nd-argmax[outdegree over C->C responses] \\\n", - "e5hm9mp 2.0 \n", + " count[external reciprocity motif over mid-thread] \\\n", + "e6p7yrp 2.0 \n", "\n", - " 2nd-argmax[indegree over C->C responses] \\\n", - "e5hm9mp 0.0 \n", + " count[external reciprocity motif] \\\n", + "e6p7yrp 2.0 \n", "\n", - " norm.2nd-largest[outdegree over C->C responses] \\\n", - "e5hm9mp 0.25 \n", + " count[incoming triads over mid-thread] count[incoming triads] \\\n", + "e6p7yrp 3.0 10.0 \n", "\n", - " norm.2nd-largest[indegree over C->C responses] \\\n", - "e5hm9mp 0.25 \n", + " count[outgoing triads over mid-thread] count[outgoing triads] \\\n", + "e6p7yrp 3.0 3.0 \n", "\n", - " mean[outdegree over C->C responses] \\\n", - "e5hm9mp 1.6 \n", + " count[reciprocity 
motif over mid-thread] count[reciprocity motif] \\\n", + "e6p7yrp 2.0 4.0 \n", "\n", - " mean[indegree over C->C responses] \\\n", - "e5hm9mp 1.6 \n", + " entropy[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.242453 \n", "\n", - " mean-nonzero[outdegree over C->C responses] \\\n", - "e5hm9mp 1.6 \n", + " entropy[indegree over C->C responses] \\\n", + "e6p7yrp 1.073543 \n", "\n", - " mean-nonzero[indegree over C->C responses] \\\n", - "e5hm9mp 1.6 \n", + " entropy[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.791759 \n", "\n", - " prop-nonzero[outdegree over C->C responses] \\\n", - "e5hm9mp 1.0 \n", + " entropy[indegree over C->c responses] \\\n", + "e6p7yrp 1.83102 \n", "\n", - " prop-nonzero[indegree over C->C responses] \\\n", - "e5hm9mp 1.0 \n", + " entropy[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 1.791759 \n", "\n", - " prop-multiple[outdegree over C->C responses] \\\n", - "e5hm9mp 0.6 \n", + " entropy[indegree over c->c responses] \\\n", + "e6p7yrp 1.83102 \n", "\n", - " prop-multiple[indegree over C->C responses] \\\n", - "e5hm9mp 0.4 \n", + " entropy[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.242453 \n", "\n", " entropy[outdegree over C->C responses] \\\n", - "e5hm9mp 1.559581 \n", - "\n", - " entropy[indegree over C->C responses] \\\n", - "e5hm9mp 1.494175 \n", + "e6p7yrp 1.667462 \n", "\n", - " 2nd-largest / max[outdegree over C->C responses] \\\n", - "e5hm9mp 1.0 \n", + " entropy[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.242453 \n", "\n", - " 2nd-largest / max[indegree over C->C responses] \\\n", - "e5hm9mp 0.666667 \n", + " entropy[outdegree over C->c responses] \\\n", + "e6p7yrp 1.676988 \n", "\n", - " is-present[reciprocity motif] count[reciprocity motif] \\\n", - "e5hm9mp 1.0 4.0 \n", + " is-present[dyadic interaction motif over mid-thread] \\\n", + "e6p7yrp 1.0 \n", "\n", - " is-present[external reciprocity motif] \\\n", - "e5hm9mp 1.0 \n", + " is-present[dyadic 
interaction motif] \\\n", + "e6p7yrp 1.0 \n", "\n", - " count[external reciprocity motif] \\\n", - "e5hm9mp 4.0 \n", + " is-present[external reciprocity motif over mid-thread] \\\n", + "e6p7yrp 1.0 \n", "\n", - " is-present[dyadic interaction motif] \\\n", - "e5hm9mp 1.0 \n", + " is-present[external reciprocity motif] \\\n", + "e6p7yrp 1.0 \n", "\n", - " count[dyadic interaction motif] is-present[incoming triads] \\\n", - "e5hm9mp 3.0 1.0 \n", + " is-present[incoming triads over mid-thread] \\\n", + "e6p7yrp 1.0 \n", "\n", - " count[incoming triads] is-present[outgoing triads] \\\n", - "e5hm9mp 4.0 1.0 \n", + " is-present[incoming triads] \\\n", + "e6p7yrp 1.0 \n", "\n", - " count[outgoing triads] max[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 3.0 3.0 \n", + " is-present[outgoing triads over mid-thread] \\\n", + "e6p7yrp 1.0 \n", "\n", - " argmax[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 0.0 \n", + " is-present[outgoing triads] \\\n", + "e6p7yrp 1.0 \n", "\n", - " norm.max[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 0.375 \n", + " is-present[reciprocity motif over mid-thread] \\\n", + "e6p7yrp 1.0 \n", "\n", - " 2nd-largest[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", + " is-present[reciprocity motif] \\\n", + "e6p7yrp 1.0 \n", "\n", - " 2nd-argmax[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", + " max[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 3.0 \n", "\n", - " norm.2nd-largest[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 0.125 \n", + " max[indegree over C->C responses] \\\n", + "e6p7yrp 5.0 \n", "\n", - " mean[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 0.888889 \n", + " max[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " mean-nonzero[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 1.333333 \n", + " max[indegree over C->c responses] \\\n", + "e6p7yrp 3.0 \n", "\n", - " 
prop-nonzero[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 0.666667 \n", + " max[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " prop-multiple[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 0.166667 \n", + " max[indegree over c->c responses] \\\n", + "e6p7yrp 3.0 \n", "\n", - " entropy[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 1.667462 \n", + " max[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 3.0 \n", "\n", - " 2nd-largest / max[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 0.333333 \n", + " max[outdegree over C->C responses] \\\n", + "e6p7yrp 3.0 \n", "\n", " max[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 2.0 \n", + "e6p7yrp 3.0 \n", "\n", - " max[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 3.0 \n", + " max[outdegree over C->c responses] \\\n", + "e6p7yrp 3.0 \n", "\n", - " argmax[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", + " mean-nonzero[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.5 \n", "\n", - " argmax[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.0 \n", + " mean-nonzero[indegree over C->C responses] \\\n", + "e6p7yrp 2.0 \n", "\n", - " norm.max[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.25 \n", + " mean-nonzero[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " norm.max[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.375 \n", + " mean-nonzero[indegree over C->c responses] \\\n", + "e6p7yrp 1.285714 \n", "\n", - " 2nd-largest[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 2.0 \n", + " mean-nonzero[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " 2nd-largest[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", + " mean-nonzero[indegree over c->c responses] \\\n", + "e6p7yrp 1.285714 \n", "\n", - " 2nd-argmax[outdegree over C->c mid-thread responses] \\\n", - 
"e5hm9mp 2.0 \n", + " mean-nonzero[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.5 \n", "\n", - " 2nd-argmax[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", + " mean-nonzero[outdegree over C->C responses] \\\n", + "e6p7yrp 1.333333 \n", "\n", - " norm.2nd-largest[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.25 \n", + " mean-nonzero[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.5 \n", "\n", - " norm.2nd-largest[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.125 \n", + " mean-nonzero[outdegree over C->c responses] \\\n", + "e6p7yrp 1.5 \n", "\n", - " mean[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.6 \n", + " mean[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", + "\n", + " mean[indegree over C->C responses] \\\n", + "e6p7yrp 1.333333 \n", "\n", " mean[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.888889 \n", + "e6p7yrp 0.666667 \n", "\n", - " mean-nonzero[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 2.0 \n", + " mean[indegree over C->c responses] \\\n", + "e6p7yrp 0.9 \n", "\n", - " mean-nonzero[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.333333 \n", + " mean[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 0.666667 \n", "\n", - " prop-nonzero[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.8 \n", + " mean[indegree over c->c responses] \\\n", + "e6p7yrp 0.9 \n", "\n", - " prop-nonzero[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.666667 \n", + " mean[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " prop-multiple[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", + " mean[outdegree over C->C responses] \\\n", + "e6p7yrp 1.333333 \n", "\n", - " prop-multiple[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.166667 \n", + " mean[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " 
entropy[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.386294 \n", + " mean[outdegree over C->c responses] \\\n", + "e6p7yrp 1.5 \n", "\n", - " entropy[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.667462 \n", + " norm.2nd-largest[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.166667 \n", "\n", - " 2nd-largest / max[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", + " norm.2nd-largest[indegree over C->C responses] \\\n", + "e6p7yrp 0.125 \n", "\n", - " 2nd-largest / max[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.333333 \n", + " norm.2nd-largest[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.166667 \n", "\n", - " max[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 2.0 \n", + " norm.2nd-largest[indegree over C->c responses] \\\n", + "e6p7yrp 0.111111 \n", "\n", - " max[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 3.0 \n", + " norm.2nd-largest[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 0.166667 \n", "\n", - " argmax[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", + " norm.2nd-largest[indegree over c->c responses] \\\n", + "e6p7yrp 0.111111 \n", "\n", - " argmax[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.0 \n", + " norm.2nd-largest[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.166667 \n", "\n", - " norm.max[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.285714 \n", + " norm.2nd-largest[outdegree over C->C responses] \\\n", + "e6p7yrp 0.125 \n", + "\n", + " norm.2nd-largest[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.166667 \n", + "\n", + " norm.2nd-largest[outdegree over C->c responses] \\\n", + "e6p7yrp 0.222222 \n", "\n", " norm.max[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.428571 \n", + "e6p7yrp 0.5 \n", "\n", - " 2nd-largest[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 2.0 \n", + " norm.max[indegree over C->C 
responses] \\\n", + "e6p7yrp 0.625 \n", "\n", - " 2nd-largest[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", + " norm.max[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.166667 \n", "\n", - " 2nd-argmax[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 2.0 \n", + " norm.max[indegree over C->c responses] \\\n", + "e6p7yrp 0.333333 \n", "\n", - " 2nd-argmax[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", + " norm.max[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 0.166667 \n", "\n", - " norm.2nd-largest[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.285714 \n", + " norm.max[indegree over c->c responses] \\\n", + "e6p7yrp 0.333333 \n", "\n", - " norm.2nd-largest[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.142857 \n", + " norm.max[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.5 \n", "\n", - " mean[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.4 \n", + " norm.max[outdegree over C->C responses] \\\n", + "e6p7yrp 0.375 \n", "\n", - " mean[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.4 \n", + " norm.max[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.5 \n", "\n", - " mean-nonzero[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.75 \n", + " norm.max[outdegree over C->c responses] \\\n", + "e6p7yrp 0.333333 \n", "\n", - " mean-nonzero[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.4 \n", + " prop-multiple[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.25 \n", "\n", - " prop-nonzero[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.8 \n", + " prop-multiple[indegree over C->C responses] \\\n", + "e6p7yrp 0.25 \n", "\n", - " prop-nonzero[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", + " prop-multiple[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.0 \n", "\n", - " prop-multiple[outdegree over C->C mid-thread responses] 
\\\n", - "e5hm9mp 0.75 \n", + " prop-multiple[indegree over C->c responses] \\\n", + "e6p7yrp 0.142857 \n", "\n", - " prop-multiple[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.2 \n", + " prop-multiple[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 0.0 \n", "\n", - " entropy[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.351784 \n", + " prop-multiple[indegree over c->c responses] \\\n", + "e6p7yrp 0.142857 \n", "\n", - " entropy[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.475076 \n", + " prop-multiple[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.25 \n", "\n", - " 2nd-largest / max[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", + " prop-multiple[outdegree over C->C responses] \\\n", + "e6p7yrp 0.166667 \n", "\n", - " 2nd-largest / max[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.333333 \n", + " prop-multiple[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.25 \n", "\n", - " is-present[reciprocity motif over mid-thread] \\\n", - "e5hm9mp 1.0 \n", + " prop-multiple[outdegree over C->c responses] \\\n", + "e6p7yrp 0.333333 \n", "\n", - " count[reciprocity motif over mid-thread] \\\n", - "e5hm9mp 3.0 \n", + " prop-nonzero[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.666667 \n", "\n", - " is-present[external reciprocity motif over mid-thread] \\\n", - "e5hm9mp 1.0 \n", + " prop-nonzero[indegree over C->C responses] \\\n", + "e6p7yrp 0.666667 \n", "\n", - " count[external reciprocity motif over mid-thread] \\\n", - "e5hm9mp 2.0 \n", + " prop-nonzero[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.666667 \n", "\n", - " is-present[dyadic interaction motif over mid-thread] \\\n", - "e5hm9mp 1.0 \n", + " prop-nonzero[indegree over C->c responses] \\\n", + "e6p7yrp 0.7 \n", "\n", - " count[dyadic interaction motif over mid-thread] \\\n", - "e5hm9mp 2.0 \n", + " prop-nonzero[indegree over c->c mid-thread responses] \\\n", + 
"e6p7yrp 0.666667 \n", "\n", - " is-present[incoming triads over mid-thread] \\\n", - "e5hm9mp 1.0 \n", + " prop-nonzero[indegree over c->c responses] \\\n", + "e6p7yrp 0.7 \n", "\n", - " count[incoming triads over mid-thread] \\\n", - "e5hm9mp 3.0 \n", + " prop-nonzero[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.666667 \n", "\n", - " is-present[outgoing triads over mid-thread] \\\n", - "e5hm9mp 1.0 \n", + " prop-nonzero[outdegree over C->C responses] \\\n", + "e6p7yrp 1.0 \n", "\n", - " count[outgoing triads over mid-thread] \n", - "e5hm9mp 3.0 " + " prop-nonzero[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.666667 \n", + "\n", + " prop-nonzero[outdegree over C->c responses] \n", + "e6p7yrp 1.0 " ] }, - "execution_count": 20, + "execution_count": 23, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "pd.set_option(\"display.max_columns\", 140)\n", - "convo1.get_vector(\"hyperconvo\", as_dataframe=True)" + "pd.set_option('display.max_columns', 140)\n", + "convo1.get_vector('hyperconvo', as_dataframe=True)" ] }, { @@ -1217,7 +1218,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 27, "metadata": {}, "outputs": [ { @@ -1241,889 +1242,4441 @@ " \n", " \n", " \n", - " max[indegree over c->c responses]\n", - " argmax[indegree over c->c responses]\n", - " norm.max[indegree over c->c responses]\n", - " 2nd-largest[indegree over c->c responses]\n", + " 2nd-argmax[indegree over C->C mid-thread responses]\n", + " 2nd-argmax[indegree over C->C responses]\n", + " 2nd-argmax[indegree over C->c mid-thread responses]\n", + " 2nd-argmax[indegree over C->c responses]\n", + " 2nd-argmax[indegree over c->c mid-thread responses]\n", " 2nd-argmax[indegree over c->c responses]\n", - " norm.2nd-largest[indegree over c->c responses]\n", - " mean[indegree over c->c responses]\n", - " mean-nonzero[indegree over c->c responses]\n", - " prop-nonzero[indegree over c->c responses]\n", - " prop-multiple[indegree over 
c->c responses]\n", - " entropy[indegree over c->c responses]\n", - " 2nd-largest / max[indegree over c->c responses]\n", - " max[outdegree over C->c responses]\n", - " max[indegree over C->c responses]\n", - " argmax[outdegree over C->c responses]\n", - " argmax[indegree over C->c responses]\n", - " norm.max[outdegree over C->c responses]\n", - " norm.max[indegree over C->c responses]\n", - " 2nd-largest[outdegree over C->c responses]\n", - " 2nd-largest[indegree over C->c responses]\n", + " 2nd-argmax[outdegree over C->C mid-thread responses]\n", + " 2nd-argmax[outdegree over C->C responses]\n", + " 2nd-argmax[outdegree over C->c mid-thread responses]\n", " 2nd-argmax[outdegree over C->c responses]\n", - " 2nd-argmax[indegree over C->c responses]\n", - " norm.2nd-largest[outdegree over C->c responses]\n", - " norm.2nd-largest[indegree over C->c responses]\n", - " mean[outdegree over C->c responses]\n", - " mean[indegree over C->c responses]\n", - " mean-nonzero[outdegree over C->c responses]\n", - " mean-nonzero[indegree over C->c responses]\n", - " prop-nonzero[outdegree over C->c responses]\n", - " prop-nonzero[indegree over C->c responses]\n", - " prop-multiple[outdegree over C->c responses]\n", - " prop-multiple[indegree over C->c responses]\n", - " entropy[outdegree over C->c responses]\n", - " entropy[indegree over C->c responses]\n", - " 2nd-largest / max[outdegree over C->c responses]\n", + " 2nd-largest / max[indegree over C->C mid-thread responses]\n", + " 2nd-largest / max[indegree over C->C responses]\n", + " 2nd-largest / max[indegree over C->c mid-thread responses]\n", " 2nd-largest / max[indegree over C->c responses]\n", - " max[outdegree over C->C responses]\n", - " max[indegree over C->C responses]\n", - " argmax[outdegree over C->C responses]\n", - " argmax[indegree over C->C responses]\n", - " norm.max[outdegree over C->C responses]\n", - " norm.max[indegree over C->C responses]\n", - " 2nd-largest[outdegree over C->C responses]\n", - " 
2nd-largest[indegree over C->C responses]\n", - " 2nd-argmax[outdegree over C->C responses]\n", - " 2nd-argmax[indegree over C->C responses]\n", - " norm.2nd-largest[outdegree over C->C responses]\n", - " norm.2nd-largest[indegree over C->C responses]\n", - " mean[outdegree over C->C responses]\n", - " mean[indegree over C->C responses]\n", - " mean-nonzero[outdegree over C->C responses]\n", - " mean-nonzero[indegree over C->C responses]\n", - " prop-nonzero[outdegree over C->C responses]\n", - " prop-nonzero[indegree over C->C responses]\n", - " prop-multiple[outdegree over C->C responses]\n", - " prop-multiple[indegree over C->C responses]\n", - " entropy[outdegree over C->C responses]\n", - " entropy[indegree over C->C responses]\n", + " 2nd-largest / max[indegree over c->c mid-thread responses]\n", + " 2nd-largest / max[indegree over c->c responses]\n", + " 2nd-largest / max[outdegree over C->C mid-thread responses]\n", " 2nd-largest / max[outdegree over C->C responses]\n", - " 2nd-largest / max[indegree over C->C responses]\n", - " is-present[reciprocity motif]\n", - " count[reciprocity motif]\n", - " is-present[external reciprocity motif]\n", + " 2nd-largest / max[outdegree over C->c mid-thread responses]\n", + " 2nd-largest / max[outdegree over C->c responses]\n", + " 2nd-largest[indegree over C->C mid-thread responses]\n", + " 2nd-largest[indegree over C->C responses]\n", + " 2nd-largest[indegree over C->c mid-thread responses]\n", + " 2nd-largest[indegree over C->c responses]\n", + " 2nd-largest[indegree over c->c mid-thread responses]\n", + " 2nd-largest[indegree over c->c responses]\n", + " 2nd-largest[outdegree over C->C mid-thread responses]\n", + " 2nd-largest[outdegree over C->C responses]\n", + " 2nd-largest[outdegree over C->c mid-thread responses]\n", + " 2nd-largest[outdegree over C->c responses]\n", + " argmax[indegree over C->C mid-thread responses]\n", + " argmax[indegree over C->C responses]\n", + " argmax[indegree over C->c mid-thread 
responses]\n", + " argmax[indegree over C->c responses]\n", + " argmax[indegree over c->c mid-thread responses]\n", + " argmax[indegree over c->c responses]\n", + " argmax[outdegree over C->C mid-thread responses]\n", + " argmax[outdegree over C->C responses]\n", + " argmax[outdegree over C->c mid-thread responses]\n", + " argmax[outdegree over C->c responses]\n", + " count[dyadic interaction motif over mid-thread]\n", + " count[dyadic interaction motif]\n", + " count[external reciprocity motif over mid-thread]\n", " count[external reciprocity motif]\n", + " count[incoming triads over mid-thread]\n", + " count[incoming triads]\n", + " count[outgoing triads over mid-thread]\n", + " count[outgoing triads]\n", + " count[reciprocity motif over mid-thread]\n", + " count[reciprocity motif]\n", + " entropy[indegree over C->C mid-thread responses]\n", + " entropy[indegree over C->C responses]\n", + " entropy[indegree over C->c mid-thread responses]\n", + " entropy[indegree over C->c responses]\n", + " entropy[indegree over c->c mid-thread responses]\n", + " entropy[indegree over c->c responses]\n", + " entropy[outdegree over C->C mid-thread responses]\n", + " entropy[outdegree over C->C responses]\n", + " entropy[outdegree over C->c mid-thread responses]\n", + " entropy[outdegree over C->c responses]\n", + " is-present[dyadic interaction motif over mid-thread]\n", " is-present[dyadic interaction motif]\n", - " count[dyadic interaction motif]\n", + " is-present[external reciprocity motif over mid-thread]\n", + " is-present[external reciprocity motif]\n", + " is-present[incoming triads over mid-thread]\n", " is-present[incoming triads]\n", - " count[incoming triads]\n", + " is-present[outgoing triads over mid-thread]\n", " is-present[outgoing triads]\n", - " count[outgoing triads]\n", + " is-present[reciprocity motif over mid-thread]\n", + " is-present[reciprocity motif]\n", + " max[indegree over C->C mid-thread responses]\n", + " max[indegree over C->C responses]\n", + " 
max[indegree over C->c mid-thread responses]\n", + " max[indegree over C->c responses]\n", " max[indegree over c->c mid-thread responses]\n", - " argmax[indegree over c->c mid-thread responses]\n", - " norm.max[indegree over c->c mid-thread responses]\n", - " 2nd-largest[indegree over c->c mid-thread responses]\n", - " 2nd-argmax[indegree over c->c mid-thread responses]\n", - " norm.2nd-largest[indegree over c->c mid-thread responses]\n", - " mean[indegree over c->c mid-thread responses]\n", - " mean-nonzero[indegree over c->c mid-thread responses]\n", - " prop-nonzero[indegree over c->c mid-thread responses]\n", - " prop-multiple[indegree over c->c mid-thread responses]\n", - " entropy[indegree over c->c mid-thread responses]\n", - " 2nd-largest / max[indegree over c->c mid-thread responses]\n", + " max[indegree over c->c responses]\n", + " max[outdegree over C->C mid-thread responses]\n", + " max[outdegree over C->C responses]\n", " max[outdegree over C->c mid-thread responses]\n", - " max[indegree over C->c mid-thread responses]\n", - " argmax[outdegree over C->c mid-thread responses]\n", - " argmax[indegree over C->c mid-thread responses]\n", - " norm.max[outdegree over C->c mid-thread responses]\n", - " norm.max[indegree over C->c mid-thread responses]\n", - " 2nd-largest[outdegree over C->c mid-thread responses]\n", - " 2nd-largest[indegree over C->c mid-thread responses]\n", - " 2nd-argmax[outdegree over C->c mid-thread responses]\n", - " 2nd-argmax[indegree over C->c mid-thread responses]\n", - " norm.2nd-largest[outdegree over C->c mid-thread responses]\n", - " norm.2nd-largest[indegree over C->c mid-thread responses]\n", - " mean[outdegree over C->c mid-thread responses]\n", - " mean[indegree over C->c mid-thread responses]\n", - " mean-nonzero[outdegree over C->c mid-thread responses]\n", + " max[outdegree over C->c responses]\n", + " mean-nonzero[indegree over C->C mid-thread responses]\n", + " mean-nonzero[indegree over C->C responses]\n", " 
mean-nonzero[indegree over C->c mid-thread responses]\n", - " prop-nonzero[outdegree over C->c mid-thread responses]\n", - " prop-nonzero[indegree over C->c mid-thread responses]\n", - " prop-multiple[outdegree over C->c mid-thread responses]\n", - " prop-multiple[indegree over C->c mid-thread responses]\n", - " entropy[outdegree over C->c mid-thread responses]\n", - " entropy[indegree over C->c mid-thread responses]\n", - " 2nd-largest / max[outdegree over C->c mid-thread responses]\n", - " 2nd-largest / max[indegree over C->c mid-thread responses]\n", - " max[outdegree over C->C mid-thread responses]\n", - " max[indegree over C->C mid-thread responses]\n", - " argmax[outdegree over C->C mid-thread responses]\n", - " argmax[indegree over C->C mid-thread responses]\n", - " norm.max[outdegree over C->C mid-thread responses]\n", - " norm.max[indegree over C->C mid-thread responses]\n", - " 2nd-largest[outdegree over C->C mid-thread responses]\n", - " 2nd-largest[indegree over C->C mid-thread responses]\n", - " 2nd-argmax[outdegree over C->C mid-thread responses]\n", - " 2nd-argmax[indegree over C->C mid-thread responses]\n", - " norm.2nd-largest[outdegree over C->C mid-thread responses]\n", - " norm.2nd-largest[indegree over C->C mid-thread responses]\n", - " mean[outdegree over C->C mid-thread responses]\n", - " mean[indegree over C->C mid-thread responses]\n", + " mean-nonzero[indegree over C->c responses]\n", + " mean-nonzero[indegree over c->c mid-thread responses]\n", + " mean-nonzero[indegree over c->c responses]\n", " mean-nonzero[outdegree over C->C mid-thread responses]\n", - " mean-nonzero[indegree over C->C mid-thread responses]\n", - " prop-nonzero[outdegree over C->C mid-thread responses]\n", - " prop-nonzero[indegree over C->C mid-thread responses]\n", - " prop-multiple[outdegree over C->C mid-thread responses]\n", + " mean-nonzero[outdegree over C->C responses]\n", + " mean-nonzero[outdegree over C->c mid-thread responses]\n", + " 
mean-nonzero[outdegree over C->c responses]\n", + " mean[indegree over C->C mid-thread responses]\n", + " mean[indegree over C->C responses]\n", + " mean[indegree over C->c mid-thread responses]\n", + " mean[indegree over C->c responses]\n", + " mean[indegree over c->c mid-thread responses]\n", + " mean[indegree over c->c responses]\n", + " mean[outdegree over C->C mid-thread responses]\n", + " mean[outdegree over C->C responses]\n", + " mean[outdegree over C->c mid-thread responses]\n", + " mean[outdegree over C->c responses]\n", + " norm.2nd-largest[indegree over C->C mid-thread responses]\n", + " norm.2nd-largest[indegree over C->C responses]\n", + " norm.2nd-largest[indegree over C->c mid-thread responses]\n", + " norm.2nd-largest[indegree over C->c responses]\n", + " norm.2nd-largest[indegree over c->c mid-thread responses]\n", + " norm.2nd-largest[indegree over c->c responses]\n", + " norm.2nd-largest[outdegree over C->C mid-thread responses]\n", + " norm.2nd-largest[outdegree over C->C responses]\n", + " norm.2nd-largest[outdegree over C->c mid-thread responses]\n", + " norm.2nd-largest[outdegree over C->c responses]\n", + " norm.max[indegree over C->C mid-thread responses]\n", + " norm.max[indegree over C->C responses]\n", + " norm.max[indegree over C->c mid-thread responses]\n", + " norm.max[indegree over C->c responses]\n", + " norm.max[indegree over c->c mid-thread responses]\n", + " norm.max[indegree over c->c responses]\n", + " norm.max[outdegree over C->C mid-thread responses]\n", + " norm.max[outdegree over C->C responses]\n", + " norm.max[outdegree over C->c mid-thread responses]\n", + " norm.max[outdegree over C->c responses]\n", " prop-multiple[indegree over C->C mid-thread responses]\n", - " entropy[outdegree over C->C mid-thread responses]\n", - " entropy[indegree over C->C mid-thread responses]\n", - " 2nd-largest / max[outdegree over C->C mid-thread responses]\n", - " 2nd-largest / max[indegree over C->C mid-thread responses]\n", - " 
is-present[reciprocity motif over mid-thread]\n", - " count[reciprocity motif over mid-thread]\n", - " is-present[external reciprocity motif over mid-thread]\n", - " count[external reciprocity motif over mid-thread]\n", - " is-present[dyadic interaction motif over mid-thread]\n", - " count[dyadic interaction motif over mid-thread]\n", - " is-present[incoming triads over mid-thread]\n", - " count[incoming triads over mid-thread]\n", - " is-present[outgoing triads over mid-thread]\n", - " count[outgoing triads over mid-thread]\n", + " prop-multiple[indegree over C->C responses]\n", + " prop-multiple[indegree over C->c mid-thread responses]\n", + " prop-multiple[indegree over C->c responses]\n", + " prop-multiple[indegree over c->c mid-thread responses]\n", + " prop-multiple[indegree over c->c responses]\n", + " prop-multiple[outdegree over C->C mid-thread responses]\n", + " prop-multiple[outdegree over C->C responses]\n", + " prop-multiple[outdegree over C->c mid-thread responses]\n", + " prop-multiple[outdegree over C->c responses]\n", + " prop-nonzero[indegree over C->C mid-thread responses]\n", + " prop-nonzero[indegree over C->C responses]\n", + " prop-nonzero[indegree over C->c mid-thread responses]\n", + " prop-nonzero[indegree over C->c responses]\n", + " prop-nonzero[indegree over c->c mid-thread responses]\n", + " prop-nonzero[indegree over c->c responses]\n", + " prop-nonzero[outdegree over C->C mid-thread responses]\n", + " prop-nonzero[outdegree over C->C responses]\n", + " prop-nonzero[outdegree over C->c mid-thread responses]\n", + " prop-nonzero[outdegree over C->c responses]\n", " \n", " \n", " \n", " \n", - " e5hm9mp\n", - " 3.0\n", + " e6p7yrp\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", " 1.0\n", - " 0.333333\n", " 1.0\n", - " 0.0\n", - " 0.111111\n", - " 0.9\n", - " 1.285714\n", - " 0.7\n", - " 0.142857\n", - " 1.831020\n", - " 0.333333\n", - " 2.0\n", - " 3.0\n", - " 0.0\n", " 1.0\n", - " 0.222222\n", - " 0.333333\n", " 2.0\n", " 1.0\n", " 2.0\n", - " 
0.0\n", - " 0.222222\n", - " 0.111111\n", - " 1.800000\n", - " 0.9\n", - " 1.800000\n", - " 1.285714\n", - " 1.000000\n", - " 0.7\n", - " 0.800000\n", - " 0.142857\n", - " 1.581094\n", - " 1.831020\n", - " 1.000000\n", - " 0.333333\n", - " 2.0\n", " 3.0\n", - " 0.0\n", - " 1.0\n", - " 0.250000\n", - " 0.375000\n", - " 2.0\n", - " 2.0\n", - " 2.0\n", - " 0.0\n", - " 0.250000\n", - " 0.250000\n", - " 1.600000\n", - " 1.600000\n", - " 1.600000\n", - " 1.600000\n", - " 1.000000\n", + " 0.333333\n", + " 0.200000\n", " 1.000000\n", - " 0.600000\n", - " 0.400000\n", - " 1.559581\n", - " 1.494175\n", + " 0.333333\n", " 1.000000\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", " 0.666667\n", " 1.0\n", - " 4.0\n", " 1.0\n", - " 4.0\n", " 1.0\n", - " 3.0\n", " 1.0\n", - " 4.0\n", " 1.0\n", - " 3.0\n", - " 3.0\n", - " 0.0\n", - " 0.375000\n", " 1.0\n", " 1.0\n", - " 0.125000\n", - " 0.888889\n", - " 1.333333\n", - " 0.666667\n", - " 0.166667\n", - " 1.667462\n", - " 0.333333\n", - " 2.0\n", - " 3.0\n", " 1.0\n", - " 0.0\n", - " 0.250000\n", - " 0.375000\n", - " 2.0\n", " 1.0\n", " 2.0\n", " 1.0\n", - " 0.250000\n", - " 0.125000\n", - " 1.600000\n", - " 0.888889\n", - " 2.000000\n", - " 1.333333\n", - " 0.800000\n", - " 0.666667\n", - " 1.000000\n", - " 0.166667\n", - " 1.386294\n", - " 1.667462\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", " 1.0\n", - " 0.333333\n", - " 2.0\n", - " 3.0\n", + " 0.0\n", " 1.0\n", " 0.0\n", - " 0.285714\n", - " 0.428571\n", " 2.0\n", - " 1.0\n", + " 3.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 10.0\n", + " 3.0\n", + " 3.0\n", " 2.0\n", + " 4.0\n", + " 1.242453\n", + " 1.073543\n", + " 1.791759\n", + " 1.831020\n", + " 1.791759\n", + " 1.831020\n", + " 1.242453\n", + " 1.667462\n", + " 1.242453\n", + " 1.676988\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", " 1.0\n", - " 0.285714\n", - " 0.142857\n", - " 1.400000\n", - " 1.400000\n", - " 1.750000\n", - " 1.400000\n", - " 0.800000\n", - " 
1.000000\n", - " 0.750000\n", - " 0.200000\n", - " 1.351784\n", - " 1.475076\n", " 1.0\n", - " 0.333333\n", " 1.0\n", - " 3.0\n", " 1.0\n", - " 2.0\n", " 1.0\n", - " 2.0\n", " 1.0\n", " 3.0\n", + " 5.0\n", " 1.0\n", " 3.0\n", - " \n", - " \n", - " e5ytz1d\n", + " 1.0\n", " 3.0\n", - " 0.0\n", - " 0.333333\n", " 3.0\n", - " 2.0\n", - " 0.333333\n", - " 0.9\n", - " 2.250000\n", - " 0.4\n", - " 0.750000\n", - " 1.310784\n", - " 1.000000\n", - " 1.0\n", " 3.0\n", - " 0.0\n", - " 0.0\n", - " 0.111111\n", - " 0.333333\n", - " 1.0\n", " 3.0\n", - " 1.0\n", - " 2.0\n", - " 0.111111\n", - " 0.333333\n", + " 3.0\n", + " 1.500000\n", + " 2.000000\n", + " 1.000000\n", + " 1.285714\n", " 1.000000\n", + " 1.285714\n", + " 1.500000\n", + " 1.333333\n", + " 1.500000\n", + " 1.500000\n", + " 1.000000\n", + " 1.333333\n", + " 0.666667\n", + " 0.9\n", + " 0.666667\n", " 0.9\n", " 1.000000\n", - " 2.250000\n", + " 1.333333\n", " 1.000000\n", - " 0.4\n", + " 1.500000\n", + " 0.166667\n", + " 0.125000\n", + " 0.166667\n", + " 0.111111\n", + " 0.166667\n", + " 0.111111\n", + " 0.166667\n", + " 0.125000\n", + " 0.166667\n", + " 0.222222\n", + " 0.500000\n", + " 0.625000\n", + " 0.166667\n", + " 0.333333\n", + " 0.166667\n", + " 0.333333\n", + " 0.500000\n", + " 0.375000\n", + " 0.500000\n", + " 0.333333\n", + " 0.250000\n", + " 0.250000\n", " 0.000000\n", - " 0.750000\n", - " 2.197225\n", - " 1.310784\n", + " 0.142857\n", + " 0.000000\n", + " 0.142857\n", + " 0.250000\n", + " 0.166667\n", + " 0.250000\n", + " 0.333333\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.7\n", + " 0.666667\n", + " 0.7\n", + " 0.666667\n", " 1.000000\n", + " 0.666667\n", " 1.000000\n", + " \n", + " \n", + " e5ywqyk\n", + " 1.0\n", + " 1.0\n", " 1.0\n", - " 3.0\n", " 0.0\n", + " 1.0\n", " 0.0\n", - " 0.111111\n", - " 0.333333\n", + " 2.0\n", " 1.0\n", " 3.0\n", - " 1.0\n", - " 2.0\n", - " 0.111111\n", - " 0.333333\n", - " 1.000000\n", - " 1.000000\n", - " 1.000000\n", - " 2.250000\n", + " 3.0\n", + " 
0.500000\n", " 1.000000\n", - " 0.444444\n", - " 0.000000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.750000\n", " 0.750000\n", - " 2.197225\n", - " 1.310784\n", - " 1.000000\n", - " 1.000000\n", " 1.0\n", + " 2.0\n", " 1.0\n", " 1.0\n", - " 5.0\n", " 1.0\n", " 1.0\n", " 1.0\n", - " 7.0\n", + " 1.0\n", + " 3.0\n", + " 3.0\n", + " 0.0\n", " 0.0\n", " 0.0\n", - " 3.0\n", " 1.0\n", - " 0.500000\n", - " 2.0\n", " 0.0\n", - " 0.333333\n", - " 0.666667\n", - " 2.000000\n", - " 0.333333\n", - " 0.666667\n", - " 1.011404\n", - " 0.666667\n", " 1.0\n", - " 3.0\n", - " 3.0\n", " 1.0\n", - " 0.166667\n", - " 0.500000\n", + " 0.0\n", " 1.0\n", - " 2.0\n", - " 4.0\n", " 0.0\n", - " 0.166667\n", - " 0.333333\n", - " 0.666667\n", - " 0.666667\n", - " 1.000000\n", - " 2.000000\n", - " 0.666667\n", - " 0.333333\n", - " 0.000000\n", - " 0.666667\n", - " 1.791759\n", - " 1.011404\n", - " 1.0\n", - " 0.666667\n", " 1.0\n", - " 3.0\n", - " 3.0\n", + " 2.0\n", " 1.0\n", - " 0.166667\n", - " 0.500000\n", + " 2.0\n", " 1.0\n", " 2.0\n", - " 4.0\n", - " 0.0\n", - " 0.166667\n", - " 0.333333\n", - " 0.666667\n", - " 0.666667\n", - " 1.000000\n", - " 2.000000\n", - " 0.666667\n", - " 0.333333\n", - " 0.000000\n", - " 0.666667\n", - " 1.791759\n", - " 1.011404\n", " 1.0\n", - " 0.666667\n", - " 0.0\n", - " 0.0\n", - " 0.0\n", - " 0.0\n", - " 0.0\n", - " 0.0\n", " 1.0\n", - " 4.0\n", - " 0.0\n", - " 0.0\n", - " \n", - " \n", - " e6ls80j\n", - " 4.0\n", - " 0.0\n", - " 0.444444\n", - " 3.0\n", - " 2.0\n", - " 0.333333\n", - " 0.9\n", - " 2.250000\n", - " 0.4\n", - " 0.500000\n", + " 5.0\n", + " 6.0\n", + " 1.039721\n", + " 1.054920\n", + " 1.906155\n", + " 2.043192\n", + " 1.906155\n", + " 2.043192\n", + " 1.039721\n", + " 1.332179\n", + " 0.974315\n", " 1.214890\n", - " 0.750000\n", - " 3.0\n", - " 4.0\n", - " 3.0\n", - " 0.0\n", - " 0.333333\n", - " 0.444444\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 
1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", " 2.0\n", - " 3.0\n", " 2.0\n", " 2.0\n", - " 0.222222\n", - " 0.333333\n", - " 1.285714\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 4.0\n", + " 4.0\n", + " 1.333333\n", + " 1.666667\n", + " 1.142857\n", + " 1.125000\n", + " 1.142857\n", + " 1.125000\n", + " 1.333333\n", + " 1.250000\n", + " 2.666667\n", + " 2.250000\n", + " 1.000000\n", + " 1.250000\n", + " 0.888889\n", " 0.9\n", - " 1.500000\n", + " 0.888889\n", + " 0.9\n", + " 1.000000\n", + " 1.250000\n", + " 2.000000\n", " 2.250000\n", - " 0.857143\n", - " 0.4\n", + " 0.250000\n", + " 0.400000\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.250000\n", + " 0.200000\n", + " 0.375000\n", " 0.333333\n", " 0.500000\n", - " 1.676988\n", - " 1.214890\n", + " 0.400000\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.500000\n", + " 0.400000\n", + " 0.500000\n", + " 0.444444\n", + " 0.333333\n", + " 0.666667\n", + " 0.142857\n", + " 0.125000\n", + " 0.142857\n", + " 0.125000\n", + " 0.333333\n", + " 0.250000\n", " 0.666667\n", + " 0.500000\n", + " 0.750000\n", + " 0.750000\n", + " 0.777778\n", + " 0.8\n", + " 0.777778\n", + " 0.8\n", " 0.750000\n", + " 1.000000\n", + " 0.750000\n", + " 1.000000\n", + " \n", + " \n", + " e5qv9rj\n", + " 0.0\n", " 3.0\n", - " 4.0\n", + " 0.0\n", " 3.0\n", " 0.0\n", - " 0.333333\n", - " 0.444444\n", - " 2.0\n", + " 3.0\n", " 3.0\n", " 2.0\n", + " 3.0\n", " 2.0\n", - " 0.222222\n", " 0.333333\n", - " 1.285714\n", - " 1.285714\n", - " 1.500000\n", - " 2.250000\n", - " 0.857143\n", - " 0.571429\n", + " 1.000000\n", " 0.333333\n", - " 0.500000\n", - " 1.676988\n", - " 1.214890\n", - " 0.666667\n", - " 0.750000\n", - " 1.0\n", + " 1.000000\n", + " 0.333333\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", " 1.0\n", + " 3.0\n", " 1.0\n", - " 4.0\n", + " 3.0\n", " 1.0\n", + " 3.0\n", " 1.0\n", " 1.0\n", - " 9.0\n", " 
1.0\n", - " 4.0\n", - " 3.0\n", " 1.0\n", - " 0.600000\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", " 1.0\n", - " 4.0\n", - " 0.200000\n", - " 0.555556\n", - " 1.666667\n", - " 0.333333\n", - " 0.333333\n", - " 0.950271\n", - " 0.333333\n", " 2.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 5.0\n", + " 6.0\n", " 3.0\n", - " 2.0\n", + " 6.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.242453\n", + " 1.464816\n", + " 1.242453\n", + " 1.464816\n", + " 1.242453\n", + " 1.464816\n", + " 1.791759\n", + " 2.197225\n", + " 1.791759\n", + " 2.197225\n", + " 0.0\n", + " 0.0\n", " 1.0\n", - " 0.400000\n", - " 0.600000\n", " 1.0\n", " 1.0\n", " 1.0\n", - " 4.0\n", - " 0.200000\n", - " 0.200000\n", - " 0.833333\n", - " 0.555556\n", - " 1.250000\n", - " 1.666667\n", - " 0.666667\n", - " 0.333333\n", - " 0.250000\n", - " 0.333333\n", - " 1.332179\n", - " 0.950271\n", - " 0.5\n", - " 0.333333\n", - " 2.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", " 3.0\n", - " 2.0\n", " 1.0\n", - " 0.400000\n", - " 0.600000\n", " 1.0\n", " 1.0\n", " 1.0\n", - " 4.0\n", - " 0.200000\n", - " 0.200000\n", - " 0.833333\n", - " 0.833333\n", - " 1.250000\n", - " 1.666667\n", - " 0.666667\n", + " 1.500000\n", + " 1.800000\n", + " 1.500000\n", + " 1.800000\n", + " 1.500000\n", + " 1.800000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.666667\n", + " 0.900000\n", + " 0.666667\n", + " 0.9\n", + " 0.666667\n", + " 0.9\n", + " 0.666667\n", + " 0.900000\n", + " 0.666667\n", + " 0.900000\n", + " 0.166667\n", + " 0.333333\n", + " 0.166667\n", + " 0.333333\n", + " 0.166667\n", + " 0.333333\n", + " 0.166667\n", + " 0.111111\n", + " 0.166667\n", + " 0.111111\n", " 0.500000\n", - " 0.250000\n", " 0.333333\n", - " 1.332179\n", - " 0.950271\n", - " 0.5\n", + " 0.500000\n", " 0.333333\n", - " 1.0\n", - " 1.0\n", - " 1.0\n", - " 1.0\n", - " 1.0\n", - " 1.0\n", - " 
1.0\n", - " 3.0\n", - " 1.0\n", - " 1.0\n", + " 0.500000\n", + " 0.333333\n", + " 0.166667\n", + " 0.111111\n", + " 0.166667\n", + " 0.111111\n", + " 0.250000\n", + " 0.400000\n", + " 0.250000\n", + " 0.400000\n", + " 0.250000\n", + " 0.400000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.444444\n", + " 0.500000\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.5\n", + " 0.666667\n", + " 0.900000\n", + " 0.666667\n", + " 0.900000\n", " \n", " \n", - " e5mhgl5\n", - " 2.0\n", + " e6jhojf\n", + " 0.0\n", " 1.0\n", - " 0.222222\n", - " 2.0\n", - " 4.0\n", - " 0.222222\n", - " 0.9\n", - " 1.285714\n", - " 0.7\n", - " 0.285714\n", - " 1.889159\n", - " 1.000000\n", - " 2.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", " 2.0\n", - " 4.0\n", " 1.0\n", - " 0.222222\n", - " 0.222222\n", - " 2.0\n", - " 2.0\n", - " 6.0\n", - " 4.0\n", - " 0.222222\n", - " 0.222222\n", - " 1.125000\n", - " 0.9\n", - " 1.285714\n", - " 1.285714\n", - " 0.875000\n", - " 0.7\n", - " 0.285714\n", - " 0.285714\n", - " 1.889159\n", - " 1.889159\n", - " 1.000000\n", - " 1.000000\n", " 2.0\n", " 2.0\n", - " 4.0\n", - " 1.0\n", + " 0.333333\n", " 0.250000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", " 0.250000\n", - " 1.0\n", - " 2.0\n", - " 1.0\n", - " 4.0\n", - " 0.125000\n", " 0.250000\n", - " 1.000000\n", - " 1.000000\n", - " 1.142857\n", - " 1.333333\n", - " 0.875000\n", - " 0.750000\n", - " 0.142857\n", - " 0.333333\n", - " 1.906155\n", - " 1.732868\n", " 0.500000\n", - " 1.000000\n", + " 0.500000\n", + " 1.0\n", " 1.0\n", - " 2.0\n", " 1.0\n", - " 6.0\n", " 1.0\n", " 1.0\n", " 1.0\n", - " 2.0\n", " 1.0\n", " 1.0\n", - " 2.0\n", - " 0.0\n", - " 0.250000\n", - " 2.0\n", - " 3.0\n", - " 0.250000\n", - " 0.888889\n", - " 1.333333\n", - " 0.666667\n", - " 0.333333\n", - " 1.732868\n", - " 1.000000\n", " 2.0\n", " 2.0\n", - " 3.0\n", + " 1.0\n", " 0.0\n", - " 0.250000\n", - " 0.250000\n", - " 2.0\n", - " 2.0\n", - " 5.0\n", " 
3.0\n", - " 0.250000\n", - " 0.250000\n", - " 1.142857\n", - " 0.888889\n", - " 1.333333\n", - " 1.333333\n", - " 0.857143\n", - " 0.666667\n", - " 0.333333\n", - " 0.333333\n", - " 1.732868\n", - " 1.732868\n", - " 1.0\n", - " 1.000000\n", - " 2.0\n", - " 2.0\n", + " 4.0\n", " 3.0\n", - " 0.0\n", - " 0.285714\n", - " 0.285714\n", + " 4.0\n", " 1.0\n", - " 2.0\n", + " 0.0\n", " 1.0\n", + " 0.0\n", " 3.0\n", - " 0.142857\n", - " 0.285714\n", - " 1.000000\n", - " 1.000000\n", - " 1.166667\n", - " 1.400000\n", - " 0.857143\n", - " 0.714286\n", - " 0.166667\n", - " 0.400000\n", - " 1.747868\n", - " 1.549826\n", - " 0.5\n", - " 1.000000\n", - " 1.0\n", - " 2.0\n", - " 1.0\n", " 4.0\n", + " 4.0\n", + " 4.0\n", + " 3.0\n", + " 6.0\n", + " 6.0\n", + " 6.0\n", + " 3.0\n", + " 4.0\n", + " 1.475076\n", + " 1.386294\n", + " 1.906155\n", + " 2.043192\n", + " 1.906155\n", + " 2.043192\n", + " 1.153742\n", + " 1.386294\n", + " 1.213008\n", + " 1.427061\n", " 1.0\n", " 1.0\n", " 1.0\n", - " 2.0\n", - " 1.0\n", - " 1.0\n", - " \n", - " \n", - " e6w6fah\n", - " 9.0\n", - " 0.0\n", - " 1.000000\n", - " 0.0\n", - " 1.0\n", - " 0.000000\n", - " 0.9\n", - " 9.000000\n", - " 0.1\n", - " 1.000000\n", - " 0.000000\n", - " 0.000000\n", " 1.0\n", - " 8.0\n", " 1.0\n", - " 0.0\n", - " 0.125000\n", - " 1.000000\n", " 1.0\n", - " 0.0\n", - " 2.0\n", " 1.0\n", - " 0.125000\n", - " 0.000000\n", - " 0.888889\n", - " 0.8\n", - " 1.000000\n", - " 8.000000\n", - " 0.888889\n", - " 0.1\n", - " 0.000000\n", - " 1.000000\n", - " 2.079442\n", - " 0.000000\n", - " 1.000000\n", - " 0.000000\n", " 1.0\n", - " 8.0\n", " 1.0\n", - " 0.0\n", - " 0.125000\n", - " 1.000000\n", " 1.0\n", - " 0.0\n", + " 3.0\n", + " 4.0\n", " 2.0\n", - " 1.0\n", - " 0.125000\n", - " 0.000000\n", - " 0.888889\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 1.400000\n", + " 1.600000\n", + " 1.142857\n", + " 1.125000\n", + " 1.142857\n", + " 1.125000\n", + " 1.750000\n", + " 1.600000\n", + 
" 2.000000\n", + " 1.800000\n", + " 1.400000\n", + " 1.600000\n", " 0.888889\n", - " 1.000000\n", - " 8.000000\n", + " 0.9\n", " 0.888889\n", + " 0.9\n", + " 1.400000\n", + " 1.600000\n", + " 1.600000\n", + " 1.800000\n", + " 0.142857\n", + " 0.125000\n", + " 0.125000\n", " 0.111111\n", - " 0.000000\n", + " 0.125000\n", + " 0.111111\n", + " 0.142857\n", + " 0.125000\n", + " 0.250000\n", + " 0.222222\n", + " 0.428571\n", + " 0.500000\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.571429\n", + " 0.500000\n", + " 0.500000\n", + " 0.444444\n", + " 0.200000\n", + " 0.200000\n", + " 0.142857\n", + " 0.125000\n", + " 0.142857\n", + " 0.125000\n", + " 0.250000\n", + " 0.200000\n", + " 0.500000\n", + " 0.400000\n", " 1.000000\n", - " 2.079442\n", - " 0.000000\n", " 1.000000\n", - " 0.000000\n", - " 0.0\n", + " 0.777778\n", + " 0.8\n", + " 0.777778\n", + " 0.8\n", + " 0.800000\n", + " 1.000000\n", + " 0.800000\n", + " 1.000000\n", + " \n", + " \n", + " e6989ii\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", " 0.0\n", " 0.0\n", + " 1.0\n", + " 1.0\n", " 0.0\n", + " 1.0\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.500000\n", + " 0.500000\n", + " 1.000000\n", + " 1.000000\n", + " 0.750000\n", + " 1.000000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 4.0\n", " 0.0\n", " 0.0\n", - " 1.0\n", - " 28.0\n", " 0.0\n", " 0.0\n", + " 5.0\n", + " 6.0\n", " 0.0\n", " 0.0\n", - " 0.000000\n", + " 1.0\n", " 0.0\n", " 1.0\n", - " NaN\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " NaN\n", - " NaN\n", + " 1.0\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", - " 0.000000\n", - " 0.000000\n", " 0.0\n", " 0.0\n", + " 7.0\n", + " 8.0\n", + " 0.693147\n", + " 0.693147\n", + " 1.945910\n", + " 2.079442\n", + " 1.906155\n", + " 2.043192\n", + " 0.693147\n", + " 0.693147\n", + " 0.682908\n", + " 0.693147\n", " 1.0\n", " 1.0\n", - " NaN\n", - 
" NaN\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " 0.000000\n", - " NaN\n", - " NaN\n", - " NaN\n", - " NaN\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", - " 0.000000\n", - " 0.000000\n", " 0.0\n", " 0.0\n", " 1.0\n", " 1.0\n", - " NaN\n", - " NaN\n", - " 0.000000\n", - " 0.000000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 4.0\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.142857\n", + " 1.125000\n", + " 1.000000\n", + " 1.000000\n", + " 3.500000\n", + " 4.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.777778\n", + " 0.8\n", + " 0.888889\n", + " 0.9\n", + " 1.000000\n", + " 1.000000\n", + " 3.500000\n", + " 4.000000\n", + " 0.500000\n", + " 0.500000\n", + " 0.142857\n", + " 0.125000\n", + " 0.125000\n", + " 0.111111\n", + " 0.500000\n", + " 0.500000\n", + " 0.428571\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.142857\n", + " 0.125000\n", + " 0.250000\n", + " 0.222222\n", + " 0.500000\n", + " 0.500000\n", + " 0.571429\n", + " 0.500000\n", " 0.000000\n", " 0.000000\n", " 0.000000\n", " 0.000000\n", + " 0.142857\n", + " 0.125000\n", " 0.000000\n", " 0.000000\n", - " NaN\n", - " NaN\n", - " NaN\n", - " NaN\n", - " 0.0\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.777778\n", + " 0.8\n", + " 0.777778\n", + " 0.8\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " \n", + " \n", + " e69lgse\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 0.333333\n", + " 1.000000\n", + " 0.333333\n", + " 1.000000\n", + " 0.333333\n", + " 1.000000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 1.000000\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", " 0.0\n", 
+ " 1.0\n", " 0.0\n", + " 1.0\n", " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 4.0\n", + " 3.0\n", + " 6.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.242453\n", + " 1.255482\n", + " 1.242453\n", + " 1.464816\n", + " 1.242453\n", + " 1.464816\n", + " 1.560710\n", + " 1.906155\n", + " 1.560710\n", + " 1.889159\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.500000\n", + " 2.000000\n", + " 1.500000\n", + " 1.800000\n", + " 1.500000\n", + " 1.800000\n", + " 1.200000\n", + " 1.142857\n", + " 1.200000\n", + " 1.285714\n", + " 0.857143\n", + " 1.142857\n", + " 0.666667\n", + " 0.9\n", + " 0.666667\n", + " 0.9\n", + " 0.857143\n", + " 1.142857\n", + " 0.857143\n", + " 1.285714\n", + " 0.166667\n", + " 0.375000\n", + " 0.166667\n", + " 0.333333\n", + " 0.166667\n", + " 0.333333\n", + " 0.166667\n", + " 0.125000\n", + " 0.166667\n", + " 0.222222\n", + " 0.500000\n", + " 0.375000\n", + " 0.500000\n", + " 0.333333\n", + " 0.500000\n", + " 0.333333\n", + " 0.333333\n", + " 0.250000\n", + " 0.333333\n", + " 0.222222\n", + " 0.250000\n", + " 0.500000\n", + " 0.250000\n", + " 0.400000\n", + " 0.250000\n", + " 0.400000\n", + " 0.200000\n", + " 0.142857\n", + " 0.200000\n", + " 0.285714\n", + " 0.571429\n", + " 0.571429\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.5\n", + " 0.714286\n", + " 1.000000\n", + " 0.714286\n", + " 1.000000\n", + " \n", + " \n", + " e5kwkg2\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 0.500000\n", + " 0.666667\n", + " 0.500000\n", + " 0.666667\n", + " 0.500000\n", + " 0.666667\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 
1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 3.0\n", + " 6.0\n", + " 1.0\n", + " 4.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.560710\n", + " 1.676988\n", + " 1.560710\n", + " 1.676988\n", + " 1.560710\n", + " 1.676988\n", + " 1.791759\n", + " 2.197225\n", + " 1.791759\n", + " 2.197225\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.200000\n", + " 1.500000\n", + " 1.200000\n", + " 1.500000\n", + " 1.200000\n", + " 1.500000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.666667\n", + " 0.900000\n", + " 0.666667\n", + " 0.9\n", + " 0.666667\n", + " 0.9\n", + " 0.666667\n", + " 0.900000\n", + " 0.666667\n", + " 0.900000\n", + " 0.166667\n", + " 0.222222\n", + " 0.166667\n", + " 0.222222\n", + " 0.166667\n", + " 0.222222\n", + " 0.166667\n", + " 0.111111\n", + " 0.166667\n", + " 0.111111\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.166667\n", + " 0.111111\n", + " 0.166667\n", + " 0.111111\n", + " 0.200000\n", + " 0.333333\n", + " 0.200000\n", + " 0.333333\n", + " 0.200000\n", + " 0.333333\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.555556\n", + " 0.600000\n", + " 0.555556\n", + " 0.6\n", + " 0.555556\n", + " 0.6\n", + " 0.666667\n", + " 0.900000\n", + " 0.666667\n", + " 0.900000\n", " \n", " \n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " ...\n", - " 
...\n", + " e6mehe7\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 3.0\n", + " 1.0\n", + " 3.0\n", + " 3.0\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.666667\n", + " 0.500000\n", + " 0.666667\n", + " 0.333333\n", + " 0.333333\n", + " 0.666667\n", + " 0.666667\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 7.0\n", + " 3.0\n", + " 3.0\n", + " 2.0\n", + " 4.0\n", + " 1.332179\n", + " 1.213008\n", + " 1.560710\n", + " 1.676988\n", + " 1.560710\n", + " 1.676988\n", + " 0.950271\n", + " 1.667462\n", + " 1.011404\n", + " 1.676988\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 4.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 1.250000\n", + " 2.000000\n", + " 1.200000\n", + " 1.500000\n", + " 1.200000\n", + " 1.500000\n", + " 1.666667\n", + " 1.333333\n", + " 2.000000\n", + " 1.500000\n", + " 0.833333\n", + " 1.333333\n", + " 0.666667\n", + " 0.9\n", + " 0.666667\n", + " 0.9\n", + " 0.833333\n", + " 1.333333\n", + " 1.000000\n", + " 1.500000\n", + " 0.200000\n", + " 0.250000\n", + " 0.166667\n", + " 0.222222\n", + " 0.166667\n", + " 0.222222\n", + " 0.200000\n", + " 0.125000\n", + " 0.333333\n", + " 0.222222\n", + " 0.400000\n", + " 0.500000\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.600000\n", + " 0.375000\n", + " 0.500000\n", + " 0.333333\n", + " 0.250000\n", + " 0.500000\n", + " 0.200000\n", + " 0.333333\n", + " 0.200000\n", + " 0.333333\n", + " 0.333333\n", + " 0.166667\n", + " 0.666667\n", + " 0.333333\n", + " 0.666667\n", + " 0.666667\n", + " 0.555556\n", + " 0.6\n", + " 
0.555556\n", + " 0.6\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " \n", + " \n", + " e6m0hsd\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.750000\n", + " 0.600000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 3.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 6.0\n", + " 7.0\n", + " 1.039721\n", + " 1.039721\n", + " 1.906155\n", + " 2.043192\n", + " 1.906155\n", + " 2.043192\n", + " 1.039721\n", + " 1.039721\n", + " 0.974315\n", + " 0.936888\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 4.0\n", + " 5.0\n", + " 1.333333\n", + " 1.333333\n", + " 1.142857\n", + " 1.125000\n", + " 1.142857\n", + " 1.125000\n", + " 1.333333\n", + " 1.333333\n", + " 2.666667\n", + " 3.000000\n", + " 1.333333\n", + " 1.333333\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 1.333333\n", + " 1.333333\n", + " 2.666667\n", + " 3.000000\n", + " 0.250000\n", + " 0.250000\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.250000\n", + " 0.250000\n", + " 0.375000\n", + " 0.333333\n", + " 0.500000\n", + " 0.500000\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.555556\n", + " 0.333333\n", + " 0.333333\n", + " 0.142857\n", + " 0.125000\n", + " 0.142857\n", + " 0.125000\n", + " 0.333333\n", + " 0.333333\n", + " 
0.666667\n", + " 0.666667\n", + " 1.000000\n", + " 1.000000\n", + " 0.777778\n", + " 0.8\n", + " 0.777778\n", + " 0.8\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " \n", + " \n", + " e64r385\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 3.0\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 5.0\n", + " 2.0\n", + " 3.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.549826\n", + " 1.735126\n", + " 1.549826\n", + " 1.735126\n", + " 1.549826\n", + " 1.735126\n", + " 1.747868\n", + " 1.889159\n", + " 1.747868\n", + " 1.889159\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.400000\n", + " 1.500000\n", + " 1.400000\n", + " 1.500000\n", + " 1.400000\n", + " 1.500000\n", + " 1.166667\n", + " 1.285714\n", + " 1.166667\n", + " 1.285714\n", + " 1.000000\n", + " 1.125000\n", + " 0.777778\n", + " 0.9\n", + " 0.777778\n", + " 0.9\n", + " 1.000000\n", + " 1.125000\n", + " 1.000000\n", + " 1.125000\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.142857\n", + " 0.222222\n", + " 0.142857\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.400000\n", + " 0.500000\n", + " 
0.400000\n", + " 0.500000\n", + " 0.400000\n", + " 0.500000\n", + " 0.166667\n", + " 0.285714\n", + " 0.166667\n", + " 0.285714\n", + " 0.714286\n", + " 0.750000\n", + " 0.555556\n", + " 0.6\n", + " 0.555556\n", + " 0.6\n", + " 0.857143\n", + " 0.875000\n", + " 0.857143\n", + " 0.875000\n", + " \n", + " \n", + " e5surbt\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 1.000000\n", + " 0.200000\n", + " 1.000000\n", + " 0.200000\n", + " 1.000000\n", + " 0.200000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 3.0\n", + " 4.0\n", + " 0.0\n", + " 10.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.386294\n", + " 1.303092\n", + " 1.386294\n", + " 1.303092\n", + " 1.386294\n", + " 1.303092\n", + " 1.386294\n", + " 2.197225\n", + " 1.386294\n", + " 2.197225\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 5.0\n", + " 1.0\n", + " 5.0\n", + " 1.0\n", + " 5.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.000000\n", + " 1.800000\n", + " 1.000000\n", + " 1.800000\n", + " 1.000000\n", + " 1.800000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.444444\n", + " 0.900000\n", + " 0.444444\n", + " 0.9\n", + " 0.444444\n", + " 0.9\n", + " 0.444444\n", + " 0.900000\n", + " 0.444444\n", + " 0.900000\n", + " 0.250000\n", + " 0.111111\n", + " 0.250000\n", + " 0.111111\n", + " 0.250000\n", + " 0.111111\n", + " 0.250000\n", + " 0.111111\n", + " 0.250000\n", + " 0.111111\n", + " 0.250000\n", + " 0.555556\n", + " 0.250000\n", + " 0.555556\n", + " 0.250000\n", + " 0.555556\n", + " 
0.250000\n", + " 0.111111\n", + " 0.250000\n", + " 0.111111\n", + " 0.000000\n", + " 0.200000\n", + " 0.000000\n", + " 0.200000\n", + " 0.000000\n", + " 0.200000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.444444\n", + " 0.500000\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.900000\n", + " 0.444444\n", + " 0.900000\n", + " \n", + " \n", + " e58gxii\n", + " 4.0\n", + " 4.0\n", + " 6.0\n", + " 2.0\n", + " 6.0\n", + " 2.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.666667\n", + " 0.500000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.666667\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 2.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 4.0\n", + " 7.0\n", + " 3.0\n", + " 3.0\n", + " 2.0\n", + " 4.0\n", + " 1.277034\n", + " 1.213008\n", + " 1.549826\n", + " 1.735126\n", + " 1.549826\n", + " 1.735126\n", + " 1.475076\n", + " 1.667462\n", + " 1.475076\n", + " 1.676988\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 4.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 1.750000\n", + " 2.000000\n", + " 1.400000\n", + " 1.500000\n", + " 1.400000\n", + " 1.500000\n", + " 1.400000\n", + " 1.333333\n", + " 1.400000\n", + " 1.500000\n", + " 1.166667\n", + " 1.333333\n", + " 0.777778\n", + " 0.9\n", + " 0.777778\n", + " 0.9\n", + " 1.166667\n", + " 1.333333\n", + " 1.166667\n", + " 1.500000\n", + " 0.285714\n", + " 0.250000\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.142857\n", + " 0.125000\n", + " 0.142857\n", + " 0.222222\n", + " 
0.428571\n", + " 0.500000\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.428571\n", + " 0.375000\n", + " 0.428571\n", + " 0.333333\n", + " 0.500000\n", + " 0.500000\n", + " 0.400000\n", + " 0.500000\n", + " 0.400000\n", + " 0.500000\n", + " 0.200000\n", + " 0.166667\n", + " 0.200000\n", + " 0.333333\n", + " 0.666667\n", + " 0.666667\n", + " 0.555556\n", + " 0.6\n", + " 0.555556\n", + " 0.6\n", + " 0.833333\n", + " 1.000000\n", + " 0.833333\n", + " 1.000000\n", + " \n", + " \n", + " e64vc8y\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 6.0\n", + " 2.0\n", + " 6.0\n", + " 2.0\n", + " 1.000000\n", + " 0.166667\n", + " 1.000000\n", + " 0.166667\n", + " 1.000000\n", + " 0.166667\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 3.0\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 3.0\n", + " 0.0\n", + " 15.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.098612\n", + " 1.002718\n", + " 1.098612\n", + " 1.002718\n", + " 1.098612\n", + " 1.002718\n", + " 1.098612\n", + " 2.197225\n", + " 1.098612\n", + " 2.197225\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 6.0\n", + " 1.0\n", + " 6.0\n", + " 1.0\n", + " 6.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.000000\n", + " 2.250000\n", + " 1.000000\n", + " 2.250000\n", + " 1.000000\n", + " 2.250000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.333333\n", + " 0.900000\n", + " 0.333333\n", + " 0.9\n", + " 0.333333\n", + " 0.9\n", + " 0.333333\n", + " 0.900000\n", + " 0.333333\n", + " 0.900000\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 
0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.666667\n", + " 0.333333\n", + " 0.666667\n", + " 0.333333\n", + " 0.666667\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.000000\n", + " 0.250000\n", + " 0.000000\n", + " 0.250000\n", + " 0.000000\n", + " 0.250000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.333333\n", + " 0.400000\n", + " 0.333333\n", + " 0.4\n", + " 0.333333\n", + " 0.4\n", + " 0.333333\n", + " 0.900000\n", + " 0.333333\n", + " 0.900000\n", + " \n", + " \n", + " e57504g\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 6.0\n", + " 1.0\n", + " 6.0\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 5.0\n", + " 5.0\n", + " 5.0\n", + " 5.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 6.0\n", + " 2.0\n", + " 3.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.549826\n", + " 1.735126\n", + " 1.549826\n", + " 1.735126\n", + " 1.549826\n", + " 1.735126\n", + " 1.747868\n", + " 1.889159\n", + " 1.747868\n", + " 1.889159\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.400000\n", + " 1.500000\n", + " 1.400000\n", + " 1.500000\n", + " 1.400000\n", + " 1.500000\n", + " 1.166667\n", + " 1.285714\n", + " 1.166667\n", + " 1.285714\n", + " 1.000000\n", + " 1.125000\n", + " 0.777778\n", + " 0.9\n", + " 0.777778\n", + " 0.9\n", + " 1.000000\n", + " 1.125000\n", + " 
1.000000\n", + " 1.125000\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.142857\n", + " 0.222222\n", + " 0.142857\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.400000\n", + " 0.500000\n", + " 0.400000\n", + " 0.500000\n", + " 0.400000\n", + " 0.500000\n", + " 0.166667\n", + " 0.285714\n", + " 0.166667\n", + " 0.285714\n", + " 0.714286\n", + " 0.750000\n", + " 0.555556\n", + " 0.6\n", + " 0.555556\n", + " 0.6\n", + " 0.857143\n", + " 0.875000\n", + " 0.857143\n", + " 0.875000\n", + " \n", + " \n", + " e5borjq\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 4.0\n", + " 3.0\n", + " 4.0\n", + " 3.0\n", + " 4.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 5.0\n", + " 4.0\n", + " 5.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.277034\n", + " 1.522955\n", + " 1.277034\n", + " 1.522955\n", + " 1.277034\n", + " 1.522955\n", + " 1.945910\n", + " 2.043192\n", + " 1.945910\n", + " 2.043192\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.750000\n", + " 1.800000\n", + " 1.750000\n", + " 1.800000\n", + " 1.750000\n", + " 1.800000\n", + " 1.000000\n", + " 1.125000\n", + " 1.000000\n", + " 1.125000\n", + " 0.875000\n", + " 1.125000\n", + 
" 0.777778\n", + " 0.9\n", + " 0.777778\n", + " 0.9\n", + " 0.875000\n", + " 1.125000\n", + " 0.875000\n", + " 1.125000\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.142857\n", + " 0.111111\n", + " 0.142857\n", + " 0.111111\n", + " 0.428571\n", + " 0.333333\n", + " 0.428571\n", + " 0.333333\n", + " 0.428571\n", + " 0.333333\n", + " 0.142857\n", + " 0.222222\n", + " 0.142857\n", + " 0.222222\n", + " 0.500000\n", + " 0.600000\n", + " 0.500000\n", + " 0.600000\n", + " 0.500000\n", + " 0.600000\n", + " 0.000000\n", + " 0.125000\n", + " 0.000000\n", + " 0.125000\n", + " 0.500000\n", + " 0.625000\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.5\n", + " 0.875000\n", + " 1.000000\n", + " 0.875000\n", + " 1.000000\n", + " \n", + " \n", + " e64n9zv\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 2.0\n", + " 2.0\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 1.000000\n", + " 0.750000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 3.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 6.0\n", + " 7.0\n", + " 1.039721\n", + " 1.039721\n", + " 1.906155\n", + " 2.043192\n", + " 1.906155\n", + " 2.043192\n", + " 1.039721\n", + " 1.039721\n", + " 1.082196\n", + " 1.060857\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 4.0\n", + " 1.333333\n", + " 1.333333\n", + " 1.142857\n", + " 1.125000\n", + " 1.142857\n", + " 1.125000\n", + " 
1.333333\n", + " 1.333333\n", + " 2.666667\n", + " 3.000000\n", + " 1.333333\n", + " 1.333333\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 1.333333\n", + " 1.333333\n", + " 2.666667\n", + " 3.000000\n", + " 0.250000\n", + " 0.250000\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.250000\n", + " 0.250000\n", + " 0.375000\n", + " 0.333333\n", + " 0.500000\n", + " 0.500000\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.500000\n", + " 0.500000\n", + " 0.375000\n", + " 0.444444\n", + " 0.333333\n", + " 0.333333\n", + " 0.142857\n", + " 0.125000\n", + " 0.142857\n", + " 0.125000\n", + " 0.333333\n", + " 0.333333\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.777778\n", + " 0.8\n", + " 0.777778\n", + " 0.8\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " \n", + " \n", + " e582ud3\n", + " 5.0\n", + " 1.0\n", + " 5.0\n", + " 1.0\n", + " 5.0\n", + " 1.0\n", + " 6.0\n", + " 2.0\n", + " 6.0\n", + " 2.0\n", + " 0.500000\n", + " 0.333333\n", + " 0.500000\n", + " 0.333333\n", + " 0.500000\n", + " 0.333333\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 4.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 3.0\n", + " 1.0\n", + " 16.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.636514\n", + " 0.848686\n", + " 0.636514\n", + " 0.848686\n", + " 0.636514\n", + " 0.848686\n", + " 1.098612\n", + " 2.197225\n", + " 1.098612\n", + " 2.197225\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 2.0\n", + " 6.0\n", + " 2.0\n", + " 6.0\n", + " 2.0\n", + " 6.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 
1.500000\n", + " 3.000000\n", + " 1.500000\n", + " 3.000000\n", + " 1.500000\n", + " 3.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.333333\n", + " 0.900000\n", + " 0.333333\n", + " 0.9\n", + " 0.333333\n", + " 0.9\n", + " 0.333333\n", + " 0.900000\n", + " 0.333333\n", + " 0.900000\n", + " 0.333333\n", + " 0.222222\n", + " 0.333333\n", + " 0.222222\n", + " 0.333333\n", + " 0.222222\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.500000\n", + " 0.666667\n", + " 0.500000\n", + " 0.666667\n", + " 0.500000\n", + " 0.666667\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.222222\n", + " 0.300000\n", + " 0.222222\n", + " 0.3\n", + " 0.222222\n", + " 0.3\n", + " 0.333333\n", + " 0.900000\n", + " 0.333333\n", + " 0.900000\n", + " \n", + " \n", + " e64i9cf\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 3.0\n", + " 0.0\n", + " 3.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.333333\n", + " 0.666667\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 7.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 4.0\n", + " 1.039721\n", + " 0.955700\n", + " 1.332179\n", + " 1.427061\n", + " 1.332179\n", + " 1.427061\n", + " 1.039721\n", + " 1.747868\n", + " 0.950271\n", + " 1.676988\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 4.0\n", + " 
2.0\n", + " 4.0\n", + " 2.0\n", + " 4.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 3.0\n", + " 1.333333\n", + " 2.333333\n", + " 1.250000\n", + " 1.800000\n", + " 1.250000\n", + " 1.800000\n", + " 1.333333\n", + " 1.166667\n", + " 1.666667\n", + " 1.500000\n", + " 0.666667\n", + " 1.166667\n", + " 0.555556\n", + " 0.9\n", + " 0.555556\n", + " 0.9\n", + " 0.666667\n", + " 1.166667\n", + " 0.833333\n", + " 1.500000\n", + " 0.250000\n", + " 0.285714\n", + " 0.200000\n", + " 0.222222\n", + " 0.200000\n", + " 0.222222\n", + " 0.250000\n", + " 0.142857\n", + " 0.200000\n", + " 0.222222\n", + " 0.500000\n", + " 0.571429\n", + " 0.400000\n", + " 0.444444\n", + " 0.400000\n", + " 0.444444\n", + " 0.500000\n", + " 0.285714\n", + " 0.600000\n", + " 0.333333\n", + " 0.333333\n", + " 0.666667\n", + " 0.250000\n", + " 0.400000\n", + " 0.250000\n", + " 0.400000\n", + " 0.333333\n", + " 0.166667\n", + " 0.333333\n", + " 0.333333\n", + " 0.500000\n", + " 0.500000\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.5\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " \n", + " \n", + " e6q9204\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 0.666667\n", + " 0.666667\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.500000\n", + " 0.333333\n", + " 0.666667\n", + " 0.500000\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 1.0\n", + " 3.0\n", + " 4.0\n", + " 4.0\n", + " 1.011404\n", + " 1.277034\n", + " 1.732868\n", + " 1.889159\n", + " 1.732868\n", + " 1.889159\n", + " 1.560710\n", + " 1.475076\n", + " 1.494175\n", + " 1.427061\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 
1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 3.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 3.0\n", + " 4.0\n", + " 2.000000\n", + " 1.750000\n", + " 1.333333\n", + " 1.285714\n", + " 1.333333\n", + " 1.285714\n", + " 1.200000\n", + " 1.400000\n", + " 1.600000\n", + " 1.800000\n", + " 1.200000\n", + " 1.166667\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 1.200000\n", + " 1.166667\n", + " 1.600000\n", + " 1.500000\n", + " 0.333333\n", + " 0.285714\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.166667\n", + " 0.142857\n", + " 0.250000\n", + " 0.222222\n", + " 0.500000\n", + " 0.428571\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.333333\n", + " 0.428571\n", + " 0.375000\n", + " 0.444444\n", + " 0.666667\n", + " 0.500000\n", + " 0.333333\n", + " 0.285714\n", + " 0.333333\n", + " 0.285714\n", + " 0.200000\n", + " 0.200000\n", + " 0.400000\n", + " 0.400000\n", + " 0.600000\n", + " 0.666667\n", + " 0.666667\n", + " 0.7\n", + " 0.666667\n", + " 0.7\n", + " 1.000000\n", + " 0.833333\n", + " 1.000000\n", + " 0.833333\n", + " \n", + " \n", + " e5modd7\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 6.0\n", + " 8.0\n", + " 8.0\n", + " 8.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.039721\n", + " 1.273028\n", + " 1.039721\n", + " 1.273028\n", + " 1.039721\n", + " 1.273028\n", + " 2.079442\n", + " 
2.197225\n", + " 2.079442\n", + " 2.197225\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.666667\n", + " 2.250000\n", + " 2.666667\n", + " 2.250000\n", + " 2.666667\n", + " 2.250000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.888889\n", + " 0.900000\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.900000\n", + " 0.888889\n", + " 0.900000\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.500000\n", + " 0.444444\n", + " 0.500000\n", + " 0.444444\n", + " 0.500000\n", + " 0.444444\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 1.000000\n", + " 0.750000\n", + " 1.000000\n", + " 0.750000\n", + " 1.000000\n", + " 0.750000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.333333\n", + " 0.400000\n", + " 0.333333\n", + " 0.4\n", + " 0.333333\n", + " 0.4\n", + " 0.888889\n", + " 0.900000\n", + " 0.888889\n", + " 0.900000\n", + " \n", + " \n", + " e5xhbyd\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.666667\n", + " 1.000000\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 3.0\n", + " 3.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 5.0\n", + " 6.0\n", + " 1.329661\n", + " 
1.329661\n", + " 1.732868\n", + " 1.889159\n", + " 1.732868\n", + " 1.889159\n", + " 1.329661\n", + " 1.329661\n", + " 1.320888\n", + " 1.310784\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 3.0\n", + " 1.500000\n", + " 1.500000\n", + " 1.333333\n", + " 1.285714\n", + " 1.333333\n", + " 1.285714\n", + " 1.500000\n", + " 1.500000\n", + " 2.000000\n", + " 2.250000\n", + " 1.500000\n", + " 1.500000\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 1.500000\n", + " 1.500000\n", + " 2.000000\n", + " 2.250000\n", + " 0.333333\n", + " 0.333333\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.333333\n", + " 0.333333\n", + " 0.250000\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.333333\n", + " 0.333333\n", + " 0.375000\n", + " 0.333333\n", + " 0.500000\n", + " 0.500000\n", + " 0.333333\n", + " 0.285714\n", + " 0.333333\n", + " 0.285714\n", + " 0.500000\n", + " 0.500000\n", + " 0.750000\n", + " 0.750000\n", + " 1.000000\n", + " 1.000000\n", + " 0.666667\n", + " 0.7\n", + " 0.666667\n", + " 0.7\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " \n", + " \n", + " e5oaf7h\n", + " 1.0\n", + " 0.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.333333\n", + " 0.333333\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.500000\n", + " 0.500000\n", + " 0.750000\n", + " 0.600000\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 3.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 
2.0\n", + " 3.0\n", + " 3.0\n", + " 1.0\n", + " 1.0\n", + " 5.0\n", + " 6.0\n", + " 0.562335\n", + " 0.562335\n", + " 1.732868\n", + " 1.889159\n", + " 1.732868\n", + " 1.889159\n", + " 1.039721\n", + " 1.039721\n", + " 0.974315\n", + " 0.936888\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 3.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 4.0\n", + " 5.0\n", + " 2.000000\n", + " 2.000000\n", + " 1.333333\n", + " 1.285714\n", + " 1.333333\n", + " 1.285714\n", + " 1.333333\n", + " 1.333333\n", + " 2.666667\n", + " 3.000000\n", + " 1.333333\n", + " 1.333333\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 1.333333\n", + " 1.333333\n", + " 2.666667\n", + " 3.000000\n", + " 0.250000\n", + " 0.250000\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.250000\n", + " 0.375000\n", + " 0.333333\n", + " 0.750000\n", + " 0.750000\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.555556\n", + " 0.500000\n", + " 0.500000\n", + " 0.333333\n", + " 0.285714\n", + " 0.333333\n", + " 0.285714\n", + " 0.333333\n", + " 0.333333\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.7\n", + " 0.666667\n", + " 0.7\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " \n", + " \n", + " e6nir3u\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.500000\n", + " 0.500000\n", + " 1.000000\n", + " 1.000000\n", + " 0.750000\n", + " 1.000000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 4.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 6.0\n", + " 
7.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 7.0\n", + " 8.0\n", + " 0.693147\n", + " 0.693147\n", + " 1.945910\n", + " 2.079442\n", + " 1.906155\n", + " 2.043192\n", + " 0.693147\n", + " 0.693147\n", + " 0.682908\n", + " 0.693147\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 4.0\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.142857\n", + " 1.125000\n", + " 1.000000\n", + " 1.000000\n", + " 3.500000\n", + " 4.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.777778\n", + " 0.8\n", + " 0.888889\n", + " 0.9\n", + " 1.000000\n", + " 1.000000\n", + " 3.500000\n", + " 4.000000\n", + " 0.500000\n", + " 0.500000\n", + " 0.142857\n", + " 0.125000\n", + " 0.125000\n", + " 0.111111\n", + " 0.500000\n", + " 0.500000\n", + " 0.428571\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.142857\n", + " 0.125000\n", + " 0.250000\n", + " 0.222222\n", + " 0.500000\n", + " 0.500000\n", + " 0.571429\n", + " 0.500000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.142857\n", + " 0.125000\n", + " 0.000000\n", + " 0.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.777778\n", + " 0.8\n", + " 0.777778\n", + " 0.8\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " \n", + " \n", + " e6c3xdn\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 
1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 6.0\n", + " 8.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.906155\n", + " 2.043192\n", + " 1.906155\n", + " 2.043192\n", + " 1.906155\n", + " 2.043192\n", + " 2.079442\n", + " 2.197225\n", + " 2.079442\n", + " 2.197225\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.142857\n", + " 1.125000\n", + " 1.142857\n", + " 1.125000\n", + " 1.142857\n", + " 1.125000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.888889\n", + " 0.900000\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.900000\n", + " 0.888889\n", + " 0.900000\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.142857\n", + " 0.125000\n", + " 0.142857\n", + " 0.125000\n", + " 0.142857\n", + " 0.125000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.777778\n", + " 0.800000\n", + " 0.777778\n", + " 0.8\n", + " 0.777778\n", + " 0.8\n", + " 0.888889\n", + " 0.900000\n", + " 0.888889\n", + " 0.900000\n", + " \n", + " \n", + " e5d3zaa\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 
0.666667\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 4.0\n", + " 2.0\n", + " 3.0\n", + " 0.0\n", + " 1.0\n", + " 3.0\n", + " 3.0\n", + " 1.054920\n", + " 1.351784\n", + " 1.549826\n", + " 1.735126\n", + " 1.549826\n", + " 1.735126\n", + " 1.609438\n", + " 1.747868\n", + " 1.549826\n", + " 1.676988\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 1.666667\n", + " 1.750000\n", + " 1.400000\n", + " 1.500000\n", + " 1.400000\n", + " 1.500000\n", + " 1.000000\n", + " 1.166667\n", + " 1.400000\n", + " 1.500000\n", + " 0.833333\n", + " 1.000000\n", + " 0.777778\n", + " 0.9\n", + " 0.777778\n", + " 0.9\n", + " 0.833333\n", + " 1.000000\n", + " 1.166667\n", + " 1.285714\n", + " 0.400000\n", + " 0.285714\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.200000\n", + " 0.142857\n", + " 0.285714\n", + " 0.222222\n", + " 0.400000\n", + " 0.285714\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.200000\n", + " 0.285714\n", + " 0.285714\n", + " 0.333333\n", + " 0.666667\n", + " 0.750000\n", + " 0.400000\n", + " 0.500000\n", + " 0.400000\n", + " 0.500000\n", + " 0.000000\n", + " 0.166667\n", + " 0.400000\n", + " 0.333333\n", + " 0.500000\n", + " 0.571429\n", + " 0.555556\n", + " 0.6\n", + " 0.555556\n", + " 0.6\n", + " 0.833333\n", + " 0.857143\n", + " 0.833333\n", + " 0.857143\n", + " \n", + " \n", + " e5gnjv9\n", + " 0.0\n", + " 3.0\n", + " 0.0\n", + " 3.0\n", + " 0.0\n", + " 3.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.000000\n", + " 0.125000\n", + " 0.000000\n", + " 
0.125000\n", + " 0.000000\n", + " 0.125000\n", + " 0.000000\n", + " 0.500000\n", + " 0.000000\n", + " 0.500000\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 6.0\n", + " 7.0\n", + " 6.0\n", + " 7.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 28.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.000000\n", + " 0.348832\n", + " 0.000000\n", + " 0.348832\n", + " 0.000000\n", + " 0.348832\n", + " 0.000000\n", + " 2.043192\n", + " 0.000000\n", + " 2.043192\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 8.0\n", + " 1.0\n", + " 8.0\n", + " 1.0\n", + " 8.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.000000\n", + " 4.500000\n", + " 1.000000\n", + " 4.500000\n", + " 1.000000\n", + " 4.500000\n", + " 1.000000\n", + " 1.125000\n", + " 1.000000\n", + " 1.125000\n", + " 0.125000\n", + " 1.000000\n", + " 0.111111\n", + " 0.9\n", + " 0.111111\n", + " 0.9\n", + " 0.125000\n", + " 1.000000\n", + " 0.125000\n", + " 1.000000\n", + " 0.000000\n", + " 0.111111\n", + " 0.000000\n", + " 0.111111\n", + " 0.000000\n", + " 0.111111\n", + " 0.000000\n", + " 0.111111\n", + " 0.000000\n", + " 0.111111\n", + " 1.000000\n", + " 0.888889\n", + " 1.000000\n", + " 0.888889\n", + " 1.000000\n", + " 0.888889\n", + " 1.000000\n", + " 0.222222\n", + " 1.000000\n", + " 0.222222\n", + " 0.000000\n", + " 0.500000\n", + " 0.000000\n", + " 0.500000\n", + " 0.000000\n", + " 0.500000\n", + " 0.000000\n", + " 0.125000\n", + " 0.000000\n", + " 0.125000\n", + " 0.125000\n", + " 0.222222\n", + " 0.111111\n", + " 0.2\n", + " 0.111111\n", + " 0.2\n", + " 0.125000\n", + " 0.888889\n", + " 0.125000\n", + " 0.888889\n", + " \n", + " \n", + " e69gw2t\n", + " 2.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 
0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.666667\n", + " 1.000000\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 3.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 1.0\n", + " 2.0\n", + " 4.0\n", + " 5.0\n", + " 1.054920\n", + " 1.351784\n", + " 1.549826\n", + " 1.735126\n", + " 1.549826\n", + " 1.735126\n", + " 1.332179\n", + " 1.549826\n", + " 1.277034\n", + " 1.464816\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 3.0\n", + " 1.666667\n", + " 1.750000\n", + " 1.400000\n", + " 1.500000\n", + " 1.400000\n", + " 1.500000\n", + " 1.250000\n", + " 1.400000\n", + " 1.750000\n", + " 1.800000\n", + " 1.000000\n", + " 1.400000\n", + " 0.777778\n", + " 0.9\n", + " 0.777778\n", + " 0.9\n", + " 1.000000\n", + " 1.400000\n", + " 1.400000\n", + " 1.800000\n", + " 0.400000\n", + " 0.285714\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.200000\n", + " 0.285714\n", + " 0.285714\n", + " 0.333333\n", + " 0.400000\n", + " 0.285714\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.400000\n", + " 0.285714\n", + " 0.428571\n", + " 0.333333\n", + " 0.666667\n", + " 0.750000\n", + " 0.400000\n", + " 0.500000\n", + " 0.400000\n", + " 0.500000\n", + " 0.250000\n", + " 0.400000\n", + " 0.500000\n", + " 0.400000\n", + " 0.600000\n", + " 0.800000\n", + " 0.555556\n", + " 0.6\n", + " 0.555556\n", + " 0.6\n", + " 0.800000\n", + " 1.000000\n", + " 0.800000\n", + " 1.000000\n", + " \n", + " 
\n", + " e5syrih\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 0.0\n", + " 3.0\n", + " 0.0\n", + " 3.0\n", + " 0.500000\n", + " 0.500000\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 4.0\n", + " 6.0\n", + " 8.0\n", + " 8.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.039721\n", + " 1.273028\n", + " 1.494175\n", + " 1.676988\n", + " 1.494175\n", + " 1.676988\n", + " 1.906155\n", + " 1.889159\n", + " 1.906155\n", + " 1.889159\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 4.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.666667\n", + " 2.250000\n", + " 1.600000\n", + " 1.500000\n", + " 1.600000\n", + " 1.500000\n", + " 1.142857\n", + " 1.285714\n", + " 1.142857\n", + " 1.285714\n", + " 1.142857\n", + " 1.285714\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 1.142857\n", + " 1.285714\n", + " 1.142857\n", + " 1.285714\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.125000\n", + " 0.222222\n", + " 0.125000\n", + " 0.222222\n", + " 0.500000\n", + " 0.444444\n", + " 0.375000\n", + " 0.333333\n", + " 0.375000\n", + " 0.333333\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 1.000000\n", + " 0.750000\n", + " 0.400000\n", + " 0.333333\n", + " 0.400000\n", + " 0.333333\n", + " 0.142857\n", + " 0.285714\n", + " 0.142857\n", + " 0.285714\n", + " 0.428571\n", + " 0.571429\n", + " 0.555556\n", + " 0.6\n", + " 
0.555556\n", + " 0.6\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " \n", + " \n", + " e5sa2yf\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.800000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 4.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 7.0\n", + " 8.0\n", + " 0.693147\n", + " 0.693147\n", + " 2.079442\n", + " 2.197225\n", + " 2.079442\n", + " 2.197225\n", + " 0.693147\n", + " 0.693147\n", + " 0.693147\n", + " 0.686962\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 5.0\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 4.000000\n", + " 4.500000\n", + " 1.000000\n", + " 1.000000\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 1.000000\n", + " 1.000000\n", + " 4.000000\n", + " 4.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.444444\n", + " 0.500000\n", + " 0.500000\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.555556\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 
1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " \n", + " \n", + " e6ai7z5\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 0.0\n", + " 3.0\n", + " 0.0\n", + " 0.500000\n", + " 0.666667\n", + " 1.000000\n", + " 0.333333\n", + " 1.000000\n", + " 0.333333\n", + " 0.250000\n", + " 0.250000\n", + " 0.250000\n", + " 0.200000\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 4.0\n", + " 6.0\n", + " 6.0\n", + " 2.0\n", + " 2.0\n", + " 1.560710\n", + " 1.494175\n", + " 1.791759\n", + " 1.831020\n", + " 1.791759\n", + " 1.831020\n", + " 0.867563\n", + " 1.386294\n", + " 0.867563\n", + " 1.303092\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 3.0\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 3.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 5.0\n", + " 1.200000\n", + " 1.600000\n", + " 1.000000\n", + " 1.285714\n", + " 1.000000\n", + " 1.285714\n", + " 2.000000\n", + " 1.600000\n", + " 2.000000\n", + " 1.800000\n", + " 1.200000\n", + " 1.600000\n", + " 0.666667\n", + " 0.9\n", + " 0.666667\n", + " 0.9\n", + " 1.200000\n", + " 1.600000\n", + " 1.200000\n", + " 1.800000\n", + " 0.166667\n", + " 0.250000\n", + " 0.166667\n", + " 0.111111\n", + " 0.166667\n", + " 0.111111\n", + " 0.166667\n", + " 0.125000\n", + " 0.166667\n", + " 0.111111\n", + " 0.333333\n", + " 0.375000\n", + " 0.166667\n", + " 0.333333\n", + " 0.166667\n", + " 0.333333\n", + " 0.666667\n", + " 0.500000\n", + " 0.666667\n", + " 0.555556\n", + " 0.200000\n", + " 0.400000\n", + " 
0.000000\n", + " 0.142857\n", + " 0.000000\n", + " 0.142857\n", + " 0.333333\n", + " 0.200000\n", + " 0.333333\n", + " 0.200000\n", + " 1.000000\n", + " 1.000000\n", + " 0.666667\n", + " 0.7\n", + " 0.666667\n", + " 0.7\n", + " 0.600000\n", + " 1.000000\n", + " 0.600000\n", + " 1.000000\n", + " \n", + " \n", + " ...\n", " ...\n", " ...\n", " ...\n", @@ -2241,2506 +5794,12967 @@ " ...\n", " ...\n", " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " ...\n", + " \n", + " \n", + " e5smhzk\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.750000\n", + " 1.000000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 4.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 5.0\n", + " 7.0\n", + " 1.098612\n", + " 1.039721\n", + " 1.945910\n", + " 2.043192\n", + " 1.945910\n", + " 2.043192\n", + " 0.636514\n", + " 1.039721\n", + " 0.682908\n", + " 0.964963\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 4.0\n", + " 4.0\n", + " 1.000000\n", + " 1.333333\n", + " 1.000000\n", + " 1.125000\n", + " 1.000000\n", + " 1.125000\n", + " 1.500000\n", + " 1.333333\n", + " 3.500000\n", + " 3.000000\n", + " 1.000000\n", + " 1.333333\n", + " 
0.777778\n", + " 0.9\n", + " 0.777778\n", + " 0.9\n", + " 1.000000\n", + " 1.333333\n", + " 2.333333\n", + " 3.000000\n", + " 0.333333\n", + " 0.250000\n", + " 0.142857\n", + " 0.111111\n", + " 0.142857\n", + " 0.111111\n", + " 0.333333\n", + " 0.250000\n", + " 0.428571\n", + " 0.444444\n", + " 0.333333\n", + " 0.500000\n", + " 0.142857\n", + " 0.222222\n", + " 0.142857\n", + " 0.222222\n", + " 0.666667\n", + " 0.500000\n", + " 0.571429\n", + " 0.444444\n", + " 0.000000\n", + " 0.333333\n", + " 0.000000\n", + " 0.125000\n", + " 0.000000\n", + " 0.125000\n", + " 0.500000\n", + " 0.333333\n", + " 1.000000\n", + " 0.666667\n", + " 1.000000\n", + " 1.000000\n", + " 0.777778\n", + " 0.8\n", + " 0.777778\n", + " 0.8\n", + " 0.666667\n", + " 1.000000\n", + " 0.666667\n", + " 1.000000\n", + " \n", + " \n", + " e5v91s0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 4.0\n", + " 2.0\n", + " 4.0\n", + " 2.0\n", + " 0.500000\n", + " 0.400000\n", + " 0.500000\n", + " 0.400000\n", + " 0.500000\n", + " 0.400000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 3.0\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 11.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.039721\n", + " 1.149060\n", + " 1.039721\n", + " 1.149060\n", + " 1.039721\n", + " 1.149060\n", + " 1.386294\n", + " 2.197225\n", + " 1.386294\n", + " 2.197225\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 2.0\n", + " 5.0\n", + " 2.0\n", + " 5.0\n", + " 2.0\n", + " 5.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.333333\n", + " 2.250000\n", + " 1.333333\n", + " 2.250000\n", + " 1.333333\n", + " 2.250000\n", + " 
1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.444444\n", + " 0.900000\n", + " 0.444444\n", + " 0.9\n", + " 0.444444\n", + " 0.9\n", + " 0.444444\n", + " 0.900000\n", + " 0.444444\n", + " 0.900000\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.111111\n", + " 0.250000\n", + " 0.111111\n", + " 0.500000\n", + " 0.555556\n", + " 0.500000\n", + " 0.555556\n", + " 0.500000\n", + " 0.555556\n", + " 0.250000\n", + " 0.111111\n", + " 0.250000\n", + " 0.111111\n", + " 0.333333\n", + " 0.500000\n", + " 0.333333\n", + " 0.500000\n", + " 0.333333\n", + " 0.500000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.333333\n", + " 0.400000\n", + " 0.333333\n", + " 0.4\n", + " 0.333333\n", + " 0.4\n", + " 0.444444\n", + " 0.900000\n", + " 0.444444\n", + " 0.900000\n", + " \n", + " \n", + " e6n6di6\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 0.500000\n", + " 0.500000\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 5.0\n", + " 6.0\n", + " 5.0\n", + " 6.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 5.0\n", + " 7.0\n", + " 7.0\n", + " 7.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.213008\n", + " 1.427061\n", + " 1.494175\n", + " 1.676988\n", + " 1.494175\n", + " 1.676988\n", + " 1.906155\n", + " 2.043192\n", + " 1.906155\n", + " 2.043192\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 4.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 
2.000000\n", + " 1.800000\n", + " 1.600000\n", + " 1.500000\n", + " 1.600000\n", + " 1.500000\n", + " 1.142857\n", + " 1.125000\n", + " 1.142857\n", + " 1.125000\n", + " 1.000000\n", + " 1.000000\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.500000\n", + " 0.444444\n", + " 0.375000\n", + " 0.333333\n", + " 0.375000\n", + " 0.333333\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.500000\n", + " 0.400000\n", + " 0.400000\n", + " 0.333333\n", + " 0.400000\n", + " 0.333333\n", + " 0.142857\n", + " 0.125000\n", + " 0.142857\n", + " 0.125000\n", + " 0.500000\n", + " 0.555556\n", + " 0.555556\n", + " 0.6\n", + " 0.555556\n", + " 0.6\n", + " 0.875000\n", + " 0.888889\n", + " 0.875000\n", + " 0.888889\n", + " \n", + " \n", + " e6iqq30\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 5.0\n", + " 1.0\n", + " 5.0\n", + " 1.0\n", + " 1.000000\n", + " 0.166667\n", + " 1.000000\n", + " 0.166667\n", + " 1.000000\n", + " 0.166667\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 4.0\n", + " 0.0\n", + " 4.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 15.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.098612\n", + " 1.002718\n", + " 1.098612\n", + " 1.002718\n", + " 1.098612\n", + " 1.002718\n", + " 1.098612\n", + " 2.197225\n", + " 1.098612\n", + " 2.197225\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 6.0\n", + " 
1.0\n", + " 6.0\n", + " 1.0\n", + " 6.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.000000\n", + " 2.250000\n", + " 1.000000\n", + " 2.250000\n", + " 1.000000\n", + " 2.250000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.333333\n", + " 1.000000\n", + " 0.333333\n", + " 0.9\n", + " 0.333333\n", + " 0.9\n", + " 0.333333\n", + " 1.000000\n", + " 0.333333\n", + " 1.000000\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.666667\n", + " 0.333333\n", + " 0.666667\n", + " 0.333333\n", + " 0.666667\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.000000\n", + " 0.250000\n", + " 0.000000\n", + " 0.250000\n", + " 0.000000\n", + " 0.250000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.333333\n", + " 0.444444\n", + " 0.333333\n", + " 0.4\n", + " 0.333333\n", + " 0.4\n", + " 0.333333\n", + " 1.000000\n", + " 0.333333\n", + " 1.000000\n", + " \n", + " \n", + " e5bfad7\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.666667\n", + " 1.000000\n", + " 0.666667\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 0.0\n", + " 2.0\n", + " 3.0\n", + " 4.0\n", + " 2.0\n", + " 8.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 2.0\n", + " 1.329661\n", + " 1.273028\n", + " 1.329661\n", + " 1.522955\n", + " 1.329661\n", + " 1.522955\n", + " 1.560710\n", + " 2.043192\n", + " 1.560710\n", + " 2.043192\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 
1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 4.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.500000\n", + " 2.250000\n", + " 1.500000\n", + " 1.800000\n", + " 1.500000\n", + " 1.800000\n", + " 1.200000\n", + " 1.125000\n", + " 1.200000\n", + " 1.125000\n", + " 0.750000\n", + " 1.125000\n", + " 0.666667\n", + " 0.9\n", + " 0.666667\n", + " 0.9\n", + " 0.750000\n", + " 1.125000\n", + " 0.750000\n", + " 1.125000\n", + " 0.333333\n", + " 0.222222\n", + " 0.333333\n", + " 0.222222\n", + " 0.333333\n", + " 0.222222\n", + " 0.166667\n", + " 0.111111\n", + " 0.166667\n", + " 0.111111\n", + " 0.333333\n", + " 0.444444\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.222222\n", + " 0.333333\n", + " 0.222222\n", + " 0.500000\n", + " 0.750000\n", + " 0.500000\n", + " 0.600000\n", + " 0.500000\n", + " 0.600000\n", + " 0.200000\n", + " 0.125000\n", + " 0.200000\n", + " 0.125000\n", + " 0.500000\n", + " 0.500000\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.5\n", + " 0.625000\n", + " 1.000000\n", + " 0.625000\n", + " 1.000000\n", + " \n", + " \n", + " e6x5he5\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 0.0\n", + " 4.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.000000\n", + " 0.250000\n", + " 1.000000\n", + " 0.250000\n", + " 0.500000\n", + " 0.500000\n", + " 0.000000\n", + " 0.250000\n", + " 0.000000\n", + " 0.250000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 3.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 4.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 6.0\n", + " 6.0\n", + " 6.0\n", + " 0.0\n", + " 5.0\n", + " 1.386294\n", + " 1.386294\n", + " 1.386294\n", + " 1.386294\n", + " 1.332179\n", + " 1.427061\n", + " 0.000000\n", + " 
1.386294\n", + " 0.000000\n", + " 1.386294\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 4.0\n", + " 2.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 1.000000\n", + " 1.600000\n", + " 1.000000\n", + " 1.600000\n", + " 1.250000\n", + " 1.800000\n", + " 4.000000\n", + " 1.600000\n", + " 4.000000\n", + " 1.600000\n", + " 0.800000\n", + " 1.600000\n", + " 0.444444\n", + " 0.8\n", + " 0.555556\n", + " 0.9\n", + " 0.800000\n", + " 1.600000\n", + " 0.800000\n", + " 1.600000\n", + " 0.250000\n", + " 0.125000\n", + " 0.250000\n", + " 0.125000\n", + " 0.200000\n", + " 0.222222\n", + " 0.000000\n", + " 0.125000\n", + " 0.000000\n", + " 0.125000\n", + " 0.250000\n", + " 0.500000\n", + " 0.250000\n", + " 0.500000\n", + " 0.400000\n", + " 0.444444\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 0.000000\n", + " 0.200000\n", + " 0.000000\n", + " 0.200000\n", + " 0.250000\n", + " 0.400000\n", + " 1.000000\n", + " 0.200000\n", + " 1.000000\n", + " 0.200000\n", + " 0.800000\n", + " 1.000000\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.5\n", + " 0.200000\n", + " 1.000000\n", + " 0.200000\n", + " 1.000000\n", + " \n", + " \n", + " e6l9uyf\n", + " 0.0\n", + " 3.0\n", + " 0.0\n", + " 4.0\n", + " 0.0\n", + " 4.0\n", + " 4.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 0.500000\n", + " 0.400000\n", + " 0.500000\n", + " 0.400000\n", + " 0.500000\n", + " 0.400000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 0.0\n", + " 3.0\n", + " 0.0\n", + " 3.0\n", + " 0.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 11.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 1.039721\n", + " 
1.149060\n", + " 1.039721\n", + " 1.149060\n", + " 1.039721\n", + " 1.149060\n", + " 1.386294\n", + " 2.043192\n", + " 1.386294\n", + " 2.043192\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 2.0\n", + " 5.0\n", + " 2.0\n", + " 5.0\n", + " 2.0\n", + " 5.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.333333\n", + " 2.250000\n", + " 1.333333\n", + " 2.250000\n", + " 1.333333\n", + " 2.250000\n", + " 1.000000\n", + " 1.125000\n", + " 1.000000\n", + " 1.125000\n", + " 0.500000\n", + " 1.000000\n", + " 0.444444\n", + " 0.9\n", + " 0.444444\n", + " 0.9\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.111111\n", + " 0.250000\n", + " 0.111111\n", + " 0.500000\n", + " 0.555556\n", + " 0.500000\n", + " 0.555556\n", + " 0.500000\n", + " 0.555556\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.333333\n", + " 0.500000\n", + " 0.333333\n", + " 0.500000\n", + " 0.333333\n", + " 0.500000\n", + " 0.000000\n", + " 0.125000\n", + " 0.000000\n", + " 0.125000\n", + " 0.375000\n", + " 0.444444\n", + " 0.333333\n", + " 0.4\n", + " 0.333333\n", + " 0.4\n", + " 0.500000\n", + " 0.888889\n", + " 0.500000\n", + " 0.888889\n", + " \n", + " \n", + " e57hyr1\n", + " 1.0\n", + " 5.0\n", + " 1.0\n", + " 5.0\n", + " 1.0\n", + " 5.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 4.0\n", + " 0.0\n", + " 4.0\n", + " 0.0\n", + " 4.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 
3.0\n", + " 1.0\n", + " 7.0\n", + " 0.0\n", + " 3.0\n", + " 2.0\n", + " 2.0\n", + " 1.332179\n", + " 1.427061\n", + " 1.332179\n", + " 1.427061\n", + " 1.332179\n", + " 1.427061\n", + " 1.609438\n", + " 1.735126\n", + " 1.609438\n", + " 1.735126\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 4.0\n", + " 2.0\n", + " 4.0\n", + " 2.0\n", + " 4.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.250000\n", + " 1.800000\n", + " 1.250000\n", + " 1.800000\n", + " 1.250000\n", + " 1.800000\n", + " 1.000000\n", + " 1.500000\n", + " 1.000000\n", + " 1.500000\n", + " 0.833333\n", + " 1.285714\n", + " 0.555556\n", + " 0.9\n", + " 0.555556\n", + " 0.9\n", + " 0.833333\n", + " 1.285714\n", + " 0.833333\n", + " 1.285714\n", + " 0.200000\n", + " 0.222222\n", + " 0.200000\n", + " 0.222222\n", + " 0.200000\n", + " 0.222222\n", + " 0.200000\n", + " 0.222222\n", + " 0.200000\n", + " 0.222222\n", + " 0.400000\n", + " 0.444444\n", + " 0.400000\n", + " 0.444444\n", + " 0.400000\n", + " 0.444444\n", + " 0.200000\n", + " 0.222222\n", + " 0.200000\n", + " 0.222222\n", + " 0.250000\n", + " 0.400000\n", + " 0.250000\n", + " 0.400000\n", + " 0.250000\n", + " 0.400000\n", + " 0.000000\n", + " 0.500000\n", + " 0.000000\n", + " 0.500000\n", + " 0.666667\n", + " 0.714286\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.5\n", + " 0.833333\n", + " 0.857143\n", + " 0.833333\n", + " 0.857143\n", + " \n", + " \n", + " e5b8sj7\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.500000\n", + " 0.750000\n", + " 0.666667\n", + " 1.000000\n", + " 0.666667\n", + " 1.000000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 
0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 4.0\n", + " 7.0\n", + " 10.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 0.636514\n", + " 1.060857\n", + " 1.011404\n", + " 1.310784\n", + " 1.011404\n", + " 1.310784\n", + " 1.791759\n", + " 2.043192\n", + " 1.791759\n", + " 2.043192\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 4.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 3.000000\n", + " 3.000000\n", + " 2.000000\n", + " 2.250000\n", + " 2.000000\n", + " 2.250000\n", + " 1.000000\n", + " 1.125000\n", + " 1.000000\n", + " 1.125000\n", + " 0.750000\n", + " 1.125000\n", + " 0.666667\n", + " 0.9\n", + " 0.666667\n", + " 0.9\n", + " 0.750000\n", + " 1.125000\n", + " 0.750000\n", + " 1.125000\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.166667\n", + " 0.111111\n", + " 0.166667\n", + " 0.111111\n", + " 0.666667\n", + " 0.444444\n", + " 0.500000\n", + " 0.333333\n", + " 0.500000\n", + " 0.333333\n", + " 0.166667\n", + " 0.222222\n", + " 0.166667\n", + " 0.222222\n", + " 1.000000\n", + " 1.000000\n", + " 0.666667\n", + " 0.750000\n", + " 0.666667\n", + " 0.750000\n", + " 0.000000\n", + " 0.125000\n", + " 0.000000\n", + " 0.125000\n", + " 0.250000\n", + " 0.375000\n", + " 0.333333\n", + " 0.4\n", + " 0.333333\n", + " 0.4\n", + " 0.750000\n", + " 1.000000\n", + " 0.750000\n", + " 1.000000\n", + " \n", + " \n", + " e6nlep7\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " NaN\n", + " 0.000000\n", + " NaN\n", + " 0.000000\n", + " NaN\n", + " 0.000000\n", + " NaN\n", + " 1.000000\n", + " NaN\n", + " 1.000000\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 
1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 36.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " NaN\n", + " 0.000000\n", + " NaN\n", + " 0.000000\n", + " NaN\n", + " 0.000000\n", + " NaN\n", + " 2.197225\n", + " NaN\n", + " 2.197225\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 9.0\n", + " 0.0\n", + " 9.0\n", + " 0.0\n", + " 9.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.000000\n", + " 9.000000\n", + " 0.000000\n", + " 9.000000\n", + " 0.000000\n", + " 9.000000\n", + " 0.000000\n", + " 1.000000\n", + " 0.000000\n", + " 1.000000\n", + " 0.000000\n", + " 0.900000\n", + " 0.000000\n", + " 0.9\n", + " 0.000000\n", + " 0.9\n", + " 0.000000\n", + " 0.900000\n", + " 0.000000\n", + " 0.900000\n", + " NaN\n", + " 0.000000\n", + " NaN\n", + " 0.000000\n", + " NaN\n", + " 0.000000\n", + " NaN\n", + " 0.111111\n", + " NaN\n", + " 0.111111\n", + " 0.000000\n", + " 1.000000\n", + " 0.000000\n", + " 1.000000\n", + " 0.000000\n", + " 1.000000\n", + " 0.000000\n", + " 0.111111\n", + " 0.000000\n", + " 0.111111\n", + " 0.000000\n", + " 1.000000\n", + " 0.000000\n", + " 1.000000\n", + " 0.000000\n", + " 1.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.100000\n", + " 0.000000\n", + " 0.1\n", + " 0.000000\n", + " 0.1\n", + " 0.000000\n", + " 0.900000\n", + " 0.000000\n", + " 0.900000\n", + " \n", + " \n", + " e6ltazd\n", + " 4.0\n", + " 0.0\n", + " 4.0\n", + " 0.0\n", + " 4.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 0.0\n", + " 5.0\n", + " 0.250000\n", + " 0.250000\n", + " 0.250000\n", + " 0.250000\n", + " 0.250000\n", + " 0.250000\n", + " 1.000000\n", + " 0.500000\n", + " 0.666667\n", + " 1.000000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 
1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 3.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 4.0\n", + " 6.0\n", + " 6.0\n", + " 0.0\n", + " 1.0\n", + " 4.0\n", + " 4.0\n", + " 0.500402\n", + " 0.867563\n", + " 1.386294\n", + " 1.581094\n", + " 1.386294\n", + " 1.581094\n", + " 1.609438\n", + " 1.560710\n", + " 1.494175\n", + " 1.464816\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 1.0\n", + " 2.0\n", + " 3.0\n", + " 3.0\n", + " 2.500000\n", + " 2.000000\n", + " 1.600000\n", + " 1.500000\n", + " 1.600000\n", + " 1.500000\n", + " 1.000000\n", + " 1.200000\n", + " 1.600000\n", + " 1.800000\n", + " 1.000000\n", + " 1.000000\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 1.000000\n", + " 1.000000\n", + " 1.600000\n", + " 1.500000\n", + " 0.200000\n", + " 0.166667\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.200000\n", + " 0.166667\n", + " 0.250000\n", + " 0.333333\n", + " 0.800000\n", + " 0.666667\n", + " 0.500000\n", + " 0.444444\n", + " 0.500000\n", + " 0.444444\n", + " 0.200000\n", + " 0.333333\n", + " 0.375000\n", + " 0.333333\n", + " 0.500000\n", + " 0.333333\n", + " 0.200000\n", + " 0.166667\n", + " 0.200000\n", + " 0.166667\n", + " 0.000000\n", + " 0.200000\n", + " 0.400000\n", + " 0.400000\n", + " 0.400000\n", + " 0.500000\n", + " 0.555556\n", + " 0.6\n", + " 0.555556\n", + " 0.6\n", + " 1.000000\n", + " 0.833333\n", + " 1.000000\n", + " 0.833333\n", + " \n", + " \n", + " e57a6qq\n", + " 1.0\n", + " 0.0\n", + " 4.0\n", + " 5.0\n", + " 4.0\n", + " 5.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.666667\n", + " 0.666667\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.500000\n", + " 0.500000\n", + " 
0.666667\n", + " 1.000000\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 3.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 3.0\n", + " 4.0\n", + " 4.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 5.0\n", + " 1.011404\n", + " 1.011404\n", + " 1.732868\n", + " 1.889159\n", + " 1.732868\n", + " 1.889159\n", + " 1.560710\n", + " 1.560710\n", + " 1.494175\n", + " 1.464816\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 3.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 3.0\n", + " 2.000000\n", + " 2.000000\n", + " 1.333333\n", + " 1.285714\n", + " 1.333333\n", + " 1.285714\n", + " 1.200000\n", + " 1.200000\n", + " 1.600000\n", + " 1.800000\n", + " 1.200000\n", + " 1.200000\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 1.200000\n", + " 1.200000\n", + " 1.600000\n", + " 1.800000\n", + " 0.333333\n", + " 0.333333\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.166667\n", + " 0.166667\n", + " 0.250000\n", + " 0.333333\n", + " 0.500000\n", + " 0.500000\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.333333\n", + " 0.333333\n", + " 0.375000\n", + " 0.333333\n", + " 0.666667\n", + " 0.666667\n", + " 0.333333\n", + " 0.285714\n", + " 0.333333\n", + " 0.285714\n", + " 0.200000\n", + " 0.200000\n", + " 0.400000\n", + " 0.400000\n", + " 0.600000\n", + " 0.600000\n", + " 0.666667\n", + " 0.7\n", + " 0.666667\n", + " 0.7\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " \n", + " \n", + " e5qc7eb\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.666667\n", + " 1.000000\n", + " 
0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 1.000000\n", + " 0.500000\n", + " 0.500000\n", + " 1.000000\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 0.0\n", + " 3.0\n", + " 4.0\n", + " 3.0\n", + " 4.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 6.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 3.0\n", + " 1.011404\n", + " 1.004242\n", + " 1.277034\n", + " 1.522955\n", + " 1.277034\n", + " 1.522955\n", + " 1.791759\n", + " 1.747868\n", + " 1.747868\n", + " 1.735126\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.000000\n", + " 2.333333\n", + " 1.750000\n", + " 1.800000\n", + " 1.750000\n", + " 1.800000\n", + " 1.000000\n", + " 1.166667\n", + " 1.166667\n", + " 1.500000\n", + " 1.000000\n", + " 1.166667\n", + " 0.777778\n", + " 0.9\n", + " 0.777778\n", + " 0.9\n", + " 1.000000\n", + " 1.166667\n", + " 1.166667\n", + " 1.500000\n", + " 0.333333\n", + " 0.428571\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.166667\n", + " 0.142857\n", + " 0.142857\n", + " 0.222222\n", + " 0.500000\n", + " 0.428571\n", + " 0.428571\n", + " 0.333333\n", + " 0.428571\n", + " 0.333333\n", + " 0.166667\n", + " 0.285714\n", + " 0.285714\n", + " 0.222222\n", + " 0.666667\n", + " 0.666667\n", + " 0.500000\n", + " 0.600000\n", + " 0.500000\n", + " 0.600000\n", + " 0.000000\n", + " 0.166667\n", + " 0.166667\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.5\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " \n", + " \n", + " e6hqt5y\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 
1.0\n", + " 3.0\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 0.250000\n", + " 1.000000\n", + " 0.250000\n", + " 1.000000\n", + " 0.250000\n", + " 1.000000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 4.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 5.0\n", + " 6.0\n", + " 12.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.500402\n", + " 0.964963\n", + " 0.500402\n", + " 0.964963\n", + " 0.500402\n", + " 0.964963\n", + " 1.609438\n", + " 2.043192\n", + " 1.609438\n", + " 2.043192\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 2.500000\n", + " 3.000000\n", + " 2.500000\n", + " 3.000000\n", + " 2.500000\n", + " 3.000000\n", + " 1.000000\n", + " 1.125000\n", + " 1.000000\n", + " 1.125000\n", + " 0.625000\n", + " 1.000000\n", + " 0.555556\n", + " 0.9\n", + " 0.555556\n", + " 0.9\n", + " 0.625000\n", + " 1.000000\n", + " 0.625000\n", + " 1.000000\n", + " 0.200000\n", + " 0.444444\n", + " 0.200000\n", + " 0.444444\n", + " 0.200000\n", + " 0.444444\n", + " 0.200000\n", + " 0.111111\n", + " 0.200000\n", + " 0.111111\n", + " 0.800000\n", + " 0.444444\n", + " 0.800000\n", + " 0.444444\n", + " 0.800000\n", + " 0.444444\n", + " 0.200000\n", + " 0.222222\n", + " 0.200000\n", + " 0.222222\n", + " 0.500000\n", + " 0.666667\n", + " 0.500000\n", + " 0.666667\n", + " 0.500000\n", + " 0.666667\n", + " 0.000000\n", + " 0.125000\n", + " 0.000000\n", + " 0.125000\n", + " 0.250000\n", + " 0.333333\n", + " 0.222222\n", + " 0.3\n", + " 0.222222\n", + " 0.3\n", + " 0.625000\n", + " 0.888889\n", + " 0.625000\n", + " 0.888889\n", + 
" \n", + " \n", + " e5ua84v\n", + " 3.0\n", + " 4.0\n", + " 4.0\n", + " 5.0\n", + " 4.0\n", + " 5.0\n", + " 3.0\n", + " 4.0\n", + " 3.0\n", + " 4.0\n", + " 0.500000\n", + " 0.500000\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 0.666667\n", + " 1.000000\n", + " 0.666667\n", + " 1.000000\n", + " 0.666667\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 3.0\n", + " 3.0\n", + " 2.0\n", + " 5.0\n", + " 7.0\n", + " 7.0\n", + " 2.0\n", + " 4.0\n", + " 3.0\n", + " 3.0\n", + " 1.213008\n", + " 1.427061\n", + " 1.494175\n", + " 1.676988\n", + " 1.494175\n", + " 1.676988\n", + " 1.732868\n", + " 1.676988\n", + " 1.732868\n", + " 1.676988\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 4.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 2.000000\n", + " 1.800000\n", + " 1.600000\n", + " 1.500000\n", + " 1.600000\n", + " 1.500000\n", + " 1.333333\n", + " 1.500000\n", + " 1.333333\n", + " 1.500000\n", + " 1.333333\n", + " 1.285714\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 1.333333\n", + " 1.285714\n", + " 1.333333\n", + " 1.285714\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.500000\n", + " 0.444444\n", + " 0.375000\n", + " 0.333333\n", + " 0.375000\n", + " 0.333333\n", + " 0.250000\n", + " 0.333333\n", + " 0.250000\n", + " 0.333333\n", + " 0.500000\n", + " 0.400000\n", + " 0.400000\n", + " 0.333333\n", + " 0.400000\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.666667\n", + " 0.714286\n", + " 0.555556\n", + " 0.6\n", + " 
0.555556\n", + " 0.6\n", + " 1.000000\n", + " 0.857143\n", + " 1.000000\n", + " 0.857143\n", + " \n", + " \n", + " e65m7kq\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 0.333333\n", + " 0.750000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 4.0\n", + " 3.0\n", + " 9.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.950271\n", + " 1.214890\n", + " 1.332179\n", + " 1.427061\n", + " 1.332179\n", + " 1.427061\n", + " 1.609438\n", + " 2.043192\n", + " 1.609438\n", + " 2.043192\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 4.0\n", + " 2.0\n", + " 4.0\n", + " 2.0\n", + " 4.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.666667\n", + " 2.250000\n", + " 1.250000\n", + " 1.800000\n", + " 1.250000\n", + " 1.800000\n", + " 1.000000\n", + " 1.125000\n", + " 1.000000\n", + " 1.125000\n", + " 0.625000\n", + " 1.000000\n", + " 0.555556\n", + " 0.9\n", + " 0.555556\n", + " 0.9\n", + " 0.625000\n", + " 1.000000\n", + " 0.625000\n", + " 1.000000\n", + " 0.200000\n", + " 0.333333\n", + " 0.200000\n", + " 0.222222\n", + " 0.200000\n", + " 0.222222\n", + " 0.200000\n", + " 0.111111\n", + " 0.200000\n", + " 0.111111\n", + " 0.600000\n", + " 0.444444\n", + " 0.400000\n", + " 0.444444\n", + " 0.400000\n", + " 0.444444\n", + " 0.200000\n", + " 0.222222\n", + " 0.200000\n", + " 0.222222\n", + " 0.333333\n", + " 0.500000\n", + " 0.250000\n", + " 0.400000\n", + " 0.250000\n", + " 0.400000\n", + " 0.000000\n", + " 0.125000\n", + " 
0.000000\n", + " 0.125000\n", + " 0.375000\n", + " 0.444444\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.5\n", + " 0.625000\n", + " 0.888889\n", + " 0.625000\n", + " 0.888889\n", + " \n", + " \n", + " e5ggtru\n", + " 3.0\n", + " 4.0\n", + " 4.0\n", + " 5.0\n", + " 4.0\n", + " 5.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 0.400000\n", + " 0.400000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 4.0\n", + " 8.0\n", + " 11.0\n", + " 11.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.900256\n", + " 1.149060\n", + " 1.213008\n", + " 1.427061\n", + " 1.213008\n", + " 1.427061\n", + " 2.079442\n", + " 2.043192\n", + " 2.079442\n", + " 2.043192\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 5.0\n", + " 5.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 2.666667\n", + " 2.250000\n", + " 2.000000\n", + " 1.800000\n", + " 2.000000\n", + " 1.800000\n", + " 1.000000\n", + " 1.125000\n", + " 1.000000\n", + " 1.125000\n", + " 1.000000\n", + " 1.000000\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.625000\n", + " 0.555556\n", + " 0.500000\n", + " 0.444444\n", + " 0.500000\n", + " 0.444444\n", + " 0.125000\n", + " 0.222222\n", + " 0.125000\n", + " 0.222222\n", + " 0.666667\n", + " 0.500000\n", + " 
0.500000\n", + " 0.400000\n", + " 0.500000\n", + " 0.400000\n", + " 0.000000\n", + " 0.125000\n", + " 0.000000\n", + " 0.125000\n", + " 0.375000\n", + " 0.444444\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.5\n", + " 1.000000\n", + " 0.888889\n", + " 1.000000\n", + " 0.888889\n", + " \n", + " \n", + " e5pmmig\n", + " 2.0\n", + " 3.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 2.0\n", + " 3.0\n", + " 0.0\n", + " 3.0\n", + " 1.000000\n", + " 1.000000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 1.000000\n", + " 0.666667\n", + " 0.666667\n", + " 1.000000\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 3.0\n", + " 3.0\n", + " 2.0\n", + " 4.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 4.0\n", + " 4.0\n", + " 4.0\n", + " 1.329661\n", + " 1.549826\n", + " 1.906155\n", + " 2.043192\n", + " 1.906155\n", + " 2.043192\n", + " 1.329661\n", + " 1.277034\n", + " 1.320888\n", + " 1.310784\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 1.500000\n", + " 1.400000\n", + " 1.142857\n", + " 1.125000\n", + " 1.142857\n", + " 1.125000\n", + " 1.500000\n", + " 1.750000\n", + " 2.000000\n", + " 2.250000\n", + " 1.500000\n", + " 1.400000\n", + " 0.888889\n", + " 0.9\n", + " 0.888889\n", + " 0.9\n", + " 1.500000\n", + " 1.400000\n", + " 2.000000\n", + " 1.800000\n", + " 0.333333\n", + " 0.285714\n", + " 0.125000\n", + " 0.111111\n", + " 0.125000\n", + " 0.111111\n", + " 0.333333\n", + " 0.285714\n", + " 0.250000\n", + " 0.333333\n", + " 0.333333\n", + " 0.285714\n", + " 0.250000\n", + " 0.222222\n", + " 0.250000\n", + " 0.222222\n", + " 
0.333333\n", + " 0.428571\n", + " 0.375000\n", + " 0.333333\n", + " 0.500000\n", + " 0.400000\n", + " 0.142857\n", + " 0.125000\n", + " 0.142857\n", + " 0.125000\n", + " 0.500000\n", + " 0.500000\n", + " 0.750000\n", + " 0.750000\n", + " 1.000000\n", + " 1.000000\n", + " 0.777778\n", + " 0.8\n", + " 0.777778\n", + " 0.8\n", + " 1.000000\n", + " 0.800000\n", + " 1.000000\n", + " 0.800000\n", + " \n", + " \n", + " e64l6vq\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 5.0\n", + " 0.0\n", + " 5.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.500000\n", + " 0.666667\n", + " 0.500000\n", + " 0.666667\n", + " 0.500000\n", + " 0.666667\n", + " 0.500000\n", + " 0.500000\n", + " 0.333333\n", + " 0.666667\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 0.0\n", + " 4.0\n", + " 0.0\n", + " 4.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 4.0\n", + " 1.332179\n", + " 1.277034\n", + " 1.560710\n", + " 1.676988\n", + " 1.560710\n", + " 1.676988\n", + " 1.332179\n", + " 1.747868\n", + " 1.242453\n", + " 1.676988\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 2.0\n", + " 3.0\n", + " 3.0\n", + " 1.250000\n", + " 1.750000\n", + " 1.200000\n", + " 1.500000\n", + " 1.200000\n", + " 1.500000\n", + " 1.250000\n", + " 1.166667\n", + " 1.500000\n", + " 1.500000\n", + " 0.833333\n", + " 1.166667\n", + " 0.666667\n", + " 0.9\n", + " 0.666667\n", + " 0.9\n", + " 0.833333\n", + " 1.166667\n", + " 1.000000\n", + " 1.500000\n", + " 0.200000\n", + " 0.285714\n", + " 0.166667\n", + " 0.222222\n", + " 0.166667\n", + " 0.222222\n", + " 0.200000\n", + " 0.142857\n", + " 0.166667\n", + " 0.222222\n", + " 
0.400000\n", + " 0.428571\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.400000\n", + " 0.285714\n", + " 0.500000\n", + " 0.333333\n", + " 0.250000\n", + " 0.500000\n", + " 0.200000\n", + " 0.333333\n", + " 0.200000\n", + " 0.333333\n", + " 0.250000\n", + " 0.166667\n", + " 0.250000\n", + " 0.333333\n", + " 0.666667\n", + " 0.666667\n", + " 0.555556\n", + " 0.6\n", + " 0.555556\n", + " 0.6\n", + " 0.666667\n", + " 1.000000\n", + " 0.666667\n", + " 1.000000\n", + " \n", + " \n", + " e6fjx0d\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 0.750000\n", + " 1.000000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 4.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 6.0\n", + " 6.0\n", + " 0.693147\n", + " 1.039721\n", + " 1.945910\n", + " 2.043192\n", + " 1.945910\n", + " 2.043192\n", + " 0.693147\n", + " 1.039721\n", + " 0.682908\n", + " 0.964963\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 4.0\n", + " 4.0\n", + " 1.000000\n", + " 1.333333\n", + " 1.000000\n", + " 1.125000\n", + " 1.000000\n", + " 1.125000\n", + " 1.000000\n", + " 1.333333\n", + " 3.500000\n", + " 3.000000\n", + " 0.666667\n", + " 1.000000\n", + " 0.777778\n", + " 0.9\n", + " 0.777778\n", + " 0.9\n", + " 0.666667\n", + " 1.000000\n", + " 2.333333\n", + " 2.250000\n", + " 0.500000\n", + " 0.250000\n", + " 0.142857\n", + " 0.111111\n", + " 
0.142857\n", + " 0.111111\n", + " 0.500000\n", + " 0.250000\n", + " 0.428571\n", + " 0.444444\n", + " 0.500000\n", + " 0.500000\n", + " 0.142857\n", + " 0.222222\n", + " 0.142857\n", + " 0.222222\n", + " 0.500000\n", + " 0.500000\n", + " 0.571429\n", + " 0.444444\n", + " 0.000000\n", + " 0.333333\n", + " 0.000000\n", + " 0.125000\n", + " 0.000000\n", + " 0.125000\n", + " 0.000000\n", + " 0.333333\n", + " 1.000000\n", + " 0.666667\n", + " 0.666667\n", + " 0.750000\n", + " 0.777778\n", + " 0.8\n", + " 0.777778\n", + " 0.8\n", + " 0.666667\n", + " 0.750000\n", + " 0.666667\n", + " 0.750000\n", + " \n", + " \n", + " e5h3xyy\n", + " 0.0\n", + " 5.0\n", + " 0.0\n", + " 5.0\n", + " 0.0\n", + " 5.0\n", + " 4.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 0.500000\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 0.0\n", + " 4.0\n", + " 0.0\n", + " 4.0\n", + " 0.0\n", + " 2.0\n", + " 5.0\n", + " 2.0\n", + " 5.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 7.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.332179\n", + " 1.427061\n", + " 1.332179\n", + " 1.427061\n", + " 1.332179\n", + " 1.427061\n", + " 1.609438\n", + " 2.043192\n", + " 1.609438\n", + " 2.043192\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 4.0\n", + " 2.0\n", + " 4.0\n", + " 2.0\n", + " 4.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.250000\n", + " 1.800000\n", + " 1.250000\n", + " 1.800000\n", + " 1.250000\n", + " 1.800000\n", + " 1.000000\n", + " 1.125000\n", + " 1.000000\n", + " 1.125000\n", + " 0.625000\n", + " 1.000000\n", + " 0.555556\n", + " 0.9\n", + " 0.555556\n", + " 0.9\n", + " 0.625000\n", + " 1.000000\n", + " 
0.625000\n", + " 1.000000\n", + " 0.200000\n", + " 0.222222\n", + " 0.200000\n", + " 0.222222\n", + " 0.200000\n", + " 0.222222\n", + " 0.200000\n", + " 0.111111\n", + " 0.200000\n", + " 0.111111\n", + " 0.400000\n", + " 0.444444\n", + " 0.400000\n", + " 0.444444\n", + " 0.400000\n", + " 0.444444\n", + " 0.200000\n", + " 0.222222\n", + " 0.200000\n", + " 0.222222\n", + " 0.250000\n", + " 0.400000\n", + " 0.250000\n", + " 0.400000\n", + " 0.250000\n", + " 0.400000\n", + " 0.000000\n", + " 0.125000\n", + " 0.000000\n", + " 0.125000\n", + " 0.500000\n", + " 0.555556\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.5\n", + " 0.625000\n", + " 0.888889\n", + " 0.625000\n", + " 0.888889\n", + " \n", + " \n", + " e589ri5\n", + " 4.0\n", + " 2.0\n", + " 4.0\n", + " 2.0\n", + " 4.0\n", + " 2.0\n", + " 6.0\n", + " 2.0\n", + " 6.0\n", + " 2.0\n", + " 1.000000\n", + " 0.166667\n", + " 1.000000\n", + " 0.166667\n", + " 1.000000\n", + " 0.166667\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 4.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 3.0\n", + " 0.0\n", + " 15.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.098612\n", + " 1.002718\n", + " 1.098612\n", + " 1.002718\n", + " 1.098612\n", + " 1.002718\n", + " 1.098612\n", + " 2.197225\n", + " 1.098612\n", + " 2.197225\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 6.0\n", + " 1.0\n", + " 6.0\n", + " 1.0\n", + " 6.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.000000\n", + " 2.250000\n", + " 1.000000\n", + " 2.250000\n", + " 1.000000\n", + " 2.250000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.333333\n", + " 0.900000\n", 
+ " 0.333333\n", + " 0.9\n", + " 0.333333\n", + " 0.9\n", + " 0.333333\n", + " 0.900000\n", + " 0.333333\n", + " 0.900000\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.666667\n", + " 0.333333\n", + " 0.666667\n", + " 0.333333\n", + " 0.666667\n", + " 0.333333\n", + " 0.111111\n", + " 0.333333\n", + " 0.111111\n", + " 0.000000\n", + " 0.250000\n", + " 0.000000\n", + " 0.250000\n", + " 0.000000\n", + " 0.250000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.333333\n", + " 0.400000\n", + " 0.333333\n", + " 0.4\n", + " 0.333333\n", + " 0.4\n", + " 0.333333\n", + " 0.900000\n", + " 0.333333\n", + " 0.900000\n", + " \n", + " \n", + " e5beuqa\n", + " 0.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.250000\n", + " 0.500000\n", + " 0.333333\n", + " 0.666667\n", + " 0.333333\n", + " 0.666667\n", + " 0.333333\n", + " 0.500000\n", + " 0.333333\n", + " 0.500000\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 3.0\n", + " 3.0\n", + " 1.0\n", + " 4.0\n", + " 6.0\n", + " 7.0\n", + " 3.0\n", + " 7.0\n", + " 3.0\n", + " 3.0\n", + " 1.153742\n", + " 1.427061\n", + " 1.475076\n", + " 1.676988\n", + " 1.475076\n", + " 1.676988\n", + " 1.475076\n", + " 1.427061\n", + " 1.475076\n", + " 1.427061\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 4.0\n", + " 4.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 3.0\n", + " 4.0\n", + " 3.0\n", + " 4.0\n", + " 1.750000\n", + " 1.800000\n", + " 1.400000\n", + " 1.500000\n", + " 1.400000\n", + " 1.500000\n", + " 
1.400000\n", + " 1.800000\n", + " 1.400000\n", + " 1.800000\n", + " 1.400000\n", + " 1.500000\n", + " 0.777778\n", + " 0.9\n", + " 0.777778\n", + " 0.9\n", + " 1.400000\n", + " 1.500000\n", + " 1.400000\n", + " 1.500000\n", + " 0.142857\n", + " 0.222222\n", + " 0.142857\n", + " 0.222222\n", + " 0.142857\n", + " 0.222222\n", + " 0.142857\n", + " 0.222222\n", + " 0.142857\n", + " 0.222222\n", + " 0.571429\n", + " 0.444444\n", + " 0.428571\n", + " 0.333333\n", + " 0.428571\n", + " 0.333333\n", + " 0.428571\n", + " 0.444444\n", + " 0.428571\n", + " 0.444444\n", + " 0.250000\n", + " 0.400000\n", + " 0.200000\n", + " 0.333333\n", + " 0.200000\n", + " 0.333333\n", + " 0.200000\n", + " 0.400000\n", + " 0.200000\n", + " 0.400000\n", + " 0.800000\n", + " 0.833333\n", + " 0.555556\n", + " 0.6\n", + " 0.555556\n", + " 0.6\n", + " 1.000000\n", + " 0.833333\n", + " 1.000000\n", + " 0.833333\n", + " \n", + " \n", + " e5lqoj1\n", + " 4.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 4.0\n", + " 1.0\n", + " 5.0\n", + " 1.0\n", + " 5.0\n", + " 6.0\n", + " 1.000000\n", + " 0.200000\n", + " 1.000000\n", + " 0.200000\n", + " 1.000000\n", + " 0.200000\n", + " 0.500000\n", + " 0.333333\n", + " 1.000000\n", + " 0.666667\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 4.0\n", + " 5.0\n", + " 4.0\n", + " 5.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 2.0\n", + " 0.0\n", + " 10.0\n", + " 1.0\n", + " 3.0\n", + " 2.0\n", + " 2.0\n", + " 1.098612\n", + " 1.073543\n", + " 1.386294\n", + " 1.303092\n", + " 1.386294\n", + " 1.303092\n", + " 0.636514\n", + " 1.667462\n", + " 0.693147\n", + " 1.676988\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 5.0\n", + " 1.0\n", + " 5.0\n", + " 1.0\n", + " 5.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 
1.000000\n", + " 2.000000\n", + " 1.000000\n", + " 1.800000\n", + " 1.000000\n", + " 1.800000\n", + " 1.500000\n", + " 1.333333\n", + " 2.000000\n", + " 1.500000\n", + " 0.500000\n", + " 1.142857\n", + " 0.444444\n", + " 0.9\n", + " 0.444444\n", + " 0.9\n", + " 0.500000\n", + " 1.142857\n", + " 0.666667\n", + " 1.285714\n", + " 0.333333\n", + " 0.125000\n", + " 0.250000\n", + " 0.111111\n", + " 0.250000\n", + " 0.111111\n", + " 0.333333\n", + " 0.125000\n", + " 0.500000\n", + " 0.222222\n", + " 0.333333\n", + " 0.625000\n", + " 0.250000\n", + " 0.555556\n", + " 0.250000\n", + " 0.555556\n", + " 0.666667\n", + " 0.375000\n", + " 0.500000\n", + " 0.333333\n", + " 0.000000\n", + " 0.250000\n", + " 0.000000\n", + " 0.200000\n", + " 0.000000\n", + " 0.200000\n", + " 0.500000\n", + " 0.166667\n", + " 1.000000\n", + " 0.333333\n", + " 0.500000\n", + " 0.571429\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.5\n", + " 0.333333\n", + " 0.857143\n", + " 0.333333\n", + " 0.857143\n", + " \n", + " \n", + " e5kvch1\n", + " 1.0\n", + " 4.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 4.0\n", + " 1.0\n", + " 3.0\n", + " 0.500000\n", + " 0.666667\n", + " 1.000000\n", + " 0.333333\n", + " 1.000000\n", + " 0.333333\n", + " 1.000000\n", + " 1.000000\n", + " 0.500000\n", + " 1.000000\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 3.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 4.0\n", + " 0.0\n", + " 2.0\n", + " 3.0\n", + " 3.0\n", + " 1.332179\n", + " 1.494175\n", + " 1.791759\n", + " 1.831020\n", + " 1.791759\n", + " 1.831020\n", + " 1.609438\n", + " 1.732868\n", + " 1.560710\n", + " 1.735126\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 3.0\n", + " 
1.0\n", + " 3.0\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.250000\n", + " 1.600000\n", + " 1.000000\n", + " 1.285714\n", + " 1.000000\n", + " 1.285714\n", + " 1.000000\n", + " 1.333333\n", + " 1.200000\n", + " 1.500000\n", + " 0.833333\n", + " 1.142857\n", + " 0.666667\n", + " 0.9\n", + " 0.666667\n", + " 0.9\n", + " 0.833333\n", + " 1.142857\n", + " 1.000000\n", + " 1.285714\n", + " 0.200000\n", + " 0.250000\n", + " 0.166667\n", + " 0.111111\n", + " 0.166667\n", + " 0.111111\n", + " 0.200000\n", + " 0.250000\n", + " 0.166667\n", + " 0.222222\n", + " 0.400000\n", + " 0.375000\n", + " 0.166667\n", + " 0.333333\n", + " 0.166667\n", + " 0.333333\n", + " 0.200000\n", + " 0.250000\n", + " 0.333333\n", + " 0.222222\n", + " 0.250000\n", + " 0.400000\n", + " 0.000000\n", + " 0.142857\n", + " 0.000000\n", + " 0.142857\n", + " 0.000000\n", + " 0.333333\n", + " 0.200000\n", + " 0.500000\n", + " 0.666667\n", + " 0.714286\n", + " 0.666667\n", + " 0.7\n", + " 0.666667\n", + " 0.7\n", + " 0.833333\n", + " 0.857143\n", + " 0.833333\n", + " 0.857143\n", " \n", " \n", - " e65ca8k\n", + " e6srvwm\n", " 2.0\n", - " 0.0\n", - " 0.222222\n", " 1.0\n", " 2.0\n", - " 0.111111\n", - " 0.9\n", - " 1.125000\n", - " 0.8\n", - " 0.125000\n", - " 2.043192\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.000000\n", + " 0.666667\n", + " 1.000000\n", + " 0.666667\n", + " 1.000000\n", + " 0.666667\n", + " 1.000000\n", " 0.500000\n", - " 8.0\n", + " 1.000000\n", + " 0.500000\n", + " 2.0\n", + " 2.0\n", " 2.0\n", + " 2.0\n", + " 2.0\n", + " 2.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", " 0.0\n", " 0.0\n", - " 0.888889\n", - " 0.222222\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 0.0\n", " 1.0\n", " 1.0\n", " 1.0\n", + " 1.0\n", + " 5.0\n", " 2.0\n", - " 0.111111\n", - " 0.111111\n", - " 4.500000\n", - " 0.9\n", - " 4.500000\n", + " 5.0\n", + " 0.0\n", + " 1.0\n", + " 
1.0\n", + " 1.0\n", + " 1.329661\n", + " 1.522955\n", + " 1.329661\n", + " 1.522955\n", + " 1.329661\n", + " 1.522955\n", + " 1.791759\n", + " 2.043192\n", + " 1.791759\n", + " 2.043192\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 2.0\n", + " 3.0\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " 1.500000\n", + " 1.800000\n", + " 1.500000\n", + " 1.800000\n", + " 1.500000\n", + " 1.800000\n", + " 1.000000\n", " 1.125000\n", " 1.000000\n", - " 0.8\n", + " 1.125000\n", + " 0.750000\n", + " 1.000000\n", + " 0.666667\n", + " 0.9\n", + " 0.666667\n", + " 0.9\n", + " 0.750000\n", + " 1.000000\n", + " 0.750000\n", + " 1.000000\n", + " 0.333333\n", + " 0.222222\n", + " 0.333333\n", + " 0.222222\n", + " 0.333333\n", + " 0.222222\n", + " 0.166667\n", + " 0.111111\n", + " 0.166667\n", + " 0.111111\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.333333\n", + " 0.166667\n", + " 0.222222\n", + " 0.166667\n", + " 0.222222\n", + " 0.500000\n", + " 0.600000\n", " 0.500000\n", + " 0.600000\n", + " 0.500000\n", + " 0.600000\n", + " 0.000000\n", " 0.125000\n", - " 0.348832\n", - " 2.043192\n", + " 0.000000\n", " 0.125000\n", " 0.500000\n", + " 0.555556\n", + " 0.444444\n", + " 0.5\n", + " 0.444444\n", + " 0.5\n", + " 0.750000\n", + " 0.888889\n", + " 0.750000\n", + " 0.888889\n", + " \n", + " \n", + " e5o65mk\n", " 1.0\n", - " 2.0\n", - " 0.0\n", - " 0.0\n", - " 0.500000\n", - " 1.000000\n", " 1.0\n", - " 0.0\n", " 1.0\n", " 1.0\n", - " 0.500000\n", - " 0.000000\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 0.0\n", + " 1.0\n", + " 1.000000\n", " 1.000000\n", " 1.000000\n", " 1.000000\n", - " 2.000000\n", " 1.000000\n", " 0.500000\n", - " 0.000000\n", " 1.000000\n", - " 0.693147\n", - " 0.000000\n", " 1.000000\n", - " 0.000000\n", + " 0.750000\n", + " 1.000000\n", " 1.0\n", - " 7.0\n", + " 
1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 3.0\n", + " 4.0\n", + " 0.0\n", + " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", - " 1.0\n", - " 1.0\n", " 0.0\n", " 0.0\n", " 1.0\n", + " 0.0\n", " 1.0\n", - " 0.142857\n", " 1.0\n", - " 2.0\n", - " 0.142857\n", - " 0.777778\n", - " 1.000000\n", - " 0.777778\n", - " 0.000000\n", - " 1.945910\n", - " 1.000000\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 5.0\n", " 7.0\n", + " 0.693147\n", + " 0.693147\n", + " 1.945910\n", + " 2.079442\n", + " 1.945910\n", + " 2.043192\n", + " 0.693147\n", + " 0.693147\n", + " 0.682908\n", + " 0.693147\n", " 1.0\n", " 1.0\n", - " 1.0\n", - " 1.000000\n", - " 0.142857\n", " 0.0\n", - " 1.0\n", " 0.0\n", - " 2.0\n", - " 0.000000\n", - " 0.142857\n", - " 3.500000\n", - " 0.777778\n", - " 7.000000\n", - " 1.000000\n", - " 0.500000\n", - " 0.777778\n", - " 1.000000\n", - " 0.000000\n", - " 0.000000\n", - " 1.945910\n", " 0.0\n", - " 1.000000\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", + " 1.0\n", " 1.0\n", " 1.0\n", " 1.0\n", + " 2.0\n", + " 1.0\n", " 1.0\n", + " 4.0\n", + " 4.0\n", " 1.000000\n", " 1.000000\n", - " 0.0\n", - " 0.0\n", - " 0.0\n", - " 0.0\n", - " 0.000000\n", - " 0.000000\n", - " 0.500000\n", - " 0.500000\n", " 1.000000\n", " 1.000000\n", + " 1.000000\n", + " 1.125000\n", + " 1.000000\n", + " 1.000000\n", + " 3.500000\n", + " 4.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.777778\n", + " 0.8\n", + " 0.777778\n", + " 0.9\n", + " 1.000000\n", + " 1.000000\n", + " 3.500000\n", + " 4.000000\n", + " 0.500000\n", + " 0.500000\n", + " 0.142857\n", + " 0.125000\n", + " 0.142857\n", + " 0.111111\n", + " 0.500000\n", + " 0.500000\n", + " 0.428571\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", + " 0.142857\n", + " 0.125000\n", + " 0.142857\n", + " 0.222222\n", " 0.500000\n", " 0.500000\n", + " 0.571429\n", + " 0.500000\n", " 0.000000\n", " 0.000000\n", " 0.000000\n", " 
0.000000\n", - " 0.0\n", " 0.000000\n", - " 1.0\n", - " 6.0\n", - " 0.0\n", - " 0.0\n", - " 0.0\n", - " 0.0\n", - " 0.0\n", - " 0.0\n", - " 0.0\n", - " 0.0\n", + " 0.125000\n", + " 0.000000\n", + " 0.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.777778\n", + " 0.8\n", + " 0.777778\n", + " 0.8\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", " \n", " \n", - " e6cdkpy\n", - " 4.0\n", - " 0.0\n", - " 0.444444\n", - " 2.0\n", + " e647cm8\n", + " 1.0\n", + " 1.0\n", " 1.0\n", - " 0.222222\n", - " 0.9\n", - " 2.250000\n", - " 0.4\n", - " 0.750000\n", - " 1.273028\n", - " 0.500000\n", " 1.0\n", - " 4.0\n", " 1.0\n", - " 0.0\n", - " 0.111111\n", - " 0.444444\n", " 1.0\n", " 2.0\n", + " 1.0\n", " 2.0\n", " 1.0\n", - " 0.111111\n", - " 0.222222\n", - " 0.900000\n", - " 0.9\n", - " 1.000000\n", - " 2.250000\n", - " 0.900000\n", - " 0.4\n", - " 0.000000\n", + " 0.333333\n", + " 0.750000\n", + " 0.333333\n", + " 0.750000\n", + " 0.333333\n", " 0.750000\n", - " 2.197225\n", - " 1.273028\n", " 1.000000\n", - " 0.500000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", " 1.0\n", - " 4.0\n", + " 3.0\n", + " 1.0\n", + " 3.0\n", + " 1.0\n", + " 3.0\n", " 1.0\n", - " 0.0\n", - " 0.111111\n", - " 0.444444\n", " 1.0\n", - " 2.0\n", - " 2.0\n", " 1.0\n", - " 0.111111\n", - " 0.222222\n", - " 0.900000\n", - " 0.900000\n", - " 1.000000\n", - " 2.250000\n", - " 0.900000\n", - " 0.400000\n", - " 0.000000\n", - " 0.750000\n", - " 2.197225\n", - " 1.273028\n", - " 1.000000\n", - " 0.500000\n", - " 0.0\n", - " 0.0\n", " 1.0\n", - " 5.0\n", " 0.0\n", " 0.0\n", - " 1.0\n", - " 8.0\n", " 0.0\n", " 0.0\n", - " 2.0\n", " 0.0\n", - " 0.400000\n", - " 2.0\n", + " 0.0\n", " 1.0\n", - " 0.400000\n", - " 0.555556\n", - " 1.666667\n", - " 0.333333\n", - " 0.666667\n", - " 1.054920\n", - " 1.000000\n", + " 0.0\n", " 1.0\n", - " 2.0\n", - " 2.0\n", " 0.0\n", - " 0.200000\n", - " 0.400000\n", + " 0.0\n", " 1.0\n", - " 2.0\n", + " 1.0\n", + " 
4.0\n", " 3.0\n", + " 9.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", " 1.0\n", - " 0.200000\n", - " 0.400000\n", - " 0.555556\n", - " 0.555556\n", - " 1.000000\n", - " 1.666667\n", - " 0.555556\n", - " 0.333333\n", - " 0.000000\n", - " 0.666667\n", + " 0.950271\n", + " 1.214890\n", + " 0.950271\n", + " 1.214890\n", + " 0.950271\n", + " 1.214890\n", " 1.609438\n", - " 1.054920\n", - " 1.0\n", - " 1.000000\n", - " 1.0\n", - " 2.0\n", - " 2.0\n", + " 2.197225\n", + " 1.609438\n", + " 2.197225\n", " 0.0\n", - " 0.200000\n", - " 0.400000\n", " 1.0\n", - " 2.0\n", - " 3.0\n", " 1.0\n", - " 0.200000\n", - " 0.400000\n", - " 0.555556\n", - " 0.555556\n", - " 1.000000\n", - " 1.666667\n", - " 0.555556\n", - " 0.333333\n", - " 0.000000\n", - " 0.666667\n", - " 1.609438\n", - " 1.054920\n", " 1.0\n", - " 1.000000\n", - " 0.0\n", - " 0.0\n", " 1.0\n", " 1.0\n", " 0.0\n", " 0.0\n", - " 1.0\n", - " 2.0\n", " 0.0\n", - " 0.0\n", - " \n", - " \n", - " e5wc4tj\n", " 1.0\n", - " 0.0\n", - " 0.111111\n", + " 3.0\n", + " 4.0\n", + " 3.0\n", + " 4.0\n", + " 3.0\n", + " 4.0\n", " 1.0\n", " 1.0\n", - " 0.111111\n", - " 0.9\n", + " 1.0\n", + " 1.0\n", + " 1.666667\n", + " 2.250000\n", + " 1.666667\n", + " 2.250000\n", + " 1.666667\n", + " 2.250000\n", + " 1.000000\n", " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.555556\n", + " 1.000000\n", + " 0.555556\n", " 0.9\n", - " 0.000000\n", - " 2.197225\n", + " 0.555556\n", + " 0.9\n", + " 0.555556\n", " 1.000000\n", - " 5.0\n", - " 1.0\n", - " 1.0\n", - " 0.0\n", " 0.555556\n", + " 1.000000\n", + " 0.200000\n", + " 0.333333\n", + " 0.200000\n", + " 0.333333\n", + " 0.200000\n", + " 0.333333\n", + " 0.200000\n", " 0.111111\n", - " 4.0\n", - " 1.0\n", - " 0.0\n", - " 1.0\n", + " 0.200000\n", + " 0.111111\n", + " 0.600000\n", + " 0.444444\n", + " 0.600000\n", + " 0.444444\n", + " 0.600000\n", + " 0.444444\n", + " 0.200000\n", + " 0.111111\n", + " 0.200000\n", + " 0.111111\n", + " 0.333333\n", + " 0.500000\n", + " 0.333333\n", + " 
0.500000\n", + " 0.333333\n", + " 0.500000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.333333\n", " 0.444444\n", - " 0.111111\n", - " 4.500000\n", - " 0.9\n", - " 4.500000\n", - " 1.000000\n", - " 1.000000\n", - " 0.9\n", + " 0.333333\n", + " 0.4\n", + " 0.333333\n", + " 0.4\n", + " 0.555556\n", " 1.000000\n", - " 0.000000\n", - " 0.686962\n", - " 2.197225\n", - " 0.800000\n", + " 0.555556\n", " 1.000000\n", + " \n", + " \n", + " e58n526\n", " 1.0\n", " 1.0\n", - " 0.0\n", - " 0.0\n", - " 0.500000\n", - " 0.500000\n", " 1.0\n", " 1.0\n", " 1.0\n", " 1.0\n", - " 0.500000\n", - " 0.500000\n", - " 1.000000\n", - " 1.000000\n", - " 1.000000\n", - " 1.000000\n", - " 1.000000\n", - " 1.000000\n", + " 1.0\n", + " 2.0\n", + " 1.0\n", + " 2.0\n", + " NaN\n", " 0.000000\n", + " NaN\n", " 0.000000\n", - " 0.693147\n", - " 0.693147\n", + " NaN\n", + " 0.000000\n", + " NaN\n", " 1.000000\n", + " NaN\n", " 1.000000\n", - " 1.0\n", - " 8.0\n", " 0.0\n", " 0.0\n", - " 1.0\n", - " 1.0\n", " 0.0\n", " 0.0\n", " 0.0\n", " 0.0\n", - " 1.0\n", " 0.0\n", - " 0.125000\n", " 1.0\n", - " 1.0\n", - " 0.125000\n", - " 0.888889\n", - " 1.000000\n", - " 0.888889\n", - " 0.000000\n", - " 2.079442\n", - " 1.000000\n", - " 4.0\n", + " 0.0\n", " 1.0\n", " 0.0\n", " 0.0\n", - " 0.500000\n", - " 0.125000\n", - " 4.0\n", - " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", " 1.0\n", + " 0.0\n", " 1.0\n", - " 0.500000\n", - " 0.125000\n", - " 4.000000\n", - " 0.888889\n", - " 4.000000\n", - " 1.000000\n", - " 1.000000\n", - " 0.888889\n", - " 1.000000\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 36.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " NaN\n", " 0.000000\n", - " 0.693147\n", - " 2.079442\n", - " 1.0\n", - " 1.000000\n", - " 1.0\n", - " 1.0\n", + " NaN\n", + " 0.000000\n", + " NaN\n", + " 0.000000\n", + " NaN\n", + " 2.197225\n", + " NaN\n", + " 2.197225\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", " 0.0\n", " 
0.0\n", - " 0.500000\n", - " 0.500000\n", - " 1.0\n", " 1.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", + " 9.0\n", + " 0.0\n", + " 9.0\n", + " 0.0\n", + " 9.0\n", + " 0.0\n", " 1.0\n", + " 0.0\n", " 1.0\n", - " 0.500000\n", - " 0.500000\n", + " 0.000000\n", + " 9.000000\n", + " 0.000000\n", + " 9.000000\n", + " 0.000000\n", + " 9.000000\n", + " 0.000000\n", " 1.000000\n", + " 0.000000\n", " 1.000000\n", + " 0.000000\n", + " 0.900000\n", + " 0.000000\n", + " 0.9\n", + " 0.000000\n", + " 0.9\n", + " 0.000000\n", + " 0.900000\n", + " 0.000000\n", + " 0.900000\n", + " NaN\n", + " 0.000000\n", + " NaN\n", + " 0.000000\n", + " NaN\n", + " 0.000000\n", + " NaN\n", + " 0.111111\n", + " NaN\n", + " 0.111111\n", + " 0.000000\n", " 1.000000\n", + " 0.000000\n", " 1.000000\n", + " 0.000000\n", " 1.000000\n", + " 0.000000\n", + " 0.111111\n", + " 0.000000\n", + " 0.111111\n", + " 0.000000\n", " 1.000000\n", " 0.000000\n", + " 1.000000\n", " 0.000000\n", - " 0.693147\n", - " 0.693147\n", - " 1.0\n", " 1.000000\n", - " 1.0\n", - " 7.0\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.000000\n", + " 0.100000\n", + " 0.000000\n", + " 0.1\n", + " 0.000000\n", + " 0.1\n", + " 0.000000\n", + " 0.900000\n", + " 0.000000\n", + " 0.900000\n", + " \n", + " \n", + " e69r2kg\n", " 0.0\n", " 0.0\n", + " 4.0\n", " 1.0\n", - " 1.0\n", - " 0.0\n", - " 0.0\n", - " 0.0\n", - " 0.0\n", - " \n", - " \n", - " e6ua0sb\n", " 4.0\n", - " 2.0\n", - " 0.444444\n", - " 2.0\n", + " 1.0\n", + " 1.0\n", " 0.0\n", - " 0.222222\n", - " 0.9\n", - " 1.800000\n", - " 0.5\n", - " 0.400000\n", - " 1.427061\n", - " 0.500000\n", - " 3.0\n", - " 4.0\n", - " 2.0\n", - " 2.0\n", - " 0.333333\n", - " 0.444444\n", - " 2.0\n", - " 2.0\n", " 1.0\n", " 0.0\n", - " 0.222222\n", - " 0.222222\n", - " 1.800000\n", - " 0.9\n", - " 1.800000\n", - " 1.800000\n", - " 1.000000\n", - " 0.5\n", - " 0.600000\n", - " 0.400000\n", - " 1.522955\n", - " 1.427061\n", " 0.666667\n", + " 
0.666667\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 1.000000\n", + " 0.500000\n", + " 0.500000\n", + " 0.500000\n", " 0.500000\n", " 2.0\n", - " 4.0\n", " 2.0\n", - " 0.0\n", - " 0.285714\n", - " 0.571429\n", " 2.0\n", - " 1.0\n", - " 4.0\n", - " 1.0\n", - " 0.285714\n", - " 0.142857\n", - " 1.400000\n", - " 1.400000\n", - " 1.400000\n", - " 1.750000\n", - " 1.000000\n", - " 0.800000\n", - " 0.400000\n", - " 0.250000\n", - " 1.549826\n", - " 1.153742\n", - " 1.000000\n", - " 0.250000\n", - " 1.0\n", - " 3.0\n", - " 1.0\n", - " 4.0\n", - " 1.0\n", + " 2.0\n", + " 2.0\n", " 2.0\n", " 1.0\n", - " 6.0\n", " 1.0\n", - " 2.0\n", - " 4.0\n", " 1.0\n", - " 0.571429\n", " 1.0\n", - " 0.0\n", - " 0.142857\n", - " 0.777778\n", - " 1.750000\n", - " 0.444444\n", - " 0.250000\n", - " 1.153742\n", - " 0.250000\n", " 2.0\n", - " 4.0\n", - " 2.0\n", - " 1.0\n", - " 0.285714\n", - " 0.571429\n", " 2.0\n", - " 1.0\n", - " 4.0\n", " 0.0\n", - " 0.285714\n", - " 0.142857\n", - " 1.400000\n", - " 0.777778\n", - " 1.400000\n", - " 1.750000\n", - " 1.000000\n", - " 0.444444\n", - " 0.400000\n", - " 0.250000\n", - " 1.549826\n", - " 1.153742\n", - " 1.0\n", - " 0.250000\n", + " 0.0\n", + " 0.0\n", + " 0.0\n", " 2.0\n", - " 4.0\n", " 2.0\n", - " 1.0\n", - " 0.285714\n", - " 0.571429\n", " 2.0\n", - " 1.0\n", - " 4.0\n", - " 0.0\n", - " 0.285714\n", - " 0.142857\n", - " 1.400000\n", - " 1.400000\n", - " 1.400000\n", - " 1.750000\n", - " 1.000000\n", - " 0.800000\n", - " 0.400000\n", - " 0.250000\n", - " 1.549826\n", - " 1.153742\n", - " 1.0\n", - " 0.250000\n", - " 1.0\n", " 2.0\n", " 1.0\n", + " 2.0\n", + " 4.0\n", + " 5.0\n", " 4.0\n", + " 5.0\n", " 1.0\n", - " 2.0\n", " 1.0\n", - " 6.0\n", " 1.0\n", " 2.0\n", - " \n", - " \n", - " e5ua84v\n", - " 3.0\n", + " 1.277034\n", + " 1.522955\n", + " 1.549826\n", + " 1.735126\n", + " 1.549826\n", + " 1.735126\n", + " 1.747868\n", + " 2.043192\n", + " 1.747868\n", + " 2.043192\n", " 1.0\n", - " 0.333333\n", - " 2.0\n", - " 5.0\n", - 
" 0.222222\n", - " 0.9\n", - " 1.500000\n", - " 0.6\n", - " 0.333333\n", - " 1.676988\n", - " 0.666667\n", - " 3.0\n", - " 3.0\n", " 1.0\n", " 1.0\n", - " 0.333333\n", - " 0.333333\n", - " 2.0\n", - " 2.0\n", - " 4.0\n", - " 5.0\n", - " 0.222222\n", - " 0.222222\n", - " 1.285714\n", - " 0.9\n", - " 1.500000\n", - " 1.500000\n", - " 0.857143\n", - " 0.6\n", - " 0.333333\n", - " 0.333333\n", - " 1.676988\n", - " 1.676988\n", - " 0.666667\n", - " 0.666667\n", - " 3.0\n", - " 4.0\n", " 1.0\n", " 1.0\n", - " 0.333333\n", - " 0.444444\n", - " 2.0\n", - " 2.0\n", - " 4.0\n", - " 4.0\n", - " 0.222222\n", - " 0.222222\n", - " 1.285714\n", - " 1.285714\n", - " 1.500000\n", - " 1.800000\n", - " 0.857143\n", - " 0.714286\n", - " 0.333333\n", - " 0.400000\n", - " 1.676988\n", - " 1.427061\n", - " 0.666667\n", - " 0.500000\n", " 1.0\n", - " 3.0\n", " 1.0\n", - " 5.0\n", " 1.0\n", - " 3.0\n", " 1.0\n", - " 7.0\n", " 1.0\n", - " 4.0\n", " 3.0\n", - " 0.0\n", - " 0.375000\n", + " 3.0\n", " 2.0\n", - " 4.0\n", - " 0.250000\n", - " 0.888889\n", - " 1.600000\n", - " 0.555556\n", - " 0.400000\n", - " 1.494175\n", - " 0.666667\n", " 2.0\n", - " 3.0\n", - " 0.0\n", - " 0.0\n", - " 0.250000\n", - " 0.375000\n", " 2.0\n", " 2.0\n", - " 3.0\n", - " 4.0\n", - " 0.250000\n", - " 0.250000\n", - " 1.333333\n", - " 0.888889\n", - " 1.333333\n", - " 1.600000\n", - " 1.000000\n", - " 0.555556\n", - " 0.333333\n", - " 0.400000\n", - " 1.732868\n", - " 1.494175\n", - " 1.0\n", - " 0.666667\n", " 2.0\n", - " 4.0\n", - " 0.0\n", - " 0.0\n", - " 0.250000\n", - " 0.500000\n", " 2.0\n", " 2.0\n", - " 3.0\n", - " 3.0\n", - " 0.250000\n", - " 0.250000\n", - " 1.333333\n", - " 1.333333\n", - " 1.333333\n", - " 2.000000\n", - " 1.000000\n", - " 0.666667\n", + " 2.0\n", + " 1.750000\n", + " 1.800000\n", + " 1.400000\n", + " 1.500000\n", + " 1.400000\n", + " 1.500000\n", + " 1.166667\n", + " 1.125000\n", + " 1.166667\n", + " 1.125000\n", + " 0.875000\n", + " 1.125000\n", + " 0.777778\n", + " 0.9\n", + " 
0.777778\n", + " 0.9\n", + " 0.875000\n", + " 1.125000\n", + " 0.875000\n", + " 1.125000\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.142857\n", + " 0.111111\n", + " 0.142857\n", + " 0.111111\n", + " 0.428571\n", " 0.333333\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", + " 0.285714\n", + " 0.222222\n", " 0.500000\n", - " 1.732868\n", - " 1.213008\n", - " 1.0\n", + " 0.600000\n", + " 0.400000\n", " 0.500000\n", - " 1.0\n", - " 3.0\n", - " 1.0\n", - " 2.0\n", - " 1.0\n", - " 3.0\n", - " 1.0\n", - " 7.0\n", - " 1.0\n", - " 2.0\n", + " 0.400000\n", + " 0.500000\n", + " 0.166667\n", + " 0.125000\n", + " 0.166667\n", + " 0.125000\n", + " 0.500000\n", + " 0.625000\n", + " 0.555556\n", + " 0.6\n", + " 0.555556\n", + " 0.6\n", + " 0.750000\n", + " 1.000000\n", + " 0.750000\n", + " 1.000000\n", " \n", " \n", "\n", "

10000 rows × 140 columns

\n", "" - ], - "text/plain": [ - " max[indegree over c->c responses] \\\n", - "e5hm9mp 3.0 \n", - "e5ytz1d 3.0 \n", - "e6ls80j 4.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 9.0 \n", - "... ... \n", - "e65ca8k 2.0 \n", - "e6cdkpy 4.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 4.0 \n", - "e5ua84v 3.0 \n", + ], + "text/plain": [ + " 2nd-argmax[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 0.0 \n", + "e6jhojf 0.0 \n", + "e6989ii 1.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 0.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 3.0 \n", + "e5surbt 1.0 \n", + "e58gxii 4.0 \n", + "e64vc8y 2.0 \n", + "e57504g 1.0 \n", + "e5borjq 0.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 5.0 \n", + "e64i9cf 0.0 \n", + "e6q9204 1.0 \n", + "e5modd7 0.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 2.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 1.0 \n", + "e5v91s0 0.0 \n", + "e6n6di6 0.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 2.0 \n", + "e6l9uyf 0.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 4.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 2.0 \n", + "e6hqt5y 2.0 \n", + "e5ua84v 3.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 3.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 0.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 0.0 \n", + "e589ri5 4.0 \n", + "e5beuqa 0.0 \n", + "e5lqoj1 4.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 1.0 \n", + "e69r2kg 0.0 \n", "\n", - " argmax[indegree over c->c responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 0.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 0.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 1.0 \n", + " 2nd-argmax[indegree over C->C responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 3.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 2.0 \n", + "e6m0hsd 0.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 4.0 \n", + "e64vc8y 1.0 \n", + "e57504g 1.0 \n", + "e5borjq 0.0 \n", + "e64n9zv 0.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 2.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 0.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 0.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 3.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 1.0 \n", + "e5v91s0 2.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 3.0 \n", + "e57hyr1 5.0 \n", + "e5b8sj7 0.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 0.0 \n", + "e57a6qq 0.0 \n", + "e5qc7eb 2.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 4.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 4.0 \n", + "e5pmmig 3.0 \n", + "e64l6vq 2.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 5.0 \n", + "e589ri5 2.0 \n", + "e5beuqa 0.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 4.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 1.0 \n", + "e69r2kg 0.0 \n", "\n", - " norm.max[indegree over c->c responses] \\\n", - "e5hm9mp 0.333333 \n", - "e5ytz1d 0.333333 \n", - "e6ls80j 0.444444 \n", - "e5mhgl5 0.222222 \n", - "e6w6fah 1.000000 \n", - "... ... 
\n", - "e65ca8k 0.222222 \n", - "e6cdkpy 0.444444 \n", - "e5wc4tj 0.111111 \n", - "e6ua0sb 0.444444 \n", - "e5ua84v 0.333333 \n", + " 2nd-argmax[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 0.0 \n", + "e6jhojf 0.0 \n", + "e6989ii 1.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 0.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 3.0 \n", + "e5surbt 1.0 \n", + "e58gxii 6.0 \n", + "e64vc8y 2.0 \n", + "e57504g 1.0 \n", + "e5borjq 0.0 \n", + "e64n9zv 0.0 \n", + "e582ud3 5.0 \n", + "e64i9cf 0.0 \n", + "e6q9204 1.0 \n", + "e5modd7 0.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 2.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 2.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 4.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 2.0 \n", + "e5v91s0 0.0 \n", + "e6n6di6 0.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 2.0 \n", + "e6l9uyf 0.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 4.0 \n", + "e57a6qq 4.0 \n", + "e5qc7eb 2.0 \n", + "e6hqt5y 2.0 \n", + "e5ua84v 4.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 4.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 0.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 0.0 \n", + "e589ri5 4.0 \n", + "e5beuqa 2.0 \n", + "e5lqoj1 4.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 1.0 \n", + "e69r2kg 4.0 \n", "\n", - " 2nd-largest[indegree over c->c responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 3.0 \n", - "e6ls80j 3.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 1.0 \n", - "e6cdkpy 2.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 2.0 \n", + " 2nd-argmax[indegree over C->c responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 0.0 \n", + "e5qv9rj 3.0 \n", + "e6jhojf 0.0 \n", + "e6989ii 1.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 2.0 \n", + "e6m0hsd 0.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 2.0 \n", + "e64vc8y 1.0 \n", + "e57504g 1.0 \n", + "e5borjq 0.0 \n", + "e64n9zv 0.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 3.0 \n", + "e6q9204 2.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 3.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 0.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 3.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 1.0 \n", + "e5v91s0 2.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 4.0 \n", + "e57hyr1 5.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 0.0 \n", + "e57a6qq 5.0 \n", + "e5qc7eb 0.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 5.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 5.0 \n", + "e5pmmig 0.0 \n", + "e64l6vq 5.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 5.0 \n", + "e589ri5 2.0 \n", + "e5beuqa 0.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 1.0 \n", + "e69r2kg 1.0 \n", + "\n", + " 2nd-argmax[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 0.0 \n", + "e6jhojf 0.0 \n", + "e6989ii 0.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 0.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 3.0 \n", + "e5surbt 1.0 \n", + "e58gxii 6.0 \n", + "e64vc8y 2.0 \n", + "e57504g 1.0 \n", + "e5borjq 0.0 \n", + "e64n9zv 0.0 \n", + "e582ud3 5.0 \n", + "e64i9cf 0.0 \n", + "e6q9204 1.0 \n", + "e5modd7 0.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 2.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 2.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 4.0 \n", + "e5syrih 1.0 
\n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 2.0 \n", + "e5v91s0 0.0 \n", + "e6n6di6 0.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 0.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 4.0 \n", + "e57a6qq 4.0 \n", + "e5qc7eb 2.0 \n", + "e6hqt5y 2.0 \n", + "e5ua84v 4.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 4.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 0.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 0.0 \n", + "e589ri5 4.0 \n", + "e5beuqa 2.0 \n", + "e5lqoj1 4.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 1.0 \n", + "e69r2kg 4.0 \n", "\n", " 2nd-argmax[indegree over c->c responses] \\\n", - "e5hm9mp 0.0 \n", - "e5ytz1d 2.0 \n", - "e6ls80j 2.0 \n", - "e5mhgl5 4.0 \n", - "e6w6fah 1.0 \n", + "e6p7yrp 1.0 \n", + "e5ywqyk 0.0 \n", + "e5qv9rj 3.0 \n", + "e6jhojf 0.0 \n", + "e6989ii 0.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 2.0 \n", + "e6m0hsd 0.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 2.0 \n", + "e64vc8y 1.0 \n", + "e57504g 1.0 \n", + "e5borjq 0.0 \n", + "e64n9zv 0.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 3.0 \n", + "e6q9204 2.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 3.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 0.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 3.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", "... ... 
\n", - "e65ca8k 2.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 0.0 \n", + "e5smhzk 1.0 \n", + "e5v91s0 2.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 4.0 \n", + "e6l9uyf 4.0 \n", + "e57hyr1 5.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 0.0 \n", + "e57a6qq 5.0 \n", + "e5qc7eb 0.0 \n", + "e6hqt5y 1.0 \n", "e5ua84v 5.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 5.0 \n", + "e5pmmig 0.0 \n", + "e64l6vq 5.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 5.0 \n", + "e589ri5 2.0 \n", + "e5beuqa 0.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 1.0 \n", + "e69r2kg 1.0 \n", "\n", - " norm.2nd-largest[indegree over c->c responses] \\\n", - "e5hm9mp 0.111111 \n", - "e5ytz1d 0.333333 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 0.222222 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 0.111111 \n", - "e6cdkpy 0.222222 \n", - "e5wc4tj 0.111111 \n", - "e6ua0sb 0.222222 \n", - "e5ua84v 0.222222 \n", + " 2nd-argmax[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 2.0 \n", + "e5ywqyk 2.0 \n", + "e5qv9rj 3.0 \n", + "e6jhojf 2.0 \n", + "e6989ii 1.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 2.0 \n", + "e6mehe7 3.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 1.0 \n", + "e5surbt 3.0 \n", + "e58gxii 0.0 \n", + "e64vc8y 6.0 \n", + "e57504g 1.0 \n", + "e5borjq 2.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 6.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 1.0 \n", + "e5modd7 2.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 2.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 0.0 \n", + "e5syrih 0.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 3.0 \n", + "... ... 
\n", + "e5smhzk 2.0 \n", + "e5v91s0 4.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 5.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 4.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 0.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 3.0 \n", + "e5ua84v 3.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 0.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 4.0 \n", + "e589ri5 6.0 \n", + "e5beuqa 0.0 \n", + "e5lqoj1 5.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 2.0 \n", + "e58n526 1.0 \n", + "e69r2kg 1.0 \n", "\n", - " mean[indegree over c->c responses] \\\n", - "e5hm9mp 0.9 \n", - "e5ytz1d 0.9 \n", - "e6ls80j 0.9 \n", - "e5mhgl5 0.9 \n", - "e6w6fah 0.9 \n", - "... ... \n", - "e65ca8k 0.9 \n", - "e6cdkpy 0.9 \n", - "e5wc4tj 0.9 \n", - "e6ua0sb 0.9 \n", - "e5ua84v 0.9 \n", + " 2nd-argmax[outdegree over C->C responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 2.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 2.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 0.0 \n", + "e64r385 3.0 \n", + "e5surbt 2.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 2.0 \n", + "e57504g 6.0 \n", + "e5borjq 0.0 \n", + "e64n9zv 0.0 \n", + "e582ud3 2.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 2.0 \n", + "e5modd7 2.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 0.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 2.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 3.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 0.0 \n", + "... ... 
\n", + "e5smhzk 1.0 \n", + "e5v91s0 2.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 3.0 \n", + "e5b8sj7 0.0 \n", + "e6nlep7 2.0 \n", + "e6ltazd 2.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 0.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 4.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 2.0 \n", + "e5pmmig 3.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 2.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 2.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 4.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 2.0 \n", + "e69r2kg 0.0 \n", "\n", - " mean-nonzero[indegree over c->c responses] \\\n", - "e5hm9mp 1.285714 \n", - "e5ytz1d 2.250000 \n", - "e6ls80j 2.250000 \n", - "e5mhgl5 1.285714 \n", - "e6w6fah 9.000000 \n", - "... ... \n", - "e65ca8k 1.125000 \n", - "e6cdkpy 2.250000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.800000 \n", - "e5ua84v 1.500000 \n", + " 2nd-argmax[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 2.0 \n", + "e5ywqyk 3.0 \n", + "e5qv9rj 3.0 \n", + "e6jhojf 2.0 \n", + "e6989ii 0.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 2.0 \n", + "e6mehe7 3.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 1.0 \n", + "e5surbt 3.0 \n", + "e58gxii 0.0 \n", + "e64vc8y 6.0 \n", + "e57504g 1.0 \n", + "e5borjq 2.0 \n", + "e64n9zv 2.0 \n", + "e582ud3 6.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 2.0 \n", + "e5modd7 2.0 \n", + "e5xhbyd 0.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 2.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 0.0 \n", + "e5syrih 0.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 3.0 \n", + "... ... 
\n", + "e5smhzk 2.0 \n", + "e5v91s0 4.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 5.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 4.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 0.0 \n", + "e57a6qq 0.0 \n", + "e5qc7eb 0.0 \n", + "e6hqt5y 3.0 \n", + "e5ua84v 3.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 0.0 \n", + "e64l6vq 0.0 \n", + "e6fjx0d 0.0 \n", + "e5h3xyy 4.0 \n", + "e589ri5 6.0 \n", + "e5beuqa 0.0 \n", + "e5lqoj1 5.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 2.0 \n", + "e58n526 1.0 \n", + "e69r2kg 1.0 \n", "\n", - " prop-nonzero[indegree over c->c responses] \\\n", - "e5hm9mp 0.7 \n", - "e5ytz1d 0.4 \n", - "e6ls80j 0.4 \n", - "e5mhgl5 0.7 \n", - "e6w6fah 0.1 \n", - "... ... \n", - "e65ca8k 0.8 \n", - "e6cdkpy 0.4 \n", - "e5wc4tj 0.9 \n", - "e6ua0sb 0.5 \n", - "e5ua84v 0.6 \n", + " 2nd-argmax[outdegree over C->c responses] \\\n", + "e6p7yrp 3.0 \n", + "e5ywqyk 3.0 \n", + "e5qv9rj 2.0 \n", + "e6jhojf 2.0 \n", + "e6989ii 1.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 2.0 \n", + "e6mehe7 3.0 \n", + "e6m0hsd 0.0 \n", + "e64r385 3.0 \n", + "e5surbt 2.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 2.0 \n", + "e57504g 6.0 \n", + "e5borjq 0.0 \n", + "e64n9zv 2.0 \n", + "e582ud3 2.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 3.0 \n", + "e5modd7 2.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 0.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 2.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 3.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 0.0 \n", + "... ... 
\n", + "e5smhzk 2.0 \n", + "e5v91s0 2.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 3.0 \n", + "e5b8sj7 0.0 \n", + "e6nlep7 2.0 \n", + "e6ltazd 5.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 4.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 2.0 \n", + "e5pmmig 3.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 2.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 2.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 6.0 \n", + "e5kvch1 3.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 2.0 \n", + "e69r2kg 0.0 \n", "\n", - " prop-multiple[indegree over c->c responses] \\\n", - "e5hm9mp 0.142857 \n", - "e5ytz1d 0.750000 \n", - "e6ls80j 0.500000 \n", - "e5mhgl5 0.285714 \n", - "e6w6fah 1.000000 \n", - "... ... \n", - "e65ca8k 0.125000 \n", - "e6cdkpy 0.750000 \n", - "e5wc4tj 0.000000 \n", - "e6ua0sb 0.400000 \n", - "e5ua84v 0.333333 \n", + " 2nd-largest / max[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.333333 \n", + "e5ywqyk 0.500000 \n", + "e5qv9rj 0.333333 \n", + "e6jhojf 0.333333 \n", + "e6989ii 1.000000 \n", + "e69lgse 0.333333 \n", + "e5kwkg2 0.500000 \n", + "e6mehe7 0.500000 \n", + "e6m0hsd 0.500000 \n", + "e64r385 1.000000 \n", + "e5surbt 1.000000 \n", + "e58gxii 0.666667 \n", + "e64vc8y 1.000000 \n", + "e57504g 1.000000 \n", + "e5borjq 0.666667 \n", + "e64n9zv 0.500000 \n", + "e582ud3 0.500000 \n", + "e64i9cf 0.500000 \n", + "e6q9204 0.666667 \n", + "e5modd7 0.500000 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 0.333333 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.500000 \n", + "e5d3zaa 1.000000 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 1.000000 \n", + "e5syrih 0.500000 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 0.500000 \n", + "... ... 
\n", + "e5smhzk 1.000000 \n", + "e5v91s0 0.500000 \n", + "e6n6di6 0.500000 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.000000 \n", + "e6x5he5 1.000000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 0.500000 \n", + "e5b8sj7 0.500000 \n", + "e6nlep7 NaN \n", + "e6ltazd 0.250000 \n", + "e57a6qq 0.666667 \n", + "e5qc7eb 0.666667 \n", + "e6hqt5y 0.250000 \n", + "e5ua84v 0.500000 \n", + "e65m7kq 0.333333 \n", + "e5ggtru 0.400000 \n", + "e5pmmig 1.000000 \n", + "e64l6vq 0.500000 \n", + "e6fjx0d 1.000000 \n", + "e5h3xyy 0.500000 \n", + "e589ri5 1.000000 \n", + "e5beuqa 0.250000 \n", + "e5lqoj1 1.000000 \n", + "e5kvch1 0.500000 \n", + "e6srvwm 1.000000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 0.333333 \n", + "e58n526 NaN \n", + "e69r2kg 0.666667 \n", "\n", - " entropy[indegree over c->c responses] \\\n", - "e5hm9mp 1.831020 \n", - "e5ytz1d 1.310784 \n", - "e6ls80j 1.214890 \n", - "e5mhgl5 1.889159 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 2.043192 \n", - "e6cdkpy 1.273028 \n", - "e5wc4tj 2.197225 \n", - "e6ua0sb 1.427061 \n", - "e5ua84v 1.676988 \n", + " 2nd-largest / max[indegree over C->C responses] \\\n", + "e6p7yrp 0.200000 \n", + "e5ywqyk 1.000000 \n", + "e5qv9rj 1.000000 \n", + "e6jhojf 0.250000 \n", + "e6989ii 1.000000 \n", + "e69lgse 1.000000 \n", + "e5kwkg2 0.666667 \n", + "e6mehe7 0.500000 \n", + "e6m0hsd 0.500000 \n", + "e64r385 1.000000 \n", + "e5surbt 0.200000 \n", + "e58gxii 0.500000 \n", + "e64vc8y 0.166667 \n", + "e57504g 1.000000 \n", + "e5borjq 0.666667 \n", + "e64n9zv 0.500000 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.500000 \n", + "e6q9204 0.666667 \n", + "e5modd7 0.500000 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 0.333333 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.500000 \n", + "e5d3zaa 1.000000 \n", + "e5gnjv9 0.125000 \n", + "e69gw2t 1.000000 \n", + "e5syrih 0.500000 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 0.666667 \n", + "... ... 
\n", + "e5smhzk 0.500000 \n", + "e5v91s0 0.400000 \n", + "e6n6di6 0.500000 \n", + "e6iqq30 0.166667 \n", + "e5bfad7 0.500000 \n", + "e6x5he5 0.250000 \n", + "e6l9uyf 0.400000 \n", + "e57hyr1 0.500000 \n", + "e5b8sj7 0.750000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.250000 \n", + "e57a6qq 0.666667 \n", + "e5qc7eb 1.000000 \n", + "e6hqt5y 1.000000 \n", + "e5ua84v 0.500000 \n", + "e65m7kq 0.750000 \n", + "e5ggtru 0.400000 \n", + "e5pmmig 1.000000 \n", + "e64l6vq 0.666667 \n", + "e6fjx0d 0.500000 \n", + "e5h3xyy 0.500000 \n", + "e589ri5 0.166667 \n", + "e5beuqa 0.500000 \n", + "e5lqoj1 0.200000 \n", + "e5kvch1 0.666667 \n", + "e6srvwm 0.666667 \n", + "e5o65mk 1.000000 \n", + "e647cm8 0.750000 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.666667 \n", "\n", - " 2nd-largest / max[indegree over c->c responses] \\\n", - "e5hm9mp 0.333333 \n", - "e5ytz1d 1.000000 \n", - "e6ls80j 0.750000 \n", - "e5mhgl5 1.000000 \n", - "e6w6fah 0.000000 \n", + " 2nd-largest / max[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.000000 \n", + "e5ywqyk 0.500000 \n", + "e5qv9rj 0.333333 \n", + "e6jhojf 0.500000 \n", + "e6989ii 1.000000 \n", + "e69lgse 0.333333 \n", + "e5kwkg2 0.500000 \n", + "e6mehe7 0.500000 \n", + "e6m0hsd 0.500000 \n", + "e64r385 1.000000 \n", + "e5surbt 1.000000 \n", + "e58gxii 1.000000 \n", + "e64vc8y 1.000000 \n", + "e57504g 1.000000 \n", + "e5borjq 0.666667 \n", + "e64n9zv 0.500000 \n", + "e582ud3 0.500000 \n", + "e64i9cf 0.500000 \n", + "e6q9204 1.000000 \n", + "e5modd7 0.500000 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 1.000000 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.500000 \n", + "e5d3zaa 1.000000 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 1.000000 \n", + "e5syrih 0.666667 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.000000 \n", + "... ... 
\n", + "e5smhzk 1.000000 \n", + "e5v91s0 0.500000 \n", + "e6n6di6 0.666667 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.000000 \n", + "e6x5he5 1.000000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 0.500000 \n", + "e5b8sj7 0.666667 \n", + "e6nlep7 NaN \n", + "e6ltazd 0.250000 \n", + "e57a6qq 1.000000 \n", + "e5qc7eb 0.666667 \n", + "e6hqt5y 0.250000 \n", + "e5ua84v 0.666667 \n", + "e65m7kq 0.500000 \n", + "e5ggtru 0.500000 \n", + "e5pmmig 0.500000 \n", + "e64l6vq 0.500000 \n", + "e6fjx0d 1.000000 \n", + "e5h3xyy 0.500000 \n", + "e589ri5 1.000000 \n", + "e5beuqa 0.333333 \n", + "e5lqoj1 1.000000 \n", + "e5kvch1 1.000000 \n", + "e6srvwm 1.000000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 0.333333 \n", + "e58n526 NaN \n", + "e69r2kg 1.000000 \n", + "\n", + " 2nd-largest / max[indegree over C->c responses] \\\n", + "e6p7yrp 0.333333 \n", + "e5ywqyk 0.500000 \n", + "e5qv9rj 1.000000 \n", + "e6jhojf 0.500000 \n", + "e6989ii 1.000000 \n", + "e69lgse 1.000000 \n", + "e5kwkg2 0.666667 \n", + "e6mehe7 0.666667 \n", + "e6m0hsd 0.500000 \n", + "e64r385 1.000000 \n", + "e5surbt 0.200000 \n", + "e58gxii 1.000000 \n", + "e64vc8y 0.166667 \n", + "e57504g 1.000000 \n", + "e5borjq 0.666667 \n", + "e64n9zv 0.500000 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.500000 \n", + "e6q9204 1.000000 \n", + "e5modd7 0.500000 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 1.000000 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.500000 \n", + "e5d3zaa 1.000000 \n", + "e5gnjv9 0.125000 \n", + "e69gw2t 1.000000 \n", + "e5syrih 0.666667 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 0.333333 \n", "... ... 
\n", - "e65ca8k 0.500000 \n", - "e6cdkpy 0.500000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 0.500000 \n", + "e5smhzk 0.500000 \n", + "e5v91s0 0.400000 \n", + "e6n6di6 0.666667 \n", + "e6iqq30 0.166667 \n", + "e5bfad7 0.666667 \n", + "e6x5he5 0.250000 \n", + "e6l9uyf 0.400000 \n", + "e57hyr1 0.500000 \n", + "e5b8sj7 1.000000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.250000 \n", + "e57a6qq 1.000000 \n", + "e5qc7eb 0.666667 \n", + "e6hqt5y 1.000000 \n", "e5ua84v 0.666667 \n", + "e65m7kq 0.500000 \n", + "e5ggtru 0.500000 \n", + "e5pmmig 0.500000 \n", + "e64l6vq 0.666667 \n", + "e6fjx0d 0.500000 \n", + "e5h3xyy 0.500000 \n", + "e589ri5 0.166667 \n", + "e5beuqa 0.666667 \n", + "e5lqoj1 0.200000 \n", + "e5kvch1 0.333333 \n", + "e6srvwm 0.666667 \n", + "e5o65mk 1.000000 \n", + "e647cm8 0.750000 \n", + "e58n526 0.000000 \n", + "e69r2kg 1.000000 \n", "\n", - " max[outdegree over C->c responses] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 3.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 1.0 \n", - "... ... \n", - "e65ca8k 8.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 5.0 \n", - "e6ua0sb 3.0 \n", - "e5ua84v 3.0 \n", + " 2nd-largest / max[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 1.000000 \n", + "e5ywqyk 0.500000 \n", + "e5qv9rj 0.333333 \n", + "e6jhojf 0.500000 \n", + "e6989ii 0.500000 \n", + "e69lgse 0.333333 \n", + "e5kwkg2 0.500000 \n", + "e6mehe7 0.500000 \n", + "e6m0hsd 0.500000 \n", + "e64r385 1.000000 \n", + "e5surbt 1.000000 \n", + "e58gxii 1.000000 \n", + "e64vc8y 1.000000 \n", + "e57504g 1.000000 \n", + "e5borjq 0.666667 \n", + "e64n9zv 0.500000 \n", + "e582ud3 0.500000 \n", + "e64i9cf 0.500000 \n", + "e6q9204 1.000000 \n", + "e5modd7 0.500000 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 1.000000 \n", + "e6nir3u 0.500000 \n", + "e6c3xdn 0.500000 \n", + "e5d3zaa 1.000000 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 1.000000 \n", + "e5syrih 0.666667 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.000000 \n", + "... ... 
\n", + "e5smhzk 1.000000 \n", + "e5v91s0 0.500000 \n", + "e6n6di6 0.666667 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.000000 \n", + "e6x5he5 0.500000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 0.500000 \n", + "e5b8sj7 0.666667 \n", + "e6nlep7 NaN \n", + "e6ltazd 0.250000 \n", + "e57a6qq 1.000000 \n", + "e5qc7eb 0.666667 \n", + "e6hqt5y 0.250000 \n", + "e5ua84v 0.666667 \n", + "e65m7kq 0.500000 \n", + "e5ggtru 0.500000 \n", + "e5pmmig 0.500000 \n", + "e64l6vq 0.500000 \n", + "e6fjx0d 1.000000 \n", + "e5h3xyy 0.500000 \n", + "e589ri5 1.000000 \n", + "e5beuqa 0.333333 \n", + "e5lqoj1 1.000000 \n", + "e5kvch1 1.000000 \n", + "e6srvwm 1.000000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 0.333333 \n", + "e58n526 NaN \n", + "e69r2kg 1.000000 \n", "\n", - " max[indegree over C->c responses] \\\n", - "e5hm9mp 3.0 \n", - "e5ytz1d 3.0 \n", - "e6ls80j 4.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 8.0 \n", - "... ... \n", - "e65ca8k 2.0 \n", - "e6cdkpy 4.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 4.0 \n", - "e5ua84v 3.0 \n", + " 2nd-largest / max[indegree over c->c responses] \\\n", + "e6p7yrp 0.333333 \n", + "e5ywqyk 0.500000 \n", + "e5qv9rj 1.000000 \n", + "e6jhojf 0.500000 \n", + "e6989ii 0.500000 \n", + "e69lgse 1.000000 \n", + "e5kwkg2 0.666667 \n", + "e6mehe7 0.666667 \n", + "e6m0hsd 0.500000 \n", + "e64r385 1.000000 \n", + "e5surbt 0.200000 \n", + "e58gxii 1.000000 \n", + "e64vc8y 0.166667 \n", + "e57504g 1.000000 \n", + "e5borjq 0.666667 \n", + "e64n9zv 0.500000 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.500000 \n", + "e6q9204 1.000000 \n", + "e5modd7 0.500000 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 1.000000 \n", + "e6nir3u 0.500000 \n", + "e6c3xdn 0.500000 \n", + "e5d3zaa 1.000000 \n", + "e5gnjv9 0.125000 \n", + "e69gw2t 1.000000 \n", + "e5syrih 0.666667 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 0.333333 \n", + "... ... 
\n", + "e5smhzk 0.500000 \n", + "e5v91s0 0.400000 \n", + "e6n6di6 0.666667 \n", + "e6iqq30 0.166667 \n", + "e5bfad7 0.666667 \n", + "e6x5he5 0.500000 \n", + "e6l9uyf 0.400000 \n", + "e57hyr1 0.500000 \n", + "e5b8sj7 1.000000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.250000 \n", + "e57a6qq 1.000000 \n", + "e5qc7eb 0.666667 \n", + "e6hqt5y 1.000000 \n", + "e5ua84v 0.666667 \n", + "e65m7kq 0.500000 \n", + "e5ggtru 0.500000 \n", + "e5pmmig 0.500000 \n", + "e64l6vq 0.666667 \n", + "e6fjx0d 0.500000 \n", + "e5h3xyy 0.500000 \n", + "e589ri5 0.166667 \n", + "e5beuqa 0.666667 \n", + "e5lqoj1 0.200000 \n", + "e5kvch1 0.333333 \n", + "e6srvwm 0.666667 \n", + "e5o65mk 0.500000 \n", + "e647cm8 0.750000 \n", + "e58n526 0.000000 \n", + "e69r2kg 1.000000 \n", "\n", - " argmax[outdegree over C->c responses] \\\n", - "e5hm9mp 0.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 3.0 \n", - "e5mhgl5 4.0 \n", - "e6w6fah 1.0 \n", - "... ... \n", - "e65ca8k 0.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 1.0 \n", + " 2nd-largest / max[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.333333 \n", + "e5ywqyk 0.500000 \n", + "e5qv9rj 1.000000 \n", + "e6jhojf 0.250000 \n", + "e6989ii 1.000000 \n", + "e69lgse 0.500000 \n", + "e5kwkg2 1.000000 \n", + "e6mehe7 0.333333 \n", + "e6m0hsd 0.500000 \n", + "e64r385 0.500000 \n", + "e5surbt 1.000000 \n", + "e58gxii 0.333333 \n", + "e64vc8y 1.000000 \n", + "e57504g 0.500000 \n", + "e5borjq 1.000000 \n", + "e64n9zv 0.500000 \n", + "e582ud3 1.000000 \n", + "e64i9cf 0.500000 \n", + "e6q9204 0.500000 \n", + "e5modd7 1.000000 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 0.500000 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 1.000000 \n", + "e5d3zaa 1.000000 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 0.500000 \n", + "e5syrih 0.500000 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 0.250000 \n", + "... ... 
\n", + "e5smhzk 0.500000 \n", + "e5v91s0 1.000000 \n", + "e6n6di6 0.500000 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 0.500000 \n", + "e6x5he5 0.000000 \n", + "e6l9uyf 1.000000 \n", + "e57hyr1 1.000000 \n", + "e5b8sj7 1.000000 \n", + "e6nlep7 NaN \n", + "e6ltazd 1.000000 \n", + "e57a6qq 0.500000 \n", + "e5qc7eb 1.000000 \n", + "e6hqt5y 1.000000 \n", + "e5ua84v 1.000000 \n", + "e65m7kq 1.000000 \n", + "e5ggtru 1.000000 \n", + "e5pmmig 1.000000 \n", + "e64l6vq 0.500000 \n", + "e6fjx0d 1.000000 \n", + "e5h3xyy 1.000000 \n", + "e589ri5 1.000000 \n", + "e5beuqa 0.333333 \n", + "e5lqoj1 0.500000 \n", + "e5kvch1 1.000000 \n", + "e6srvwm 1.000000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 1.000000 \n", + "e58n526 NaN \n", + "e69r2kg 0.500000 \n", "\n", - " argmax[indegree over C->c responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 0.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 0.0 \n", - "... ... \n", - "e65ca8k 0.0 \n", - "e6cdkpy 0.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 1.0 \n", + " 2nd-largest / max[outdegree over C->C responses] \\\n", + "e6p7yrp 0.333333 \n", + "e5ywqyk 0.500000 \n", + "e5qv9rj 1.000000 \n", + "e6jhojf 0.250000 \n", + "e6989ii 1.000000 \n", + "e69lgse 0.500000 \n", + "e5kwkg2 1.000000 \n", + "e6mehe7 0.333333 \n", + "e6m0hsd 0.500000 \n", + "e64r385 1.000000 \n", + "e5surbt 1.000000 \n", + "e58gxii 0.333333 \n", + "e64vc8y 1.000000 \n", + "e57504g 1.000000 \n", + "e5borjq 0.500000 \n", + "e64n9zv 0.500000 \n", + "e582ud3 1.000000 \n", + "e64i9cf 0.500000 \n", + "e6q9204 0.333333 \n", + "e5modd7 1.000000 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 0.500000 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 1.000000 \n", + "e5d3zaa 0.500000 \n", + "e5gnjv9 0.500000 \n", + "e69gw2t 1.000000 \n", + "e5syrih 1.000000 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 0.250000 \n", + "... ... 
\n", + "e5smhzk 0.500000 \n", + "e5v91s0 1.000000 \n", + "e6n6di6 0.500000 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 0.500000 \n", + "e6x5he5 0.250000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 1.000000 \n", + "e5b8sj7 0.500000 \n", + "e6nlep7 1.000000 \n", + "e6ltazd 0.500000 \n", + "e57a6qq 0.500000 \n", + "e5qc7eb 0.500000 \n", + "e6hqt5y 0.500000 \n", + "e5ua84v 0.666667 \n", + "e65m7kq 0.500000 \n", + "e5ggtru 0.500000 \n", + "e5pmmig 0.666667 \n", + "e64l6vq 0.500000 \n", + "e6fjx0d 0.500000 \n", + "e5h3xyy 0.500000 \n", + "e589ri5 1.000000 \n", + "e5beuqa 0.500000 \n", + "e5lqoj1 0.333333 \n", + "e5kvch1 1.000000 \n", + "e6srvwm 0.500000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 1.000000 \n", + "e58n526 1.000000 \n", + "e69r2kg 0.500000 \n", "\n", - " norm.max[outdegree over C->c responses] \\\n", - "e5hm9mp 0.222222 \n", - "e5ytz1d 0.111111 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 0.222222 \n", - "e6w6fah 0.125000 \n", - "... ... \n", - "e65ca8k 0.888889 \n", - "e6cdkpy 0.111111 \n", - "e5wc4tj 0.555556 \n", - "e6ua0sb 0.333333 \n", - "e5ua84v 0.333333 \n", + " 2nd-largest / max[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.333333 \n", + "e5ywqyk 0.750000 \n", + "e5qv9rj 1.000000 \n", + "e6jhojf 0.500000 \n", + "e6989ii 0.750000 \n", + "e69lgse 0.500000 \n", + "e5kwkg2 1.000000 \n", + "e6mehe7 0.666667 \n", + "e6m0hsd 0.750000 \n", + "e64r385 0.500000 \n", + "e5surbt 1.000000 \n", + "e58gxii 0.333333 \n", + "e64vc8y 1.000000 \n", + "e57504g 0.500000 \n", + "e5borjq 1.000000 \n", + "e64n9zv 1.000000 \n", + "e582ud3 1.000000 \n", + "e64i9cf 0.333333 \n", + "e6q9204 0.666667 \n", + "e5modd7 1.000000 \n", + "e5xhbyd 0.666667 \n", + "e5oaf7h 0.750000 \n", + "e6nir3u 0.750000 \n", + "e6c3xdn 1.000000 \n", + "e5d3zaa 1.000000 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 0.666667 \n", + "e5syrih 0.500000 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 0.250000 \n", + "... ... 
\n", + "e5smhzk 0.750000 \n", + "e5v91s0 1.000000 \n", + "e6n6di6 0.500000 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 0.500000 \n", + "e6x5he5 0.000000 \n", + "e6l9uyf 1.000000 \n", + "e57hyr1 1.000000 \n", + "e5b8sj7 1.000000 \n", + "e6nlep7 NaN \n", + "e6ltazd 0.666667 \n", + "e57a6qq 0.666667 \n", + "e5qc7eb 0.500000 \n", + "e6hqt5y 1.000000 \n", + "e5ua84v 1.000000 \n", + "e65m7kq 1.000000 \n", + "e5ggtru 1.000000 \n", + "e5pmmig 0.666667 \n", + "e64l6vq 0.333333 \n", + "e6fjx0d 0.750000 \n", + "e5h3xyy 1.000000 \n", + "e589ri5 1.000000 \n", + "e5beuqa 0.333333 \n", + "e5lqoj1 1.000000 \n", + "e5kvch1 0.500000 \n", + "e6srvwm 1.000000 \n", + "e5o65mk 0.750000 \n", + "e647cm8 1.000000 \n", + "e58n526 NaN \n", + "e69r2kg 0.500000 \n", "\n", - " norm.max[indegree over C->c responses] \\\n", - "e5hm9mp 0.333333 \n", - "e5ytz1d 0.333333 \n", - "e6ls80j 0.444444 \n", - "e5mhgl5 0.222222 \n", - "e6w6fah 1.000000 \n", - "... ... \n", - "e65ca8k 0.222222 \n", - "e6cdkpy 0.444444 \n", - "e5wc4tj 0.111111 \n", - "e6ua0sb 0.444444 \n", - "e5ua84v 0.333333 \n", + " 2nd-largest / max[outdegree over C->c responses] \\\n", + "e6p7yrp 0.666667 \n", + "e5ywqyk 0.750000 \n", + "e5qv9rj 1.000000 \n", + "e6jhojf 0.500000 \n", + "e6989ii 1.000000 \n", + "e69lgse 1.000000 \n", + "e5kwkg2 1.000000 \n", + "e6mehe7 0.666667 \n", + "e6m0hsd 0.600000 \n", + "e64r385 1.000000 \n", + "e5surbt 1.000000 \n", + "e58gxii 0.666667 \n", + "e64vc8y 1.000000 \n", + "e57504g 1.000000 \n", + "e5borjq 0.500000 \n", + "e64n9zv 0.750000 \n", + "e582ud3 1.000000 \n", + "e64i9cf 0.666667 \n", + "e6q9204 0.500000 \n", + "e5modd7 1.000000 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 0.600000 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 1.000000 \n", + "e5d3zaa 0.666667 \n", + "e5gnjv9 0.500000 \n", + "e69gw2t 1.000000 \n", + "e5syrih 1.000000 \n", + "e5sa2yf 0.800000 \n", + "e6ai7z5 0.200000 \n", + "... ... 
\n", + "e5smhzk 1.000000 \n", + "e5v91s0 1.000000 \n", + "e6n6di6 0.500000 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 0.500000 \n", + "e6x5he5 0.250000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 1.000000 \n", + "e5b8sj7 0.500000 \n", + "e6nlep7 1.000000 \n", + "e6ltazd 1.000000 \n", + "e57a6qq 1.000000 \n", + "e5qc7eb 1.000000 \n", + "e6hqt5y 0.500000 \n", + "e5ua84v 0.666667 \n", + "e65m7kq 0.500000 \n", + "e5ggtru 0.500000 \n", + "e5pmmig 1.000000 \n", + "e64l6vq 0.666667 \n", + "e6fjx0d 1.000000 \n", + "e5h3xyy 0.500000 \n", + "e589ri5 1.000000 \n", + "e5beuqa 0.500000 \n", + "e5lqoj1 0.666667 \n", + "e5kvch1 1.000000 \n", + "e6srvwm 0.500000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 1.000000 \n", + "e58n526 1.000000 \n", + "e69r2kg 0.500000 \n", "\n", - " 2nd-largest[outdegree over C->c responses] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 2.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 1.0 \n", - "... ... \n", - "e65ca8k 1.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 4.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 2.0 \n", + " 2nd-largest[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 2.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 2.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 2.0 \n", + "e5modd7 2.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... 
\n", + "e5smhzk 1.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 2.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 2.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 2.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 2.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", - " 2nd-largest[indegree over C->c responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 3.0 \n", - "e6ls80j 3.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 0.0 \n", + " 2nd-largest[indegree over C->C responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 2.0 \n", + "e5qv9rj 3.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 3.0 \n", + "e5kwkg2 2.0 \n", + "e6mehe7 2.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 2.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 2.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 2.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 2.0 \n", + "e5modd7 2.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 2.0 \n", "... ... 
\n", - "e65ca8k 1.0 \n", - "e6cdkpy 2.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 2.0 \n", + "e5smhzk 1.0 \n", + "e5v91s0 2.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 2.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 3.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 3.0 \n", + "e6hqt5y 4.0 \n", "e5ua84v 2.0 \n", + "e65m7kq 3.0 \n", + "e5ggtru 2.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 2.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 2.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 2.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 3.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", - " 2nd-argmax[outdegree over C->c responses] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 2.0 \n", - "e5mhgl5 6.0 \n", - "e6w6fah 2.0 \n", - "... ... \n", - "e65ca8k 1.0 \n", - "e6cdkpy 2.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 4.0 \n", - "\n", - " 2nd-argmax[indegree over C->c responses] \\\n", - "e5hm9mp 0.0 \n", - "e5ytz1d 2.0 \n", - "e6ls80j 2.0 \n", - "e5mhgl5 4.0 \n", - "e6w6fah 1.0 \n", - "... ... \n", - "e65ca8k 2.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 0.0 \n", - "e5ua84v 5.0 \n", - "\n", - " norm.2nd-largest[outdegree over C->c responses] \\\n", - "e5hm9mp 0.222222 \n", - "e5ytz1d 0.111111 \n", - "e6ls80j 0.222222 \n", - "e5mhgl5 0.222222 \n", - "e6w6fah 0.125000 \n", - "... ... 
\n", - "e65ca8k 0.111111 \n", - "e6cdkpy 0.111111 \n", - "e5wc4tj 0.444444 \n", - "e6ua0sb 0.222222 \n", - "e5ua84v 0.222222 \n", + " 2nd-largest[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 2.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 2.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 2.0 \n", + "e5modd7 2.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 2.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 1.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 2.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 2.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 2.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 2.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", - " norm.2nd-largest[indegree over C->c responses] \\\n", - "e5hm9mp 0.111111 \n", - "e5ytz1d 0.333333 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 0.222222 \n", - "e6w6fah 0.000000 \n", - "... ... 
\n", - "e65ca8k 0.111111 \n", - "e6cdkpy 0.222222 \n", - "e5wc4tj 0.111111 \n", - "e6ua0sb 0.222222 \n", - "e5ua84v 0.222222 \n", + " 2nd-largest[indegree over C->c responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 3.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 3.0 \n", + "e5kwkg2 2.0 \n", + "e6mehe7 2.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 2.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 2.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 2.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 2.0 \n", + "e5modd7 2.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 2.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 1.0 \n", + "e5v91s0 2.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 2.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 3.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 2.0 \n", + "e6hqt5y 4.0 \n", + "e5ua84v 2.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 2.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 2.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 2.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 2.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 3.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", - " mean[outdegree over C->c responses] \\\n", - "e5hm9mp 1.800000 \n", - "e5ytz1d 1.000000 \n", - "e6ls80j 1.285714 \n", - "e5mhgl5 1.125000 \n", - "e6w6fah 0.888889 \n", - "... ... 
\n", - "e65ca8k 4.500000 \n", - "e6cdkpy 0.900000 \n", - "e5wc4tj 4.500000 \n", - "e6ua0sb 1.800000 \n", - "e5ua84v 1.285714 \n", + " 2nd-largest[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 2.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 2.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 2.0 \n", + "e5modd7 2.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 2.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 1.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 2.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 2.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 2.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 2.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", - " mean[indegree over C->c responses] \\\n", - "e5hm9mp 0.9 \n", - "e5ytz1d 0.9 \n", - "e6ls80j 0.9 \n", - "e5mhgl5 0.9 \n", - "e6w6fah 0.8 \n", - "... ... 
\n", - "e65ca8k 0.9 \n", - "e6cdkpy 0.9 \n", - "e5wc4tj 0.9 \n", - "e6ua0sb 0.9 \n", - "e5ua84v 0.9 \n", + " 2nd-largest[indegree over c->c responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 3.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 3.0 \n", + "e5kwkg2 2.0 \n", + "e6mehe7 2.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 2.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 2.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 2.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 2.0 \n", + "e5modd7 2.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 2.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 1.0 \n", + "e5v91s0 2.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 2.0 \n", + "e6l9uyf 2.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 3.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 2.0 \n", + "e6hqt5y 4.0 \n", + "e5ua84v 2.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 2.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 2.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 2.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 2.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 3.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", - " mean-nonzero[outdegree over C->c responses] \\\n", - "e5hm9mp 1.800000 \n", - "e5ytz1d 1.000000 \n", - "e6ls80j 1.500000 \n", - "e5mhgl5 1.285714 \n", - "e6w6fah 1.000000 \n", - "... ... 
\n", - "e65ca8k 4.500000 \n", - "e6cdkpy 1.000000 \n", - "e5wc4tj 4.500000 \n", - "e6ua0sb 1.800000 \n", - "e5ua84v 1.500000 \n", + " 2nd-largest[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 1.0 \n", + "e5surbt 1.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 1.0 \n", + "e57504g 1.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 1.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 1.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 2.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 1.0 \n", "\n", - " mean-nonzero[indegree over C->c responses] \\\n", - "e5hm9mp 1.285714 \n", - "e5ytz1d 2.250000 \n", - "e6ls80j 2.250000 \n", - "e5mhgl5 1.285714 \n", - "e6w6fah 8.000000 \n", + " 2nd-largest[outdegree over C->C responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 1.0 \n", + 
"e64i9cf 1.0 \n", + "e6q9204 1.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", "... ... \n", - "e65ca8k 1.125000 \n", - "e6cdkpy 2.250000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.800000 \n", - "e5ua84v 1.500000 \n", + "e5smhzk 1.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 2.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 2.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 1.0 \n", + "e69r2kg 1.0 \n", "\n", - " prop-nonzero[outdegree over C->c responses] \\\n", - "e5hm9mp 1.000000 \n", - "e5ytz1d 1.000000 \n", - "e6ls80j 0.857143 \n", - "e5mhgl5 0.875000 \n", - "e6w6fah 0.888889 \n", - "... ... \n", - "e65ca8k 1.000000 \n", - "e6cdkpy 0.900000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.000000 \n", - "e5ua84v 0.857143 \n", + " 2nd-largest[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 3.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 2.0 \n", + "e6989ii 3.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 2.0 \n", + "e6m0hsd 3.0 \n", + "e64r385 1.0 \n", + "e5surbt 1.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 1.0 \n", + "e57504g 1.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 3.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 2.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 3.0 \n", + "e6nir3u 3.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 4.0 \n", + "e6ai7z5 1.0 \n", + "... ... 
\n", + "e5smhzk 3.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 2.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 2.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 3.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 2.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 3.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 1.0 \n", "\n", - " prop-nonzero[indegree over C->c responses] \\\n", - "e5hm9mp 0.7 \n", - "e5ytz1d 0.4 \n", - "e6ls80j 0.4 \n", - "e5mhgl5 0.7 \n", - "e6w6fah 0.1 \n", + " 2nd-largest[outdegree over C->c responses] \\\n", + "e6p7yrp 2.0 \n", + "e5ywqyk 3.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 2.0 \n", + "e6989ii 4.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 2.0 \n", + "e6m0hsd 3.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 2.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 3.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 2.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 3.0 \n", + "e5oaf7h 3.0 \n", + "e6nir3u 4.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 3.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 4.0 \n", + "e6ai7z5 1.0 \n", "... ... \n", - "e65ca8k 0.8 \n", - "e6cdkpy 0.4 \n", - "e5wc4tj 0.9 \n", - "e6ua0sb 0.5 \n", - "e5ua84v 0.6 \n", - "\n", - " prop-multiple[outdegree over C->c responses] \\\n", - "e5hm9mp 0.800000 \n", - "e5ytz1d 0.000000 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 0.285714 \n", - "e6w6fah 0.000000 \n", - "... ... 
\n", - "e65ca8k 0.500000 \n", - "e6cdkpy 0.000000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 0.600000 \n", - "e5ua84v 0.333333 \n", - "\n", - " prop-multiple[indegree over C->c responses] \\\n", - "e5hm9mp 0.142857 \n", - "e5ytz1d 0.750000 \n", - "e6ls80j 0.500000 \n", - "e5mhgl5 0.285714 \n", - "e6w6fah 1.000000 \n", - "... ... \n", - "e65ca8k 0.125000 \n", - "e6cdkpy 0.750000 \n", - "e5wc4tj 0.000000 \n", - "e6ua0sb 0.400000 \n", - "e5ua84v 0.333333 \n", - "\n", - " entropy[outdegree over C->c responses] \\\n", - "e5hm9mp 1.581094 \n", - "e5ytz1d 2.197225 \n", - "e6ls80j 1.676988 \n", - "e5mhgl5 1.889159 \n", - "e6w6fah 2.079442 \n", - "... ... \n", - "e65ca8k 0.348832 \n", - "e6cdkpy 2.197225 \n", - "e5wc4tj 0.686962 \n", - "e6ua0sb 1.522955 \n", - "e5ua84v 1.676988 \n", - "\n", - " entropy[indegree over C->c responses] \\\n", - "e5hm9mp 1.831020 \n", - "e5ytz1d 1.310784 \n", - "e6ls80j 1.214890 \n", - "e5mhgl5 1.889159 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 2.043192 \n", - "e6cdkpy 1.273028 \n", - "e5wc4tj 2.197225 \n", - "e6ua0sb 1.427061 \n", - "e5ua84v 1.676988 \n", - "\n", - " 2nd-largest / max[outdegree over C->c responses] \\\n", - "e5hm9mp 1.000000 \n", - "e5ytz1d 1.000000 \n", - "e6ls80j 0.666667 \n", - "e5mhgl5 1.000000 \n", - "e6w6fah 1.000000 \n", - "... ... 
\n", - "e65ca8k 0.125000 \n", - "e6cdkpy 1.000000 \n", - "e5wc4tj 0.800000 \n", - "e6ua0sb 0.666667 \n", - "e5ua84v 0.666667 \n", + "e5smhzk 4.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 3.0 \n", + "e57a6qq 3.0 \n", + "e5qc7eb 2.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 2.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 3.0 \n", + "e64l6vq 2.0 \n", + "e6fjx0d 4.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 2.0 \n", + "e5lqoj1 2.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 4.0 \n", + "e647cm8 1.0 \n", + "e58n526 1.0 \n", + "e69r2kg 1.0 \n", "\n", - " 2nd-largest / max[indegree over C->c responses] \\\n", - "e5hm9mp 0.333333 \n", - "e5ytz1d 1.000000 \n", - "e6ls80j 0.750000 \n", - "e5mhgl5 1.000000 \n", - "e6w6fah 0.000000 \n", + " argmax[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 0.0 \n", + "e5qv9rj 2.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 0.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 0.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 0.0 \n", + "e64r385 1.0 \n", + "e5surbt 0.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 0.0 \n", + "e57504g 0.0 \n", + "e5borjq 3.0 \n", + "e64n9zv 0.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 0.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 0.0 \n", + "e5oaf7h 0.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 0.0 \n", + "e5d3zaa 0.0 \n", + "e5gnjv9 2.0 \n", + "e69gw2t 0.0 \n", + "e5syrih 0.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 0.0 \n", "... ... \n", - "e65ca8k 0.500000 \n", - "e6cdkpy 0.500000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 0.500000 \n", - "e5ua84v 0.666667 \n", - "\n", - " max[outdegree over C->C responses] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 3.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 1.0 \n", - "... ... 
\n", - "e65ca8k 1.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 3.0 \n", - "\n", - " max[indegree over C->C responses] \\\n", - "e5hm9mp 3.0 \n", - "e5ytz1d 3.0 \n", - "e6ls80j 4.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 8.0 \n", - "... ... \n", - "e65ca8k 2.0 \n", - "e6cdkpy 4.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 4.0 \n", - "e5ua84v 4.0 \n", - "\n", - " argmax[outdegree over C->C responses] \\\n", - "e5hm9mp 0.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 3.0 \n", - "e5mhgl5 4.0 \n", - "e6w6fah 1.0 \n", - "... ... \n", - "e65ca8k 0.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 1.0 \n", + "e5smhzk 0.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 0.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 2.0 \n", + "e57hyr1 4.0 \n", + "e5b8sj7 0.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 0.0 \n", + "e57a6qq 0.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 0.0 \n", + "e5ua84v 0.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 0.0 \n", + "e5pmmig 0.0 \n", + "e64l6vq 2.0 \n", + "e6fjx0d 0.0 \n", + "e5h3xyy 4.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 0.0 \n", + "e5kvch1 3.0 \n", + "e6srvwm 0.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 0.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", " argmax[indegree over C->C responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 0.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 0.0 \n", + "e6p7yrp 0.0 \n", + "e5ywqyk 0.0 \n", + "e5qv9rj 0.0 \n", + "e6jhojf 0.0 \n", + "e6989ii 0.0 \n", + "e69lgse 0.0 \n", + "e5kwkg2 0.0 \n", + "e6mehe7 0.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 0.0 \n", + "e5surbt 0.0 \n", + "e58gxii 0.0 \n", + "e64vc8y 0.0 \n", + "e57504g 0.0 \n", + "e5borjq 4.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 0.0 \n", + "e6q9204 1.0 \n", + "e5modd7 2.0 \n", + "e5xhbyd 0.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 0.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 0.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 0.0 \n", 
"... ... \n", - "e65ca8k 0.0 \n", - "e6cdkpy 0.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 0.0 \n", + "e5smhzk 0.0 \n", + "e5v91s0 0.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 0.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 0.0 \n", + "e57hyr1 0.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 0.0 \n", + "e6hqt5y 0.0 \n", "e5ua84v 1.0 \n", + "e65m7kq 0.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 0.0 \n", + "e6fjx0d 0.0 \n", + "e5h3xyy 0.0 \n", + "e589ri5 0.0 \n", + "e5beuqa 2.0 \n", + "e5lqoj1 0.0 \n", + "e5kvch1 0.0 \n", + "e6srvwm 0.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 0.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", - " norm.max[outdegree over C->C responses] \\\n", - "e5hm9mp 0.250000 \n", - "e5ytz1d 0.111111 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 0.250000 \n", - "e6w6fah 0.125000 \n", - "... ... \n", - "e65ca8k 0.500000 \n", - "e6cdkpy 0.111111 \n", - "e5wc4tj 0.500000 \n", - "e6ua0sb 0.285714 \n", - "e5ua84v 0.333333 \n", - "\n", - " norm.max[indegree over C->C responses] \\\n", - "e5hm9mp 0.375000 \n", - "e5ytz1d 0.333333 \n", - "e6ls80j 0.444444 \n", - "e5mhgl5 0.250000 \n", - "e6w6fah 1.000000 \n", - "... ... \n", - "e65ca8k 1.000000 \n", - "e6cdkpy 0.444444 \n", - "e5wc4tj 0.500000 \n", - "e6ua0sb 0.571429 \n", - "e5ua84v 0.444444 \n", - "\n", - " 2nd-largest[outdegree over C->C responses] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 2.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 1.0 \n", - "... ... \n", - "e65ca8k 1.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 2.0 \n", - "\n", - " 2nd-largest[indegree over C->C responses] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 3.0 \n", - "e6ls80j 3.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 2.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 2.0 \n", - "\n", - " 2nd-argmax[outdegree over C->C responses] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 2.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 2.0 \n", - "... ... \n", - "e65ca8k 1.0 \n", - "e6cdkpy 2.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 4.0 \n", - "e5ua84v 4.0 \n", + " argmax[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.0 \n", + "e5ywqyk 0.0 \n", + "e5qv9rj 2.0 \n", + "e6jhojf 3.0 \n", + "e6989ii 0.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 0.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 0.0 \n", + "e64r385 1.0 \n", + "e5surbt 0.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 0.0 \n", + "e57504g 0.0 \n", + "e5borjq 3.0 \n", + "e64n9zv 2.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 0.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 0.0 \n", + "e5oaf7h 0.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 0.0 \n", + "e5d3zaa 0.0 \n", + "e5gnjv9 2.0 \n", + "e69gw2t 0.0 \n", + "e5syrih 0.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 0.0 \n", + "... ... \n", + "e5smhzk 0.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 5.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 0.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 3.0 \n", + "e57hyr1 4.0 \n", + "e5b8sj7 0.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 0.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 3.0 \n", + "e6hqt5y 0.0 \n", + "e5ua84v 0.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 0.0 \n", + "e5pmmig 0.0 \n", + "e64l6vq 4.0 \n", + "e6fjx0d 0.0 \n", + "e5h3xyy 4.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 0.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 0.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 0.0 \n", + "e58n526 0.0 \n", + "e69r2kg 0.0 \n", "\n", - " 2nd-argmax[indegree over C->C responses] \\\n", - "e5hm9mp 0.0 \n", - "e5ytz1d 2.0 \n", - "e6ls80j 2.0 \n", - "e5mhgl5 4.0 \n", - "e6w6fah 1.0 \n", - "... ... 
\n", - "e65ca8k 1.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 4.0 \n", + " argmax[indegree over C->c responses] \\\n", + "e6p7yrp 0.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 0.0 \n", + "e6jhojf 4.0 \n", + "e6989ii 0.0 \n", + "e69lgse 0.0 \n", + "e5kwkg2 0.0 \n", + "e6mehe7 0.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 0.0 \n", + "e5surbt 0.0 \n", + "e58gxii 0.0 \n", + "e64vc8y 0.0 \n", + "e57504g 0.0 \n", + "e5borjq 4.0 \n", + "e64n9zv 3.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 0.0 \n", + "e6q9204 1.0 \n", + "e5modd7 2.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 0.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 0.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 0.0 \n", + "... ... \n", + "e5smhzk 0.0 \n", + "e5v91s0 0.0 \n", + "e6n6di6 6.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 0.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 0.0 \n", + "e57hyr1 0.0 \n", + "e5b8sj7 0.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 4.0 \n", + "e6hqt5y 0.0 \n", + "e5ua84v 1.0 \n", + "e65m7kq 0.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 0.0 \n", + "e6fjx0d 0.0 \n", + "e5h3xyy 0.0 \n", + "e589ri5 0.0 \n", + "e5beuqa 2.0 \n", + "e5lqoj1 0.0 \n", + "e5kvch1 0.0 \n", + "e6srvwm 0.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 0.0 \n", + "e58n526 0.0 \n", + "e69r2kg 0.0 \n", "\n", - " norm.2nd-largest[outdegree over C->C responses] \\\n", - "e5hm9mp 0.250000 \n", - "e5ytz1d 0.111111 \n", - "e6ls80j 0.222222 \n", - "e5mhgl5 0.125000 \n", - "e6w6fah 0.125000 \n", + " argmax[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 0.0 \n", + "e5ywqyk 0.0 \n", + "e5qv9rj 2.0 \n", + "e6jhojf 3.0 \n", + "e6989ii 5.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 0.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 0.0 \n", + "e64r385 1.0 \n", + "e5surbt 0.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 0.0 \n", + "e57504g 0.0 \n", + "e5borjq 3.0 \n", + "e64n9zv 2.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 0.0 \n", + 
"e5modd7 1.0 \n", + "e5xhbyd 0.0 \n", + "e5oaf7h 0.0 \n", + "e6nir3u 6.0 \n", + "e6c3xdn 0.0 \n", + "e5d3zaa 0.0 \n", + "e5gnjv9 2.0 \n", + "e69gw2t 0.0 \n", + "e5syrih 0.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 0.0 \n", "... ... \n", - "e65ca8k 0.500000 \n", - "e6cdkpy 0.111111 \n", - "e5wc4tj 0.500000 \n", - "e6ua0sb 0.285714 \n", - "e5ua84v 0.222222 \n", - "\n", - " norm.2nd-largest[indegree over C->C responses] \\\n", - "e5hm9mp 0.250000 \n", - "e5ytz1d 0.333333 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 0.250000 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 0.000000 \n", - "e6cdkpy 0.222222 \n", - "e5wc4tj 0.500000 \n", - "e6ua0sb 0.142857 \n", - "e5ua84v 0.222222 \n", + "e5smhzk 0.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 5.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 0.0 \n", + "e6x5he5 3.0 \n", + "e6l9uyf 3.0 \n", + "e57hyr1 4.0 \n", + "e5b8sj7 0.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 0.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 3.0 \n", + "e6hqt5y 0.0 \n", + "e5ua84v 0.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 0.0 \n", + "e5pmmig 0.0 \n", + "e64l6vq 4.0 \n", + "e6fjx0d 0.0 \n", + "e5h3xyy 4.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 0.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 0.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 0.0 \n", + "e58n526 0.0 \n", + "e69r2kg 0.0 \n", "\n", - " mean[outdegree over C->C responses] \\\n", - "e5hm9mp 1.600000 \n", - "e5ytz1d 1.000000 \n", - "e6ls80j 1.285714 \n", - "e5mhgl5 1.000000 \n", - "e6w6fah 0.888889 \n", - "... ... 
\n", - "e65ca8k 1.000000 \n", - "e6cdkpy 0.900000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.400000 \n", - "e5ua84v 1.285714 \n", + " argmax[indegree over c->c responses] \\\n", + "e6p7yrp 0.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 0.0 \n", + "e6jhojf 4.0 \n", + "e6989ii 6.0 \n", + "e69lgse 0.0 \n", + "e5kwkg2 0.0 \n", + "e6mehe7 0.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 0.0 \n", + "e5surbt 0.0 \n", + "e58gxii 0.0 \n", + "e64vc8y 0.0 \n", + "e57504g 0.0 \n", + "e5borjq 4.0 \n", + "e64n9zv 3.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 0.0 \n", + "e6q9204 1.0 \n", + "e5modd7 2.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 7.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 0.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 0.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 0.0 \n", + "... ... \n", + "e5smhzk 0.0 \n", + "e5v91s0 0.0 \n", + "e6n6di6 6.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 0.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 0.0 \n", + "e57hyr1 0.0 \n", + "e5b8sj7 0.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 4.0 \n", + "e6hqt5y 0.0 \n", + "e5ua84v 1.0 \n", + "e65m7kq 0.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 0.0 \n", + "e6fjx0d 0.0 \n", + "e5h3xyy 0.0 \n", + "e589ri5 0.0 \n", + "e5beuqa 2.0 \n", + "e5lqoj1 0.0 \n", + "e5kvch1 0.0 \n", + "e6srvwm 0.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 0.0 \n", + "e58n526 0.0 \n", + "e69r2kg 0.0 \n", "\n", - " mean[indegree over C->C responses] \\\n", - "e5hm9mp 1.600000 \n", - "e5ytz1d 1.000000 \n", - "e6ls80j 1.285714 \n", - "e5mhgl5 1.000000 \n", - "e6w6fah 0.888889 \n", - "... ... 
\n", - "e65ca8k 1.000000 \n", - "e6cdkpy 0.900000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.400000 \n", - "e5ua84v 1.285714 \n", + " argmax[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 2.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 0.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 2.0 \n", + "e6m0hsd 0.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 3.0 \n", + "e57504g 5.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 0.0 \n", + "e582ud3 4.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 0.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 0.0 \n", + "e5oaf7h 0.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 0.0 \n", + "e5gnjv9 6.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 3.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 0.0 \n", + "... ... \n", + "e5smhzk 1.0 \n", + "e5v91s0 3.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 4.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 2.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 0.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 0.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 0.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 0.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 0.0 \n", + "e5pmmig 0.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 0.0 \n", + "e5h3xyy 2.0 \n", + "e589ri5 4.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 4.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 0.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", - " mean-nonzero[outdegree over C->C responses] \\\n", - "e5hm9mp 1.600000 \n", - "e5ytz1d 1.000000 \n", - "e6ls80j 1.500000 \n", - "e5mhgl5 1.142857 \n", - "e6w6fah 1.000000 \n", - "... ... 
\n", - "e65ca8k 1.000000 \n", - "e6cdkpy 1.000000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.400000 \n", - "e5ua84v 1.500000 \n", + " argmax[outdegree over C->C responses] \\\n", + "e6p7yrp 0.0 \n", + "e5ywqyk 0.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 0.0 \n", + "e6989ii 0.0 \n", + "e69lgse 0.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 0.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 0.0 \n", + "e64vc8y 1.0 \n", + "e57504g 5.0 \n", + "e5borjq 4.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 0.0 \n", + "e6q9204 1.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 0.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 7.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 0.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 0.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 3.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 0.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 4.0 \n", + "e5ua84v 1.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 0.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 5.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 2.0 \n", + "e5lqoj1 5.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 0.0 \n", + "e58n526 1.0 \n", + "e69r2kg 2.0 \n", "\n", - " mean-nonzero[indegree over C->C responses] \\\n", - "e5hm9mp 1.600000 \n", - "e5ytz1d 2.250000 \n", - "e6ls80j 2.250000 \n", - "e5mhgl5 1.333333 \n", - "e6w6fah 8.000000 \n", - "... ... 
\n", - "e65ca8k 2.000000 \n", - "e6cdkpy 2.250000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.750000 \n", - "e5ua84v 1.800000 \n", + " argmax[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 2.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 2.0 \n", + "e6m0hsd 0.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 3.0 \n", + "e57504g 5.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 0.0 \n", + "e582ud3 4.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 0.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 0.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 0.0 \n", + "e5gnjv9 6.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 3.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 0.0 \n", + "... ... \n", + "e5smhzk 1.0 \n", + "e5v91s0 3.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 4.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 2.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 0.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 4.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 2.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 0.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 0.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 2.0 \n", + "e589ri5 4.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 4.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 0.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", - " prop-nonzero[outdegree over C->C responses] \\\n", - "e5hm9mp 1.000000 \n", - "e5ytz1d 1.000000 \n", - "e6ls80j 0.857143 \n", - "e5mhgl5 0.875000 \n", - "e6w6fah 0.888889 \n", - "... ... 
\n", - "e65ca8k 1.000000 \n", - "e6cdkpy 0.900000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.000000 \n", - "e5ua84v 0.857143 \n", + " argmax[outdegree over C->c responses] \\\n", + "e6p7yrp 0.0 \n", + "e5ywqyk 0.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 0.0 \n", + "e6989ii 0.0 \n", + "e69lgse 0.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 0.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 0.0 \n", + "e64vc8y 1.0 \n", + "e57504g 5.0 \n", + "e5borjq 4.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 0.0 \n", + "e6q9204 1.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 0.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 7.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 0.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 0.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 3.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 0.0 \n", + "e5qc7eb 0.0 \n", + "e6hqt5y 4.0 \n", + "e5ua84v 1.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 0.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 5.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 2.0 \n", + "e5lqoj1 5.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 0.0 \n", + "e58n526 1.0 \n", + "e69r2kg 2.0 \n", "\n", - " prop-nonzero[indegree over C->C responses] \\\n", - "e5hm9mp 1.000000 \n", - "e5ytz1d 0.444444 \n", - "e6ls80j 0.571429 \n", - "e5mhgl5 0.750000 \n", - "e6w6fah 0.111111 \n", - "... ... 
\n", - "e65ca8k 0.500000 \n", - "e6cdkpy 0.400000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 0.800000 \n", - "e5ua84v 0.714286 \n", + " count[dyadic interaction motif over mid-thread] \\\n", + "e6p7yrp 2.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 0.0 \n", + "e6jhojf 3.0 \n", + "e6989ii 1.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 0.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 2.0 \n", + "e64r385 2.0 \n", + "e5surbt 0.0 \n", + "e58gxii 2.0 \n", + "e64vc8y 0.0 \n", + "e57504g 1.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 2.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 2.0 \n", + "e5modd7 0.0 \n", + "e5xhbyd 3.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 0.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 2.0 \n", + "... ... \n", + "e5smhzk 1.0 \n", + "e5v91s0 0.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 0.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 0.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 0.0 \n", + "e5ua84v 3.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 0.0 \n", + "e5pmmig 3.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 0.0 \n", + "e5beuqa 3.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 0.0 \n", + "e58n526 0.0 \n", + "e69r2kg 1.0 \n", + "\n", + " count[dyadic interaction motif] \\\n", + "e6p7yrp 3.0 \n", + "e5ywqyk 2.0 \n", + "e5qv9rj 0.0 \n", + "e6jhojf 4.0 \n", + "e6989ii 1.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 0.0 \n", + "e6mehe7 3.0 \n", + "e6m0hsd 2.0 \n", + "e64r385 2.0 \n", + "e5surbt 0.0 \n", + "e58gxii 3.0 \n", + "e64vc8y 0.0 \n", + "e57504g 1.0 \n", + "e5borjq 2.0 \n", + "e64n9zv 2.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 2.0 \n", + "e5modd7 0.0 \n", + "e5xhbyd 3.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 0.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 3.0 \n", + 
"e5syrih 2.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 2.0 \n", + "... ... \n", + "e5smhzk 2.0 \n", + "e5v91s0 0.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 4.0 \n", + "e6l9uyf 0.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 2.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 0.0 \n", + "e5ua84v 3.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 0.0 \n", + "e5pmmig 3.0 \n", + "e64l6vq 2.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 0.0 \n", + "e5beuqa 3.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", - " prop-multiple[outdegree over C->C responses] \\\n", - "e5hm9mp 0.600000 \n", - "e5ytz1d 0.000000 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 0.142857 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 0.000000 \n", - "e6cdkpy 0.000000 \n", - "e5wc4tj 0.000000 \n", - "e6ua0sb 0.400000 \n", - "e5ua84v 0.333333 \n", + " count[external reciprocity motif over mid-thread] \\\n", + "e6p7yrp 2.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 5.0 \n", + "e6jhojf 4.0 \n", + "e6989ii 0.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 3.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 0.0 \n", + "e64r385 2.0 \n", + "e5surbt 3.0 \n", + "e58gxii 3.0 \n", + "e64vc8y 1.0 \n", + "e57504g 4.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 0.0 \n", + "e6q9204 2.0 \n", + "e5modd7 6.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 6.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 4.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 1.0 \n", + "... ... 
\n", + "e5smhzk 0.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 5.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 3.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 2.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 0.0 \n", + "e57a6qq 3.0 \n", + "e5qc7eb 4.0 \n", + "e6hqt5y 0.0 \n", + "e5ua84v 2.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 4.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 0.0 \n", + "e6fjx0d 0.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 0.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 4.0 \n", "\n", - " prop-multiple[indegree over C->C responses] \\\n", - "e5hm9mp 0.400000 \n", - "e5ytz1d 0.750000 \n", - "e6ls80j 0.500000 \n", - "e5mhgl5 0.333333 \n", - "e6w6fah 1.000000 \n", - "... ... \n", - "e65ca8k 1.000000 \n", - "e6cdkpy 0.750000 \n", - "e5wc4tj 0.000000 \n", - "e6ua0sb 0.250000 \n", - "e5ua84v 0.400000 \n", + " count[external reciprocity motif] \\\n", + "e6p7yrp 2.0 \n", + "e5ywqyk 2.0 \n", + "e5qv9rj 6.0 \n", + "e6jhojf 4.0 \n", + "e6989ii 0.0 \n", + "e69lgse 4.0 \n", + "e5kwkg2 6.0 \n", + "e6mehe7 2.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 5.0 \n", + "e5surbt 4.0 \n", + "e58gxii 3.0 \n", + "e64vc8y 3.0 \n", + "e57504g 6.0 \n", + "e5borjq 5.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 3.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 4.0 \n", + "e5modd7 8.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 2.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 8.0 \n", + "e5d3zaa 4.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 6.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 4.0 \n", + "... ... 
\n", + "e5smhzk 0.0 \n", + "e5v91s0 4.0 \n", + "e6n6di6 7.0 \n", + "e6iqq30 2.0 \n", + "e5bfad7 4.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 4.0 \n", + "e57hyr1 3.0 \n", + "e5b8sj7 4.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 4.0 \n", + "e57a6qq 3.0 \n", + "e5qc7eb 4.0 \n", + "e6hqt5y 5.0 \n", + "e5ua84v 5.0 \n", + "e65m7kq 4.0 \n", + "e5ggtru 8.0 \n", + "e5pmmig 4.0 \n", + "e64l6vq 2.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 4.0 \n", + "e589ri5 3.0 \n", + "e5beuqa 4.0 \n", + "e5lqoj1 2.0 \n", + "e5kvch1 3.0 \n", + "e6srvwm 5.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 4.0 \n", + "e58n526 0.0 \n", + "e69r2kg 5.0 \n", + "\n", + " count[incoming triads over mid-thread] count[incoming triads] \\\n", + "e6p7yrp 3.0 10.0 \n", + "e5ywqyk 1.0 2.0 \n", + "e5qv9rj 3.0 6.0 \n", + "e6jhojf 3.0 6.0 \n", + "e6989ii 0.0 0.0 \n", + "e69lgse 3.0 6.0 \n", + "e5kwkg2 1.0 4.0 \n", + "e6mehe7 1.0 7.0 \n", + "e6m0hsd 1.0 1.0 \n", + "e64r385 2.0 3.0 \n", + "e5surbt 0.0 10.0 \n", + "e58gxii 4.0 7.0 \n", + "e64vc8y 0.0 15.0 \n", + "e57504g 2.0 3.0 \n", + "e5borjq 4.0 5.0 \n", + "e64n9zv 1.0 1.0 \n", + "e582ud3 1.0 16.0 \n", + "e64i9cf 1.0 7.0 \n", + "e6q9204 4.0 4.0 \n", + "e5modd7 8.0 8.0 \n", + "e5xhbyd 2.0 2.0 \n", + "e5oaf7h 3.0 3.0 \n", + "e6nir3u 0.0 0.0 \n", + "e6c3xdn 1.0 1.0 \n", + "e5d3zaa 2.0 3.0 \n", + "e5gnjv9 0.0 28.0 \n", + "e69gw2t 2.0 3.0 \n", + "e5syrih 8.0 8.0 \n", + "e5sa2yf 0.0 0.0 \n", + "e6ai7z5 1.0 4.0 \n", + "... ... ... 
\n", + "e5smhzk 0.0 1.0 \n", + "e5v91s0 1.0 11.0 \n", + "e6n6di6 7.0 7.0 \n", + "e6iqq30 0.0 15.0 \n", + "e5bfad7 2.0 8.0 \n", + "e6x5he5 0.0 6.0 \n", + "e6l9uyf 1.0 11.0 \n", + "e57hyr1 1.0 7.0 \n", + "e5b8sj7 7.0 10.0 \n", + "e6nlep7 0.0 36.0 \n", + "e6ltazd 6.0 6.0 \n", + "e57a6qq 4.0 4.0 \n", + "e5qc7eb 4.0 6.0 \n", + "e6hqt5y 6.0 12.0 \n", + "e5ua84v 7.0 7.0 \n", + "e65m7kq 3.0 9.0 \n", + "e5ggtru 11.0 11.0 \n", + "e5pmmig 2.0 2.0 \n", + "e64l6vq 1.0 4.0 \n", + "e6fjx0d 0.0 1.0 \n", + "e5h3xyy 1.0 7.0 \n", + "e589ri5 0.0 15.0 \n", + "e5beuqa 6.0 7.0 \n", + "e5lqoj1 0.0 10.0 \n", + "e5kvch1 1.0 4.0 \n", + "e6srvwm 2.0 5.0 \n", + "e5o65mk 0.0 0.0 \n", + "e647cm8 3.0 9.0 \n", + "e58n526 0.0 36.0 \n", + "e69r2kg 4.0 5.0 \n", + "\n", + " count[outgoing triads over mid-thread] count[outgoing triads] \\\n", + "e6p7yrp 3.0 3.0 \n", + "e5ywqyk 1.0 1.0 \n", + "e5qv9rj 0.0 0.0 \n", + "e6jhojf 6.0 6.0 \n", + "e6989ii 0.0 0.0 \n", + "e69lgse 1.0 1.0 \n", + "e5kwkg2 0.0 0.0 \n", + "e6mehe7 3.0 3.0 \n", + "e6m0hsd 1.0 1.0 \n", + "e64r385 1.0 2.0 \n", + "e5surbt 0.0 0.0 \n", + "e58gxii 3.0 3.0 \n", + "e64vc8y 0.0 0.0 \n", + "e57504g 1.0 2.0 \n", + "e5borjq 0.0 1.0 \n", + "e64n9zv 1.0 1.0 \n", + "e582ud3 0.0 0.0 \n", + "e64i9cf 1.0 1.0 \n", + "e6q9204 1.0 3.0 \n", + "e5modd7 0.0 0.0 \n", + "e5xhbyd 2.0 2.0 \n", + "e5oaf7h 1.0 1.0 \n", + "e6nir3u 0.0 0.0 \n", + "e6c3xdn 0.0 0.0 \n", + "e5d3zaa 0.0 1.0 \n", + "e5gnjv9 0.0 1.0 \n", + "e69gw2t 1.0 2.0 \n", + "e5syrih 1.0 2.0 \n", + "e5sa2yf 0.0 0.0 \n", + "e6ai7z5 6.0 6.0 \n", + "... ... ... 
\n", + "e5smhzk 1.0 1.0 \n", + "e5v91s0 0.0 0.0 \n", + "e6n6di6 1.0 1.0 \n", + "e6iqq30 0.0 0.0 \n", + "e5bfad7 1.0 1.0 \n", + "e6x5he5 6.0 6.0 \n", + "e6l9uyf 0.0 1.0 \n", + "e57hyr1 0.0 3.0 \n", + "e5b8sj7 0.0 1.0 \n", + "e6nlep7 0.0 0.0 \n", + "e6ltazd 0.0 1.0 \n", + "e57a6qq 1.0 1.0 \n", + "e5qc7eb 0.0 1.0 \n", + "e6hqt5y 0.0 1.0 \n", + "e5ua84v 2.0 4.0 \n", + "e65m7kq 0.0 1.0 \n", + "e5ggtru 0.0 1.0 \n", + "e5pmmig 2.0 4.0 \n", + "e64l6vq 1.0 1.0 \n", + "e6fjx0d 0.0 1.0 \n", + "e5h3xyy 0.0 1.0 \n", + "e589ri5 0.0 0.0 \n", + "e5beuqa 3.0 7.0 \n", + "e5lqoj1 1.0 3.0 \n", + "e5kvch1 0.0 2.0 \n", + "e6srvwm 0.0 1.0 \n", + "e5o65mk 0.0 0.0 \n", + "e647cm8 0.0 0.0 \n", + "e58n526 0.0 0.0 \n", + "e69r2kg 1.0 1.0 \n", + "\n", + " count[reciprocity motif over mid-thread] count[reciprocity motif] \\\n", + "e6p7yrp 2.0 4.0 \n", + "e5ywqyk 5.0 6.0 \n", + "e5qv9rj 0.0 0.0 \n", + "e6jhojf 3.0 4.0 \n", + "e6989ii 7.0 8.0 \n", + "e69lgse 1.0 2.0 \n", + "e5kwkg2 0.0 0.0 \n", + "e6mehe7 2.0 4.0 \n", + "e6m0hsd 6.0 7.0 \n", + "e64r385 2.0 2.0 \n", + "e5surbt 0.0 0.0 \n", + "e58gxii 2.0 4.0 \n", + "e64vc8y 0.0 0.0 \n", + "e57504g 1.0 1.0 \n", + "e5borjq 1.0 2.0 \n", + "e64n9zv 6.0 7.0 \n", + "e582ud3 0.0 0.0 \n", + "e64i9cf 2.0 4.0 \n", + "e6q9204 4.0 4.0 \n", + "e5modd7 0.0 0.0 \n", + "e5xhbyd 5.0 6.0 \n", + "e5oaf7h 5.0 6.0 \n", + "e6nir3u 7.0 8.0 \n", + "e6c3xdn 0.0 0.0 \n", + "e5d3zaa 3.0 3.0 \n", + "e5gnjv9 0.0 0.0 \n", + "e69gw2t 4.0 5.0 \n", + "e5syrih 1.0 2.0 \n", + "e5sa2yf 7.0 8.0 \n", + "e6ai7z5 2.0 2.0 \n", + "... ... ... 
\n", + "e5smhzk 5.0 7.0 \n", + "e5v91s0 0.0 0.0 \n", + "e6n6di6 1.0 1.0 \n", + "e6iqq30 0.0 1.0 \n", + "e5bfad7 0.0 2.0 \n", + "e6x5he5 0.0 5.0 \n", + "e6l9uyf 0.0 0.0 \n", + "e57hyr1 2.0 2.0 \n", + "e5b8sj7 1.0 2.0 \n", + "e6nlep7 0.0 0.0 \n", + "e6ltazd 4.0 4.0 \n", + "e57a6qq 4.0 5.0 \n", + "e5qc7eb 2.0 3.0 \n", + "e6hqt5y 0.0 0.0 \n", + "e5ua84v 3.0 3.0 \n", + "e65m7kq 1.0 1.0 \n", + "e5ggtru 0.0 0.0 \n", + "e5pmmig 4.0 4.0 \n", + "e64l6vq 2.0 4.0 \n", + "e6fjx0d 6.0 6.0 \n", + "e5h3xyy 1.0 1.0 \n", + "e589ri5 0.0 0.0 \n", + "e5beuqa 3.0 3.0 \n", + "e5lqoj1 2.0 2.0 \n", + "e5kvch1 3.0 3.0 \n", + "e6srvwm 1.0 1.0 \n", + "e5o65mk 5.0 7.0 \n", + "e647cm8 0.0 1.0 \n", + "e58n526 0.0 0.0 \n", + "e69r2kg 1.0 2.0 \n", "\n", - " entropy[outdegree over C->C responses] \\\n", - "e5hm9mp 1.559581 \n", - "e5ytz1d 2.197225 \n", - "e6ls80j 1.676988 \n", - "e5mhgl5 1.906155 \n", - "e6w6fah 2.079442 \n", - "... ... \n", - "e65ca8k 0.693147 \n", - "e6cdkpy 2.197225 \n", - "e5wc4tj 0.693147 \n", - "e6ua0sb 1.549826 \n", - "e5ua84v 1.676988 \n", + " entropy[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.242453 \n", + "e5ywqyk 1.039721 \n", + "e5qv9rj 1.242453 \n", + "e6jhojf 1.475076 \n", + "e6989ii 0.693147 \n", + "e69lgse 1.242453 \n", + "e5kwkg2 1.560710 \n", + "e6mehe7 1.332179 \n", + "e6m0hsd 1.039721 \n", + "e64r385 1.549826 \n", + "e5surbt 1.386294 \n", + "e58gxii 1.277034 \n", + "e64vc8y 1.098612 \n", + "e57504g 1.549826 \n", + "e5borjq 1.277034 \n", + "e64n9zv 1.039721 \n", + "e582ud3 0.636514 \n", + "e64i9cf 1.039721 \n", + "e6q9204 1.011404 \n", + "e5modd7 1.039721 \n", + "e5xhbyd 1.329661 \n", + "e5oaf7h 0.562335 \n", + "e6nir3u 0.693147 \n", + "e6c3xdn 1.906155 \n", + "e5d3zaa 1.054920 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 1.054920 \n", + "e5syrih 1.039721 \n", + "e5sa2yf 0.693147 \n", + "e6ai7z5 1.560710 \n", + "... ... 
\n", + "e5smhzk 1.098612 \n", + "e5v91s0 1.039721 \n", + "e6n6di6 1.213008 \n", + "e6iqq30 1.098612 \n", + "e5bfad7 1.329661 \n", + "e6x5he5 1.386294 \n", + "e6l9uyf 1.039721 \n", + "e57hyr1 1.332179 \n", + "e5b8sj7 0.636514 \n", + "e6nlep7 NaN \n", + "e6ltazd 0.500402 \n", + "e57a6qq 1.011404 \n", + "e5qc7eb 1.011404 \n", + "e6hqt5y 0.500402 \n", + "e5ua84v 1.213008 \n", + "e65m7kq 0.950271 \n", + "e5ggtru 0.900256 \n", + "e5pmmig 1.329661 \n", + "e64l6vq 1.332179 \n", + "e6fjx0d 0.693147 \n", + "e5h3xyy 1.332179 \n", + "e589ri5 1.098612 \n", + "e5beuqa 1.153742 \n", + "e5lqoj1 1.098612 \n", + "e5kvch1 1.332179 \n", + "e6srvwm 1.329661 \n", + "e5o65mk 0.693147 \n", + "e647cm8 0.950271 \n", + "e58n526 NaN \n", + "e69r2kg 1.277034 \n", "\n", " entropy[indegree over C->C responses] \\\n", - "e5hm9mp 1.494175 \n", - "e5ytz1d 1.310784 \n", - "e6ls80j 1.214890 \n", - "e5mhgl5 1.732868 \n", - "e6w6fah 0.000000 \n", + "e6p7yrp 1.073543 \n", + "e5ywqyk 1.054920 \n", + "e5qv9rj 1.464816 \n", + "e6jhojf 1.386294 \n", + "e6989ii 0.693147 \n", + "e69lgse 1.255482 \n", + "e5kwkg2 1.676988 \n", + "e6mehe7 1.213008 \n", + "e6m0hsd 1.039721 \n", + "e64r385 1.735126 \n", + "e5surbt 1.303092 \n", + "e58gxii 1.213008 \n", + "e64vc8y 1.002718 \n", + "e57504g 1.735126 \n", + "e5borjq 1.522955 \n", + "e64n9zv 1.039721 \n", + "e582ud3 0.848686 \n", + "e64i9cf 0.955700 \n", + "e6q9204 1.277034 \n", + "e5modd7 1.273028 \n", + "e5xhbyd 1.329661 \n", + "e5oaf7h 0.562335 \n", + "e6nir3u 0.693147 \n", + "e6c3xdn 2.043192 \n", + "e5d3zaa 1.351784 \n", + "e5gnjv9 0.348832 \n", + "e69gw2t 1.351784 \n", + "e5syrih 1.273028 \n", + "e5sa2yf 0.693147 \n", + "e6ai7z5 1.494175 \n", "... ... 
\n", - "e65ca8k 0.000000 \n", - "e6cdkpy 1.273028 \n", - "e5wc4tj 0.693147 \n", - "e6ua0sb 1.153742 \n", + "e5smhzk 1.039721 \n", + "e5v91s0 1.149060 \n", + "e6n6di6 1.427061 \n", + "e6iqq30 1.002718 \n", + "e5bfad7 1.273028 \n", + "e6x5he5 1.386294 \n", + "e6l9uyf 1.149060 \n", + "e57hyr1 1.427061 \n", + "e5b8sj7 1.060857 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.867563 \n", + "e57a6qq 1.011404 \n", + "e5qc7eb 1.004242 \n", + "e6hqt5y 0.964963 \n", "e5ua84v 1.427061 \n", + "e65m7kq 1.214890 \n", + "e5ggtru 1.149060 \n", + "e5pmmig 1.549826 \n", + "e64l6vq 1.277034 \n", + "e6fjx0d 1.039721 \n", + "e5h3xyy 1.427061 \n", + "e589ri5 1.002718 \n", + "e5beuqa 1.427061 \n", + "e5lqoj1 1.073543 \n", + "e5kvch1 1.494175 \n", + "e6srvwm 1.522955 \n", + "e5o65mk 0.693147 \n", + "e647cm8 1.214890 \n", + "e58n526 0.000000 \n", + "e69r2kg 1.522955 \n", "\n", - " 2nd-largest / max[outdegree over C->C responses] \\\n", - "e5hm9mp 1.000000 \n", - "e5ytz1d 1.000000 \n", - "e6ls80j 0.666667 \n", - "e5mhgl5 0.500000 \n", - "e6w6fah 1.000000 \n", + " entropy[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.791759 \n", + "e5ywqyk 1.906155 \n", + "e5qv9rj 1.242453 \n", + "e6jhojf 1.906155 \n", + "e6989ii 1.945910 \n", + "e69lgse 1.242453 \n", + "e5kwkg2 1.560710 \n", + "e6mehe7 1.560710 \n", + "e6m0hsd 1.906155 \n", + "e64r385 1.549826 \n", + "e5surbt 1.386294 \n", + "e58gxii 1.549826 \n", + "e64vc8y 1.098612 \n", + "e57504g 1.549826 \n", + "e5borjq 1.277034 \n", + "e64n9zv 1.906155 \n", + "e582ud3 0.636514 \n", + "e64i9cf 1.332179 \n", + "e6q9204 1.732868 \n", + "e5modd7 1.039721 \n", + "e5xhbyd 1.732868 \n", + "e5oaf7h 1.732868 \n", + "e6nir3u 1.945910 \n", + "e6c3xdn 1.906155 \n", + "e5d3zaa 1.549826 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 1.549826 \n", + "e5syrih 1.494175 \n", + "e5sa2yf 2.079442 \n", + "e6ai7z5 1.791759 \n", "... ... 
\n", - "e65ca8k 1.000000 \n", - "e6cdkpy 1.000000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.000000 \n", - "e5ua84v 0.666667 \n", + "e5smhzk 1.945910 \n", + "e5v91s0 1.039721 \n", + "e6n6di6 1.494175 \n", + "e6iqq30 1.098612 \n", + "e5bfad7 1.329661 \n", + "e6x5he5 1.386294 \n", + "e6l9uyf 1.039721 \n", + "e57hyr1 1.332179 \n", + "e5b8sj7 1.011404 \n", + "e6nlep7 NaN \n", + "e6ltazd 1.386294 \n", + "e57a6qq 1.732868 \n", + "e5qc7eb 1.277034 \n", + "e6hqt5y 0.500402 \n", + "e5ua84v 1.494175 \n", + "e65m7kq 1.332179 \n", + "e5ggtru 1.213008 \n", + "e5pmmig 1.906155 \n", + "e64l6vq 1.560710 \n", + "e6fjx0d 1.945910 \n", + "e5h3xyy 1.332179 \n", + "e589ri5 1.098612 \n", + "e5beuqa 1.475076 \n", + "e5lqoj1 1.386294 \n", + "e5kvch1 1.791759 \n", + "e6srvwm 1.329661 \n", + "e5o65mk 1.945910 \n", + "e647cm8 0.950271 \n", + "e58n526 NaN \n", + "e69r2kg 1.549826 \n", "\n", - " 2nd-largest / max[indegree over C->C responses] \\\n", - "e5hm9mp 0.666667 \n", - "e5ytz1d 1.000000 \n", - "e6ls80j 0.750000 \n", - "e5mhgl5 1.000000 \n", - "e6w6fah 0.000000 \n", - "... ... 
\n", - "e65ca8k 0.000000 \n", - "e6cdkpy 0.500000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 0.250000 \n", - "e5ua84v 0.500000 \n", + " entropy[indegree over C->c responses] \\\n", + "e6p7yrp 1.831020 \n", + "e5ywqyk 2.043192 \n", + "e5qv9rj 1.464816 \n", + "e6jhojf 2.043192 \n", + "e6989ii 2.079442 \n", + "e69lgse 1.464816 \n", + "e5kwkg2 1.676988 \n", + "e6mehe7 1.676988 \n", + "e6m0hsd 2.043192 \n", + "e64r385 1.735126 \n", + "e5surbt 1.303092 \n", + "e58gxii 1.735126 \n", + "e64vc8y 1.002718 \n", + "e57504g 1.735126 \n", + "e5borjq 1.522955 \n", + "e64n9zv 2.043192 \n", + "e582ud3 0.848686 \n", + "e64i9cf 1.427061 \n", + "e6q9204 1.889159 \n", + "e5modd7 1.273028 \n", + "e5xhbyd 1.889159 \n", + "e5oaf7h 1.889159 \n", + "e6nir3u 2.079442 \n", + "e6c3xdn 2.043192 \n", + "e5d3zaa 1.735126 \n", + "e5gnjv9 0.348832 \n", + "e69gw2t 1.735126 \n", + "e5syrih 1.676988 \n", + "e5sa2yf 2.197225 \n", + "e6ai7z5 1.831020 \n", + "... ... \n", + "e5smhzk 2.043192 \n", + "e5v91s0 1.149060 \n", + "e6n6di6 1.676988 \n", + "e6iqq30 1.002718 \n", + "e5bfad7 1.522955 \n", + "e6x5he5 1.386294 \n", + "e6l9uyf 1.149060 \n", + "e57hyr1 1.427061 \n", + "e5b8sj7 1.310784 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 1.581094 \n", + "e57a6qq 1.889159 \n", + "e5qc7eb 1.522955 \n", + "e6hqt5y 0.964963 \n", + "e5ua84v 1.676988 \n", + "e65m7kq 1.427061 \n", + "e5ggtru 1.427061 \n", + "e5pmmig 2.043192 \n", + "e64l6vq 1.676988 \n", + "e6fjx0d 2.043192 \n", + "e5h3xyy 1.427061 \n", + "e589ri5 1.002718 \n", + "e5beuqa 1.676988 \n", + "e5lqoj1 1.303092 \n", + "e5kvch1 1.831020 \n", + "e6srvwm 1.522955 \n", + "e5o65mk 2.079442 \n", + "e647cm8 1.214890 \n", + "e58n526 0.000000 \n", + "e69r2kg 1.735126 \n", "\n", - " is-present[reciprocity motif] count[reciprocity motif] \\\n", - "e5hm9mp 1.0 4.0 \n", - "e5ytz1d 1.0 1.0 \n", - "e6ls80j 1.0 1.0 \n", - "e5mhgl5 1.0 2.0 \n", - "e6w6fah 0.0 0.0 \n", - "... ... ... 
\n", - "e65ca8k 1.0 7.0 \n", - "e6cdkpy 0.0 0.0 \n", - "e5wc4tj 1.0 8.0 \n", - "e6ua0sb 1.0 3.0 \n", - "e5ua84v 1.0 3.0 \n", + " entropy[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 1.791759 \n", + "e5ywqyk 1.906155 \n", + "e5qv9rj 1.242453 \n", + "e6jhojf 1.906155 \n", + "e6989ii 1.906155 \n", + "e69lgse 1.242453 \n", + "e5kwkg2 1.560710 \n", + "e6mehe7 1.560710 \n", + "e6m0hsd 1.906155 \n", + "e64r385 1.549826 \n", + "e5surbt 1.386294 \n", + "e58gxii 1.549826 \n", + "e64vc8y 1.098612 \n", + "e57504g 1.549826 \n", + "e5borjq 1.277034 \n", + "e64n9zv 1.906155 \n", + "e582ud3 0.636514 \n", + "e64i9cf 1.332179 \n", + "e6q9204 1.732868 \n", + "e5modd7 1.039721 \n", + "e5xhbyd 1.732868 \n", + "e5oaf7h 1.732868 \n", + "e6nir3u 1.906155 \n", + "e6c3xdn 1.906155 \n", + "e5d3zaa 1.549826 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 1.549826 \n", + "e5syrih 1.494175 \n", + "e5sa2yf 2.079442 \n", + "e6ai7z5 1.791759 \n", + "... ... \n", + "e5smhzk 1.945910 \n", + "e5v91s0 1.039721 \n", + "e6n6di6 1.494175 \n", + "e6iqq30 1.098612 \n", + "e5bfad7 1.329661 \n", + "e6x5he5 1.332179 \n", + "e6l9uyf 1.039721 \n", + "e57hyr1 1.332179 \n", + "e5b8sj7 1.011404 \n", + "e6nlep7 NaN \n", + "e6ltazd 1.386294 \n", + "e57a6qq 1.732868 \n", + "e5qc7eb 1.277034 \n", + "e6hqt5y 0.500402 \n", + "e5ua84v 1.494175 \n", + "e65m7kq 1.332179 \n", + "e5ggtru 1.213008 \n", + "e5pmmig 1.906155 \n", + "e64l6vq 1.560710 \n", + "e6fjx0d 1.945910 \n", + "e5h3xyy 1.332179 \n", + "e589ri5 1.098612 \n", + "e5beuqa 1.475076 \n", + "e5lqoj1 1.386294 \n", + "e5kvch1 1.791759 \n", + "e6srvwm 1.329661 \n", + "e5o65mk 1.945910 \n", + "e647cm8 0.950271 \n", + "e58n526 NaN \n", + "e69r2kg 1.549826 \n", "\n", - " is-present[external reciprocity motif] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 0.0 \n", + " entropy[indegree over c->c responses] \\\n", + "e6p7yrp 1.831020 \n", + "e5ywqyk 2.043192 \n", + "e5qv9rj 1.464816 \n", + "e6jhojf 2.043192 \n", + 
"e6989ii 2.043192 \n", + "e69lgse 1.464816 \n", + "e5kwkg2 1.676988 \n", + "e6mehe7 1.676988 \n", + "e6m0hsd 2.043192 \n", + "e64r385 1.735126 \n", + "e5surbt 1.303092 \n", + "e58gxii 1.735126 \n", + "e64vc8y 1.002718 \n", + "e57504g 1.735126 \n", + "e5borjq 1.522955 \n", + "e64n9zv 2.043192 \n", + "e582ud3 0.848686 \n", + "e64i9cf 1.427061 \n", + "e6q9204 1.889159 \n", + "e5modd7 1.273028 \n", + "e5xhbyd 1.889159 \n", + "e5oaf7h 1.889159 \n", + "e6nir3u 2.043192 \n", + "e6c3xdn 2.043192 \n", + "e5d3zaa 1.735126 \n", + "e5gnjv9 0.348832 \n", + "e69gw2t 1.735126 \n", + "e5syrih 1.676988 \n", + "e5sa2yf 2.197225 \n", + "e6ai7z5 1.831020 \n", + "... ... \n", + "e5smhzk 2.043192 \n", + "e5v91s0 1.149060 \n", + "e6n6di6 1.676988 \n", + "e6iqq30 1.002718 \n", + "e5bfad7 1.522955 \n", + "e6x5he5 1.427061 \n", + "e6l9uyf 1.149060 \n", + "e57hyr1 1.427061 \n", + "e5b8sj7 1.310784 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 1.581094 \n", + "e57a6qq 1.889159 \n", + "e5qc7eb 1.522955 \n", + "e6hqt5y 0.964963 \n", + "e5ua84v 1.676988 \n", + "e65m7kq 1.427061 \n", + "e5ggtru 1.427061 \n", + "e5pmmig 2.043192 \n", + "e64l6vq 1.676988 \n", + "e6fjx0d 2.043192 \n", + "e5h3xyy 1.427061 \n", + "e589ri5 1.002718 \n", + "e5beuqa 1.676988 \n", + "e5lqoj1 1.303092 \n", + "e5kvch1 1.831020 \n", + "e6srvwm 1.522955 \n", + "e5o65mk 2.043192 \n", + "e647cm8 1.214890 \n", + "e58n526 0.000000 \n", + "e69r2kg 1.735126 \n", + "\n", + " entropy[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.242453 \n", + "e5ywqyk 1.039721 \n", + "e5qv9rj 1.791759 \n", + "e6jhojf 1.153742 \n", + "e6989ii 0.693147 \n", + "e69lgse 1.560710 \n", + "e5kwkg2 1.791759 \n", + "e6mehe7 0.950271 \n", + "e6m0hsd 1.039721 \n", + "e64r385 1.747868 \n", + "e5surbt 1.386294 \n", + "e58gxii 1.475076 \n", + "e64vc8y 1.098612 \n", + "e57504g 1.747868 \n", + "e5borjq 1.945910 \n", + "e64n9zv 1.039721 \n", + "e582ud3 1.098612 \n", + "e64i9cf 1.039721 \n", + "e6q9204 1.560710 \n", + "e5modd7 2.079442 \n", + "e5xhbyd 
1.329661 \n", + "e5oaf7h 1.039721 \n", + "e6nir3u 0.693147 \n", + "e6c3xdn 2.079442 \n", + "e5d3zaa 1.609438 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 1.332179 \n", + "e5syrih 1.906155 \n", + "e5sa2yf 0.693147 \n", + "e6ai7z5 0.867563 \n", + "... ... \n", + "e5smhzk 0.636514 \n", + "e5v91s0 1.386294 \n", + "e6n6di6 1.906155 \n", + "e6iqq30 1.098612 \n", + "e5bfad7 1.560710 \n", + "e6x5he5 0.000000 \n", + "e6l9uyf 1.386294 \n", + "e57hyr1 1.609438 \n", + "e5b8sj7 1.791759 \n", + "e6nlep7 NaN \n", + "e6ltazd 1.609438 \n", + "e57a6qq 1.560710 \n", + "e5qc7eb 1.791759 \n", + "e6hqt5y 1.609438 \n", + "e5ua84v 1.732868 \n", + "e65m7kq 1.609438 \n", + "e5ggtru 2.079442 \n", + "e5pmmig 1.329661 \n", + "e64l6vq 1.332179 \n", + "e6fjx0d 0.693147 \n", + "e5h3xyy 1.609438 \n", + "e589ri5 1.098612 \n", + "e5beuqa 1.475076 \n", + "e5lqoj1 0.636514 \n", + "e5kvch1 1.609438 \n", + "e6srvwm 1.791759 \n", + "e5o65mk 0.693147 \n", + "e647cm8 1.609438 \n", + "e58n526 NaN \n", + "e69r2kg 1.747868 \n", + "\n", + " entropy[outdegree over C->C responses] \\\n", + "e6p7yrp 1.667462 \n", + "e5ywqyk 1.332179 \n", + "e5qv9rj 2.197225 \n", + "e6jhojf 1.386294 \n", + "e6989ii 0.693147 \n", + "e69lgse 1.906155 \n", + "e5kwkg2 2.197225 \n", + "e6mehe7 1.667462 \n", + "e6m0hsd 1.039721 \n", + "e64r385 1.889159 \n", + "e5surbt 2.197225 \n", + "e58gxii 1.667462 \n", + "e64vc8y 2.197225 \n", + "e57504g 1.889159 \n", + "e5borjq 2.043192 \n", + "e64n9zv 1.039721 \n", + "e582ud3 2.197225 \n", + "e64i9cf 1.747868 \n", + "e6q9204 1.475076 \n", + "e5modd7 2.197225 \n", + "e5xhbyd 1.329661 \n", + "e5oaf7h 1.039721 \n", + "e6nir3u 0.693147 \n", + "e6c3xdn 2.197225 \n", + "e5d3zaa 1.747868 \n", + "e5gnjv9 2.043192 \n", + "e69gw2t 1.549826 \n", + "e5syrih 1.889159 \n", + "e5sa2yf 0.693147 \n", + "e6ai7z5 1.386294 \n", "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 1.0 \n", + "e5smhzk 1.039721 \n", + "e5v91s0 2.197225 \n", + "e6n6di6 2.043192 \n", + "e6iqq30 2.197225 \n", + "e5bfad7 2.043192 \n", + "e6x5he5 1.386294 \n", + "e6l9uyf 2.043192 \n", + "e57hyr1 1.735126 \n", + "e5b8sj7 2.043192 \n", + "e6nlep7 2.197225 \n", + "e6ltazd 1.560710 \n", + "e57a6qq 1.560710 \n", + "e5qc7eb 1.747868 \n", + "e6hqt5y 2.043192 \n", + "e5ua84v 1.676988 \n", + "e65m7kq 2.043192 \n", + "e5ggtru 2.043192 \n", + "e5pmmig 1.277034 \n", + "e64l6vq 1.747868 \n", + "e6fjx0d 1.039721 \n", + "e5h3xyy 2.043192 \n", + "e589ri5 2.197225 \n", + "e5beuqa 1.427061 \n", + "e5lqoj1 1.667462 \n", + "e5kvch1 1.732868 \n", + "e6srvwm 2.043192 \n", + "e5o65mk 0.693147 \n", + "e647cm8 2.197225 \n", + "e58n526 2.197225 \n", + "e69r2kg 2.043192 \n", "\n", - " count[external reciprocity motif] \\\n", - "e5hm9mp 4.0 \n", - "e5ytz1d 5.0 \n", - "e6ls80j 4.0 \n", - "e5mhgl5 6.0 \n", - "e6w6fah 0.0 \n", - "... ... \n", - "e65ca8k 0.0 \n", - "e6cdkpy 5.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 4.0 \n", - "e5ua84v 5.0 \n", + " entropy[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.242453 \n", + "e5ywqyk 0.974315 \n", + "e5qv9rj 1.791759 \n", + "e6jhojf 1.213008 \n", + "e6989ii 0.682908 \n", + "e69lgse 1.560710 \n", + "e5kwkg2 1.791759 \n", + "e6mehe7 1.011404 \n", + "e6m0hsd 0.974315 \n", + "e64r385 1.747868 \n", + "e5surbt 1.386294 \n", + "e58gxii 1.475076 \n", + "e64vc8y 1.098612 \n", + "e57504g 1.747868 \n", + "e5borjq 1.945910 \n", + "e64n9zv 1.082196 \n", + "e582ud3 1.098612 \n", + "e64i9cf 0.950271 \n", + "e6q9204 1.494175 \n", + "e5modd7 2.079442 \n", + "e5xhbyd 1.320888 \n", + "e5oaf7h 0.974315 \n", + "e6nir3u 0.682908 \n", + "e6c3xdn 2.079442 \n", + "e5d3zaa 1.549826 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 1.277034 \n", + "e5syrih 1.906155 \n", + "e5sa2yf 0.693147 \n", + "e6ai7z5 0.867563 \n", + "... ... 
\n", + "e5smhzk 0.682908 \n", + "e5v91s0 1.386294 \n", + "e6n6di6 1.906155 \n", + "e6iqq30 1.098612 \n", + "e5bfad7 1.560710 \n", + "e6x5he5 0.000000 \n", + "e6l9uyf 1.386294 \n", + "e57hyr1 1.609438 \n", + "e5b8sj7 1.791759 \n", + "e6nlep7 NaN \n", + "e6ltazd 1.494175 \n", + "e57a6qq 1.494175 \n", + "e5qc7eb 1.747868 \n", + "e6hqt5y 1.609438 \n", + "e5ua84v 1.732868 \n", + "e65m7kq 1.609438 \n", + "e5ggtru 2.079442 \n", + "e5pmmig 1.320888 \n", + "e64l6vq 1.242453 \n", + "e6fjx0d 0.682908 \n", + "e5h3xyy 1.609438 \n", + "e589ri5 1.098612 \n", + "e5beuqa 1.475076 \n", + "e5lqoj1 0.693147 \n", + "e5kvch1 1.560710 \n", + "e6srvwm 1.791759 \n", + "e5o65mk 0.682908 \n", + "e647cm8 1.609438 \n", + "e58n526 NaN \n", + "e69r2kg 1.747868 \n", + "\n", + " entropy[outdegree over C->c responses] \\\n", + "e6p7yrp 1.676988 \n", + "e5ywqyk 1.214890 \n", + "e5qv9rj 2.197225 \n", + "e6jhojf 1.427061 \n", + "e6989ii 0.693147 \n", + "e69lgse 1.889159 \n", + "e5kwkg2 2.197225 \n", + "e6mehe7 1.676988 \n", + "e6m0hsd 0.936888 \n", + "e64r385 1.889159 \n", + "e5surbt 2.197225 \n", + "e58gxii 1.676988 \n", + "e64vc8y 2.197225 \n", + "e57504g 1.889159 \n", + "e5borjq 2.043192 \n", + "e64n9zv 1.060857 \n", + "e582ud3 2.197225 \n", + "e64i9cf 1.676988 \n", + "e6q9204 1.427061 \n", + "e5modd7 2.197225 \n", + "e5xhbyd 1.310784 \n", + "e5oaf7h 0.936888 \n", + "e6nir3u 0.693147 \n", + "e6c3xdn 2.197225 \n", + "e5d3zaa 1.676988 \n", + "e5gnjv9 2.043192 \n", + "e69gw2t 1.464816 \n", + "e5syrih 1.889159 \n", + "e5sa2yf 0.686962 \n", + "e6ai7z5 1.303092 \n", + "... ... 
\n", + "e5smhzk 0.964963 \n", + "e5v91s0 2.197225 \n", + "e6n6di6 2.043192 \n", + "e6iqq30 2.197225 \n", + "e5bfad7 2.043192 \n", + "e6x5he5 1.386294 \n", + "e6l9uyf 2.043192 \n", + "e57hyr1 1.735126 \n", + "e5b8sj7 2.043192 \n", + "e6nlep7 2.197225 \n", + "e6ltazd 1.464816 \n", + "e57a6qq 1.464816 \n", + "e5qc7eb 1.735126 \n", + "e6hqt5y 2.043192 \n", + "e5ua84v 1.676988 \n", + "e65m7kq 2.043192 \n", + "e5ggtru 2.043192 \n", + "e5pmmig 1.310784 \n", + "e64l6vq 1.676988 \n", + "e6fjx0d 0.964963 \n", + "e5h3xyy 2.043192 \n", + "e589ri5 2.197225 \n", + "e5beuqa 1.427061 \n", + "e5lqoj1 1.676988 \n", + "e5kvch1 1.735126 \n", + "e6srvwm 2.043192 \n", + "e5o65mk 0.693147 \n", + "e647cm8 2.197225 \n", + "e58n526 2.197225 \n", + "e69r2kg 2.043192 \n", + "\n", + " is-present[dyadic interaction motif over mid-thread] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 0.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 0.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 1.0 \n", + "e5surbt 0.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 0.0 \n", + "e57504g 1.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 1.0 \n", + "e5modd7 0.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 0.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... 
\n", + "e5smhzk 1.0 \n", + "e5v91s0 0.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 0.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 0.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 0.0 \n", + "e5ua84v 1.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 0.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 0.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 0.0 \n", + "e58n526 0.0 \n", + "e69r2kg 1.0 \n", "\n", " is-present[dyadic interaction motif] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 0.0 \n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 0.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 0.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 1.0 \n", + "e5surbt 0.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 0.0 \n", + "e57504g 1.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 1.0 \n", + "e5modd7 0.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 0.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 0.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 1.0 \n", + "e5smhzk 1.0 \n", + "e5v91s0 0.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 0.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 0.0 \n", "e5ua84v 1.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 0.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 0.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 1.0 \n", "\n", - " count[dyadic interaction motif] is-present[incoming triads] \\\n", - "e5hm9mp 3.0 1.0 \n", - "e5ytz1d 1.0 1.0 \n", - "e6ls80j 1.0 1.0 \n", - "e5mhgl5 1.0 1.0 \n", - "e6w6fah 0.0 1.0 \n", - "... ... ... \n", - "e65ca8k 0.0 1.0 \n", - "e6cdkpy 0.0 1.0 \n", - "e5wc4tj 1.0 0.0 \n", - "e6ua0sb 2.0 1.0 \n", - "e5ua84v 3.0 1.0 \n", - "\n", - " count[incoming triads] is-present[outgoing triads] \\\n", - "e5hm9mp 4.0 1.0 \n", - "e5ytz1d 7.0 0.0 \n", - "e6ls80j 9.0 1.0 \n", - "e5mhgl5 2.0 1.0 \n", - "e6w6fah 28.0 0.0 \n", - "... ... ... \n", - "e65ca8k 1.0 0.0 \n", - "e6cdkpy 8.0 0.0 \n", - "e5wc4tj 0.0 0.0 \n", - "e6ua0sb 6.0 1.0 \n", - "e5ua84v 7.0 1.0 \n", - "\n", - " count[outgoing triads] max[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 3.0 3.0 \n", - "e5ytz1d 0.0 3.0 \n", - "e6ls80j 4.0 3.0 \n", - "e5mhgl5 1.0 2.0 \n", - "e6w6fah 0.0 0.0 \n", - "... ... ... 
\n", - "e65ca8k 0.0 1.0 \n", - "e6cdkpy 0.0 2.0 \n", - "e5wc4tj 0.0 1.0 \n", - "e6ua0sb 2.0 4.0 \n", - "e5ua84v 4.0 3.0 \n", + " is-present[external reciprocity motif over mid-thread] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 0.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 0.0 \n", + "e64r385 1.0 \n", + "e5surbt 1.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 1.0 \n", + "e57504g 1.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 0.0 \n", + "e6q9204 1.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 0.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 0.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 0.0 \n", + "e5ua84v 1.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 0.0 \n", + "e6fjx0d 0.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 0.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 1.0 \n", "\n", - " argmax[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 0.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 0.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 1.0 \n", - "e6cdkpy 0.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 0.0 \n", + " is-present[external reciprocity motif] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 0.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 1.0 \n", + "e5surbt 1.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 1.0 \n", + "e57504g 1.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 1.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 0.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 1.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 1.0 \n", "\n", - " norm.max[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 0.375000 \n", - "e5ytz1d 0.500000 \n", - "e6ls80j 0.600000 \n", - "e5mhgl5 0.250000 \n", - "e6w6fah 0.000000 \n", - "... ... 
\n", - "e65ca8k 0.142857 \n", - "e6cdkpy 0.400000 \n", - "e5wc4tj 0.125000 \n", - "e6ua0sb 0.571429 \n", - "e5ua84v 0.375000 \n", + " is-present[incoming triads over mid-thread] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 0.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 1.0 \n", + "e5surbt 0.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 0.0 \n", + "e57504g 1.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 1.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 0.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 1.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 0.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 0.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 0.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 1.0 \n", + "\n", + " is-present[incoming triads] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 0.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 1.0 \n", + "e5surbt 1.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 1.0 \n", + "e57504g 1.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 1.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 
1.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 1.0 \n", + "... ... \n", + "e5smhzk 1.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 1.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 1.0 \n", + "e58n526 1.0 \n", + "e69r2kg 1.0 \n", "\n", - " 2nd-largest[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 2.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 0.0 \n", - "... ... \n", - "e65ca8k 1.0 \n", - "e6cdkpy 2.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 2.0 \n", + " is-present[outgoing triads over mid-thread] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 0.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 0.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 0.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 1.0 \n", + "e5surbt 0.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 0.0 \n", + "e57504g 1.0 \n", + "e5borjq 0.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 1.0 \n", + "e5modd7 0.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 0.0 \n", + "e5d3zaa 0.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 1.0 \n", + "... ... 
\n", + "e5smhzk 1.0 \n", + "e5v91s0 0.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 0.0 \n", + "e57hyr1 0.0 \n", + "e5b8sj7 0.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 0.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 0.0 \n", + "e6hqt5y 0.0 \n", + "e5ua84v 1.0 \n", + "e65m7kq 0.0 \n", + "e5ggtru 0.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 0.0 \n", + "e5h3xyy 0.0 \n", + "e589ri5 0.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 0.0 \n", + "e6srvwm 0.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 0.0 \n", + "e58n526 0.0 \n", + "e69r2kg 1.0 \n", + "\n", + " is-present[outgoing triads] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 0.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 0.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 0.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 1.0 \n", + "e5surbt 0.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 0.0 \n", + "e57504g 1.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 1.0 \n", + "e5modd7 0.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 0.0 \n", + "e6c3xdn 0.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 0.0 \n", + "e6ai7z5 1.0 \n", + "... ... 
\n", + "e5smhzk 1.0 \n", + "e5v91s0 0.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 1.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 0.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 0.0 \n", + "e647cm8 0.0 \n", + "e58n526 0.0 \n", + "e69r2kg 1.0 \n", "\n", - " 2nd-argmax[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 4.0 \n", - "e5mhgl5 3.0 \n", - "e6w6fah 1.0 \n", - "... ... \n", - "e65ca8k 2.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 0.0 \n", - "e5ua84v 4.0 \n", + " is-present[reciprocity motif over mid-thread] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 0.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 0.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 1.0 \n", + "e5surbt 0.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 0.0 \n", + "e57504g 1.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 1.0 \n", + "e5modd7 0.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 0.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... 
\n", + "e5smhzk 1.0 \n", + "e5v91s0 0.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 0.0 \n", + "e5bfad7 0.0 \n", + "e6x5he5 0.0 \n", + "e6l9uyf 0.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 0.0 \n", + "e5ua84v 1.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 0.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 0.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 0.0 \n", + "e58n526 0.0 \n", + "e69r2kg 1.0 \n", + "\n", + " is-present[reciprocity motif] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 1.0 \n", + "e5qv9rj 0.0 \n", + "e6jhojf 1.0 \n", + "e6989ii 1.0 \n", + "e69lgse 1.0 \n", + "e5kwkg2 0.0 \n", + "e6mehe7 1.0 \n", + "e6m0hsd 1.0 \n", + "e64r385 1.0 \n", + "e5surbt 0.0 \n", + "e58gxii 1.0 \n", + "e64vc8y 0.0 \n", + "e57504g 1.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 1.0 \n", + "e582ud3 0.0 \n", + "e64i9cf 1.0 \n", + "e6q9204 1.0 \n", + "e5modd7 0.0 \n", + "e5xhbyd 1.0 \n", + "e5oaf7h 1.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 0.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 0.0 \n", + "e69gw2t 1.0 \n", + "e5syrih 1.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... 
\n", + "e5smhzk 1.0 \n", + "e5v91s0 0.0 \n", + "e6n6di6 1.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 1.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 0.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 1.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 0.0 \n", + "e5ua84v 1.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 0.0 \n", + "e5pmmig 1.0 \n", + "e64l6vq 1.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 0.0 \n", + "e5beuqa 1.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 1.0 \n", "\n", - " norm.2nd-largest[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 0.125000 \n", - "e5ytz1d 0.333333 \n", - "e6ls80j 0.200000 \n", - "e5mhgl5 0.250000 \n", - "e6w6fah NaN \n", - "... ... \n", - "e65ca8k 0.142857 \n", - "e6cdkpy 0.400000 \n", - "e5wc4tj 0.125000 \n", - "e6ua0sb 0.142857 \n", - "e5ua84v 0.250000 \n", + " max[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 3.0 \n", + "e5ywqyk 2.0 \n", + "e5qv9rj 3.0 \n", + "e6jhojf 3.0 \n", + "e6989ii 1.0 \n", + "e69lgse 3.0 \n", + "e5kwkg2 2.0 \n", + "e6mehe7 2.0 \n", + "e6m0hsd 2.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 3.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 3.0 \n", + "e64n9zv 2.0 \n", + "e582ud3 2.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 3.0 \n", + "e5modd7 4.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 3.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 2.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 4.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 2.0 \n", + "... ... 
\n", + "e5smhzk 1.0 \n", + "e5v91s0 2.0 \n", + "e6n6di6 4.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 2.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 4.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 4.0 \n", + "e57a6qq 3.0 \n", + "e5qc7eb 3.0 \n", + "e6hqt5y 4.0 \n", + "e5ua84v 4.0 \n", + "e65m7kq 3.0 \n", + "e5ggtru 5.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 2.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 2.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 4.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 3.0 \n", + "e58n526 0.0 \n", + "e69r2kg 3.0 \n", "\n", - " mean[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 0.888889 \n", - "e5ytz1d 0.666667 \n", - "e6ls80j 0.555556 \n", - "e5mhgl5 0.888889 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 0.777778 \n", - "e6cdkpy 0.555556 \n", - "e5wc4tj 0.888889 \n", - "e6ua0sb 0.777778 \n", - "e5ua84v 0.888889 \n", + " max[indegree over C->C responses] \\\n", + "e6p7yrp 5.0 \n", + "e5ywqyk 2.0 \n", + "e5qv9rj 3.0 \n", + "e6jhojf 4.0 \n", + "e6989ii 1.0 \n", + "e69lgse 3.0 \n", + "e5kwkg2 3.0 \n", + "e6mehe7 4.0 \n", + "e6m0hsd 2.0 \n", + "e64r385 2.0 \n", + "e5surbt 5.0 \n", + "e58gxii 4.0 \n", + "e64vc8y 6.0 \n", + "e57504g 2.0 \n", + "e5borjq 3.0 \n", + "e64n9zv 2.0 \n", + "e582ud3 6.0 \n", + "e64i9cf 4.0 \n", + "e6q9204 3.0 \n", + "e5modd7 4.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 3.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 2.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 8.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 4.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 3.0 \n", + "... ... 
\n", + "e5smhzk 2.0 \n", + "e5v91s0 5.0 \n", + "e6n6di6 4.0 \n", + "e6iqq30 6.0 \n", + "e5bfad7 4.0 \n", + "e6x5he5 4.0 \n", + "e6l9uyf 5.0 \n", + "e57hyr1 4.0 \n", + "e5b8sj7 4.0 \n", + "e6nlep7 9.0 \n", + "e6ltazd 4.0 \n", + "e57a6qq 3.0 \n", + "e5qc7eb 3.0 \n", + "e6hqt5y 4.0 \n", + "e5ua84v 4.0 \n", + "e65m7kq 4.0 \n", + "e5ggtru 5.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 3.0 \n", + "e6fjx0d 2.0 \n", + "e5h3xyy 4.0 \n", + "e589ri5 6.0 \n", + "e5beuqa 4.0 \n", + "e5lqoj1 5.0 \n", + "e5kvch1 3.0 \n", + "e6srvwm 3.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 4.0 \n", + "e58n526 9.0 \n", + "e69r2kg 3.0 \n", "\n", - " mean-nonzero[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 1.333333 \n", - "e5ytz1d 2.000000 \n", - "e6ls80j 1.666667 \n", - "e5mhgl5 1.333333 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 1.000000 \n", - "e6cdkpy 1.666667 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.750000 \n", - "e5ua84v 1.600000 \n", + " max[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 2.0 \n", + "e5qv9rj 3.0 \n", + "e6jhojf 2.0 \n", + "e6989ii 1.0 \n", + "e69lgse 3.0 \n", + "e5kwkg2 2.0 \n", + "e6mehe7 2.0 \n", + "e6m0hsd 2.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 2.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 3.0 \n", + "e64n9zv 2.0 \n", + "e582ud3 2.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 2.0 \n", + "e5modd7 4.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 2.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 2.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 3.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... 
\n", + "e5smhzk 1.0 \n", + "e5v91s0 2.0 \n", + "e6n6di6 3.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 1.0 \n", + "e6l9uyf 2.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 3.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 4.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 3.0 \n", + "e6hqt5y 4.0 \n", + "e5ua84v 3.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 4.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 2.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 2.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 3.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 3.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", - " prop-nonzero[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 0.666667 \n", - "e5ytz1d 0.333333 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 0.666667 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 0.777778 \n", - "e6cdkpy 0.333333 \n", - "e5wc4tj 0.888889 \n", - "e6ua0sb 0.444444 \n", - "e5ua84v 0.555556 \n", + " max[indegree over C->c responses] \\\n", + "e6p7yrp 3.0 \n", + "e5ywqyk 2.0 \n", + "e5qv9rj 3.0 \n", + "e6jhojf 2.0 \n", + "e6989ii 1.0 \n", + "e69lgse 3.0 \n", + "e5kwkg2 3.0 \n", + "e6mehe7 3.0 \n", + "e6m0hsd 2.0 \n", + "e64r385 2.0 \n", + "e5surbt 5.0 \n", + "e58gxii 2.0 \n", + "e64vc8y 6.0 \n", + "e57504g 2.0 \n", + "e5borjq 3.0 \n", + "e64n9zv 2.0 \n", + "e582ud3 6.0 \n", + "e64i9cf 4.0 \n", + "e6q9204 2.0 \n", + "e5modd7 4.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 2.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 2.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 8.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 3.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 3.0 \n", + "... ... 
\n", + "e5smhzk 2.0 \n", + "e5v91s0 5.0 \n", + "e6n6di6 3.0 \n", + "e6iqq30 6.0 \n", + "e5bfad7 3.0 \n", + "e6x5he5 4.0 \n", + "e6l9uyf 5.0 \n", + "e57hyr1 4.0 \n", + "e5b8sj7 3.0 \n", + "e6nlep7 9.0 \n", + "e6ltazd 4.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 3.0 \n", + "e6hqt5y 4.0 \n", + "e5ua84v 3.0 \n", + "e65m7kq 4.0 \n", + "e5ggtru 4.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 3.0 \n", + "e6fjx0d 2.0 \n", + "e5h3xyy 4.0 \n", + "e589ri5 6.0 \n", + "e5beuqa 3.0 \n", + "e5lqoj1 5.0 \n", + "e5kvch1 3.0 \n", + "e6srvwm 3.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 4.0 \n", + "e58n526 9.0 \n", + "e69r2kg 2.0 \n", + "\n", + " max[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 1.0 \n", + "e5ywqyk 2.0 \n", + "e5qv9rj 3.0 \n", + "e6jhojf 2.0 \n", + "e6989ii 2.0 \n", + "e69lgse 3.0 \n", + "e5kwkg2 2.0 \n", + "e6mehe7 2.0 \n", + "e6m0hsd 2.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 2.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 3.0 \n", + "e64n9zv 2.0 \n", + "e582ud3 2.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 2.0 \n", + "e5modd7 4.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 2.0 \n", + "e6nir3u 2.0 \n", + "e6c3xdn 2.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 3.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 1.0 \n", + "... ... 
\n", + "e5smhzk 1.0 \n", + "e5v91s0 2.0 \n", + "e6n6di6 3.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 2.0 \n", + "e6l9uyf 2.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 3.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 4.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 3.0 \n", + "e6hqt5y 4.0 \n", + "e5ua84v 3.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 4.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 2.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 2.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 3.0 \n", + "e5lqoj1 1.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 3.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", - " prop-multiple[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 0.166667 \n", - "e5ytz1d 0.666667 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 0.333333 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 0.000000 \n", - "e6cdkpy 0.666667 \n", - "e5wc4tj 0.000000 \n", - "e6ua0sb 0.250000 \n", - "e5ua84v 0.400000 \n", + " max[indegree over c->c responses] \\\n", + "e6p7yrp 3.0 \n", + "e5ywqyk 2.0 \n", + "e5qv9rj 3.0 \n", + "e6jhojf 2.0 \n", + "e6989ii 2.0 \n", + "e69lgse 3.0 \n", + "e5kwkg2 3.0 \n", + "e6mehe7 3.0 \n", + "e6m0hsd 2.0 \n", + "e64r385 2.0 \n", + "e5surbt 5.0 \n", + "e58gxii 2.0 \n", + "e64vc8y 6.0 \n", + "e57504g 2.0 \n", + "e5borjq 3.0 \n", + "e64n9zv 2.0 \n", + "e582ud3 6.0 \n", + "e64i9cf 4.0 \n", + "e6q9204 2.0 \n", + "e5modd7 4.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 2.0 \n", + "e6nir3u 2.0 \n", + "e6c3xdn 2.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 8.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 3.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 3.0 \n", + "... ... 
\n", + "e5smhzk 2.0 \n", + "e5v91s0 5.0 \n", + "e6n6di6 3.0 \n", + "e6iqq30 6.0 \n", + "e5bfad7 3.0 \n", + "e6x5he5 4.0 \n", + "e6l9uyf 5.0 \n", + "e57hyr1 4.0 \n", + "e5b8sj7 3.0 \n", + "e6nlep7 9.0 \n", + "e6ltazd 4.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 3.0 \n", + "e6hqt5y 4.0 \n", + "e5ua84v 3.0 \n", + "e65m7kq 4.0 \n", + "e5ggtru 4.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 3.0 \n", + "e6fjx0d 2.0 \n", + "e5h3xyy 4.0 \n", + "e589ri5 6.0 \n", + "e5beuqa 3.0 \n", + "e5lqoj1 5.0 \n", + "e5kvch1 3.0 \n", + "e6srvwm 3.0 \n", + "e5o65mk 2.0 \n", + "e647cm8 4.0 \n", + "e58n526 9.0 \n", + "e69r2kg 2.0 \n", "\n", - " entropy[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 1.667462 \n", - "e5ytz1d 1.011404 \n", - "e6ls80j 0.950271 \n", - "e5mhgl5 1.732868 \n", - "e6w6fah NaN \n", - "... ... \n", - "e65ca8k 1.945910 \n", - "e6cdkpy 1.054920 \n", - "e5wc4tj 2.079442 \n", - "e6ua0sb 1.153742 \n", - "e5ua84v 1.494175 \n", + " max[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 3.0 \n", + "e5ywqyk 2.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 4.0 \n", + "e6989ii 1.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 3.0 \n", + "e6m0hsd 2.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 3.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 2.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 2.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 2.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 1.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 4.0 \n", + "... ... 
\n", + "e5smhzk 2.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 4.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 1.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 1.0 \n", + "e6hqt5y 1.0 \n", + "e5ua84v 2.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 2.0 \n", + "e64l6vq 2.0 \n", + "e6fjx0d 1.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 3.0 \n", + "e5lqoj1 2.0 \n", + "e5kvch1 1.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", - " 2nd-largest / max[indegree over c->c mid-thread responses] \\\n", - "e5hm9mp 0.333333 \n", - "e5ytz1d 0.666667 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 1.000000 \n", - "e6w6fah NaN \n", - "... ... \n", - "e65ca8k 1.000000 \n", - "e6cdkpy 1.000000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 0.250000 \n", - "e5ua84v 0.666667 \n", + " max[outdegree over C->C responses] \\\n", + "e6p7yrp 3.0 \n", + "e5ywqyk 2.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 4.0 \n", + "e6989ii 1.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 3.0 \n", + "e6m0hsd 2.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 3.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 2.0 \n", + "e64n9zv 2.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 2.0 \n", + "e6q9204 3.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 2.0 \n", + "e5oaf7h 2.0 \n", + "e6nir3u 1.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 2.0 \n", + "e69gw2t 2.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 1.0 \n", + "e6ai7z5 4.0 \n", + "... ... 
\n", + "e5smhzk 2.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 4.0 \n", + "e6l9uyf 2.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 2.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 2.0 \n", + "e57a6qq 2.0 \n", + "e5qc7eb 2.0 \n", + "e6hqt5y 2.0 \n", + "e5ua84v 3.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 2.0 \n", + "e5pmmig 3.0 \n", + "e64l6vq 2.0 \n", + "e6fjx0d 2.0 \n", + "e5h3xyy 2.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 4.0 \n", + "e5lqoj1 3.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 1.0 \n", + "e647cm8 1.0 \n", + "e58n526 1.0 \n", + "e69r2kg 2.0 \n", "\n", " max[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 2.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 0.0 \n", + "e6p7yrp 3.0 \n", + "e5ywqyk 4.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 4.0 \n", + "e6989ii 4.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 3.0 \n", + "e6m0hsd 4.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 3.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 1.0 \n", + "e64n9zv 3.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 3.0 \n", + "e6q9204 3.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 3.0 \n", + "e5oaf7h 4.0 \n", + "e6nir3u 4.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 2.0 \n", + "e5gnjv9 1.0 \n", + "e69gw2t 3.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 4.0 \n", + "e6ai7z5 4.0 \n", "... ... 
\n", - "e65ca8k 7.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 4.0 \n", - "e6ua0sb 2.0 \n", + "e5smhzk 4.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 4.0 \n", + "e6l9uyf 1.0 \n", + "e57hyr1 1.0 \n", + "e5b8sj7 1.0 \n", + "e6nlep7 0.0 \n", + "e6ltazd 3.0 \n", + "e57a6qq 3.0 \n", + "e5qc7eb 2.0 \n", + "e6hqt5y 1.0 \n", "e5ua84v 2.0 \n", + "e65m7kq 1.0 \n", + "e5ggtru 1.0 \n", + "e5pmmig 3.0 \n", + "e64l6vq 3.0 \n", + "e6fjx0d 4.0 \n", + "e5h3xyy 1.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 3.0 \n", + "e5lqoj1 2.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 1.0 \n", + "e5o65mk 4.0 \n", + "e647cm8 1.0 \n", + "e58n526 0.0 \n", + "e69r2kg 2.0 \n", "\n", - " max[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 3.0 \n", - "e5ytz1d 3.0 \n", - "e6ls80j 3.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 0.0 \n", - "... ... \n", - "e65ca8k 1.0 \n", - "e6cdkpy 2.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 4.0 \n", - "e5ua84v 3.0 \n", + " max[outdegree over C->c responses] \\\n", + "e6p7yrp 3.0 \n", + "e5ywqyk 4.0 \n", + "e5qv9rj 1.0 \n", + "e6jhojf 4.0 \n", + "e6989ii 4.0 \n", + "e69lgse 2.0 \n", + "e5kwkg2 1.0 \n", + "e6mehe7 3.0 \n", + "e6m0hsd 5.0 \n", + "e64r385 2.0 \n", + "e5surbt 1.0 \n", + "e58gxii 3.0 \n", + "e64vc8y 1.0 \n", + "e57504g 2.0 \n", + "e5borjq 2.0 \n", + "e64n9zv 4.0 \n", + "e582ud3 1.0 \n", + "e64i9cf 3.0 \n", + "e6q9204 4.0 \n", + "e5modd7 1.0 \n", + "e5xhbyd 3.0 \n", + "e5oaf7h 5.0 \n", + "e6nir3u 4.0 \n", + "e6c3xdn 1.0 \n", + "e5d3zaa 3.0 \n", + "e5gnjv9 2.0 \n", + "e69gw2t 3.0 \n", + "e5syrih 2.0 \n", + "e5sa2yf 5.0 \n", + "e6ai7z5 5.0 \n", + "... ... 
\n", + "e5smhzk 4.0 \n", + "e5v91s0 1.0 \n", + "e6n6di6 2.0 \n", + "e6iqq30 1.0 \n", + "e5bfad7 2.0 \n", + "e6x5he5 4.0 \n", + "e6l9uyf 2.0 \n", + "e57hyr1 2.0 \n", + "e5b8sj7 2.0 \n", + "e6nlep7 1.0 \n", + "e6ltazd 3.0 \n", + "e57a6qq 3.0 \n", + "e5qc7eb 2.0 \n", + "e6hqt5y 2.0 \n", + "e5ua84v 3.0 \n", + "e65m7kq 2.0 \n", + "e5ggtru 2.0 \n", + "e5pmmig 3.0 \n", + "e64l6vq 3.0 \n", + "e6fjx0d 4.0 \n", + "e5h3xyy 2.0 \n", + "e589ri5 1.0 \n", + "e5beuqa 4.0 \n", + "e5lqoj1 3.0 \n", + "e5kvch1 2.0 \n", + "e6srvwm 2.0 \n", + "e5o65mk 4.0 \n", + "e647cm8 1.0 \n", + "e58n526 1.0 \n", + "e69r2kg 2.0 \n", "\n", - " argmax[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 3.0 \n", - "e6ls80j 2.0 \n", - "e5mhgl5 3.0 \n", - "e6w6fah 0.0 \n", - "... ... \n", - "e65ca8k 1.0 \n", - "e6cdkpy 2.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 0.0 \n", + " mean-nonzero[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.500000 \n", + "e5ywqyk 1.333333 \n", + "e5qv9rj 1.500000 \n", + "e6jhojf 1.400000 \n", + "e6989ii 1.000000 \n", + "e69lgse 1.500000 \n", + "e5kwkg2 1.200000 \n", + "e6mehe7 1.250000 \n", + "e6m0hsd 1.333333 \n", + "e64r385 1.400000 \n", + "e5surbt 1.000000 \n", + "e58gxii 1.750000 \n", + "e64vc8y 1.000000 \n", + "e57504g 1.400000 \n", + "e5borjq 1.750000 \n", + "e64n9zv 1.333333 \n", + "e582ud3 1.500000 \n", + "e64i9cf 1.333333 \n", + "e6q9204 2.000000 \n", + "e5modd7 2.666667 \n", + "e5xhbyd 1.500000 \n", + "e5oaf7h 2.000000 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 1.142857 \n", + "e5d3zaa 1.666667 \n", + "e5gnjv9 1.000000 \n", + "e69gw2t 1.666667 \n", + "e5syrih 2.666667 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.200000 \n", + "... ... 
\n", + "e5smhzk 1.000000 \n", + "e5v91s0 1.333333 \n", + "e6n6di6 2.000000 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.500000 \n", + "e6x5he5 1.000000 \n", + "e6l9uyf 1.333333 \n", + "e57hyr1 1.250000 \n", + "e5b8sj7 3.000000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 2.500000 \n", + "e57a6qq 2.000000 \n", + "e5qc7eb 2.000000 \n", + "e6hqt5y 2.500000 \n", + "e5ua84v 2.000000 \n", + "e65m7kq 1.666667 \n", + "e5ggtru 2.666667 \n", + "e5pmmig 1.500000 \n", + "e64l6vq 1.250000 \n", + "e6fjx0d 1.000000 \n", + "e5h3xyy 1.250000 \n", + "e589ri5 1.000000 \n", + "e5beuqa 1.750000 \n", + "e5lqoj1 1.000000 \n", + "e5kvch1 1.250000 \n", + "e6srvwm 1.500000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 1.666667 \n", + "e58n526 0.000000 \n", + "e69r2kg 1.750000 \n", "\n", - " argmax[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 0.0 \n", - "e6w6fah 0.0 \n", - "... ... \n", - "e65ca8k 1.0 \n", - "e6cdkpy 0.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 0.0 \n", + " mean-nonzero[indegree over C->C responses] \\\n", + "e6p7yrp 2.000000 \n", + "e5ywqyk 1.666667 \n", + "e5qv9rj 1.800000 \n", + "e6jhojf 1.600000 \n", + "e6989ii 1.000000 \n", + "e69lgse 2.000000 \n", + "e5kwkg2 1.500000 \n", + "e6mehe7 2.000000 \n", + "e6m0hsd 1.333333 \n", + "e64r385 1.500000 \n", + "e5surbt 1.800000 \n", + "e58gxii 2.000000 \n", + "e64vc8y 2.250000 \n", + "e57504g 1.500000 \n", + "e5borjq 1.800000 \n", + "e64n9zv 1.333333 \n", + "e582ud3 3.000000 \n", + "e64i9cf 2.333333 \n", + "e6q9204 1.750000 \n", + "e5modd7 2.250000 \n", + "e5xhbyd 1.500000 \n", + "e5oaf7h 2.000000 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 1.125000 \n", + "e5d3zaa 1.750000 \n", + "e5gnjv9 4.500000 \n", + "e69gw2t 1.750000 \n", + "e5syrih 2.250000 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.600000 \n", + "... ... 
\n", + "e5smhzk 1.333333 \n", + "e5v91s0 2.250000 \n", + "e6n6di6 1.800000 \n", + "e6iqq30 2.250000 \n", + "e5bfad7 2.250000 \n", + "e6x5he5 1.600000 \n", + "e6l9uyf 2.250000 \n", + "e57hyr1 1.800000 \n", + "e5b8sj7 3.000000 \n", + "e6nlep7 9.000000 \n", + "e6ltazd 2.000000 \n", + "e57a6qq 2.000000 \n", + "e5qc7eb 2.333333 \n", + "e6hqt5y 3.000000 \n", + "e5ua84v 1.800000 \n", + "e65m7kq 2.250000 \n", + "e5ggtru 2.250000 \n", + "e5pmmig 1.400000 \n", + "e64l6vq 1.750000 \n", + "e6fjx0d 1.333333 \n", + "e5h3xyy 1.800000 \n", + "e589ri5 2.250000 \n", + "e5beuqa 1.800000 \n", + "e5lqoj1 2.000000 \n", + "e5kvch1 1.600000 \n", + "e6srvwm 1.800000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 2.250000 \n", + "e58n526 9.000000 \n", + "e69r2kg 1.800000 \n", "\n", - " norm.max[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.250000 \n", - "e5ytz1d 0.166667 \n", - "e6ls80j 0.400000 \n", - "e5mhgl5 0.250000 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 1.000000 \n", - "e6cdkpy 0.200000 \n", - "e5wc4tj 0.500000 \n", - "e6ua0sb 0.285714 \n", - "e5ua84v 0.250000 \n", + " mean-nonzero[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.000000 \n", + "e5ywqyk 1.142857 \n", + "e5qv9rj 1.500000 \n", + "e6jhojf 1.142857 \n", + "e6989ii 1.000000 \n", + "e69lgse 1.500000 \n", + "e5kwkg2 1.200000 \n", + "e6mehe7 1.200000 \n", + "e6m0hsd 1.142857 \n", + "e64r385 1.400000 \n", + "e5surbt 1.000000 \n", + "e58gxii 1.400000 \n", + "e64vc8y 1.000000 \n", + "e57504g 1.400000 \n", + "e5borjq 1.750000 \n", + "e64n9zv 1.142857 \n", + "e582ud3 1.500000 \n", + "e64i9cf 1.250000 \n", + "e6q9204 1.333333 \n", + "e5modd7 2.666667 \n", + "e5xhbyd 1.333333 \n", + "e5oaf7h 1.333333 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 1.142857 \n", + "e5d3zaa 1.400000 \n", + "e5gnjv9 1.000000 \n", + "e69gw2t 1.400000 \n", + "e5syrih 1.600000 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.000000 \n", + "... ... 
\n", + "e5smhzk 1.000000 \n", + "e5v91s0 1.333333 \n", + "e6n6di6 1.600000 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.500000 \n", + "e6x5he5 1.000000 \n", + "e6l9uyf 1.333333 \n", + "e57hyr1 1.250000 \n", + "e5b8sj7 2.000000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 1.600000 \n", + "e57a6qq 1.333333 \n", + "e5qc7eb 1.750000 \n", + "e6hqt5y 2.500000 \n", + "e5ua84v 1.600000 \n", + "e65m7kq 1.250000 \n", + "e5ggtru 2.000000 \n", + "e5pmmig 1.142857 \n", + "e64l6vq 1.200000 \n", + "e6fjx0d 1.000000 \n", + "e5h3xyy 1.250000 \n", + "e589ri5 1.000000 \n", + "e5beuqa 1.400000 \n", + "e5lqoj1 1.000000 \n", + "e5kvch1 1.000000 \n", + "e6srvwm 1.500000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 1.666667 \n", + "e58n526 0.000000 \n", + "e69r2kg 1.400000 \n", "\n", - " norm.max[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.375000 \n", - "e5ytz1d 0.500000 \n", - "e6ls80j 0.600000 \n", - "e5mhgl5 0.250000 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 0.142857 \n", - "e6cdkpy 0.400000 \n", - "e5wc4tj 0.125000 \n", - "e6ua0sb 0.571429 \n", - "e5ua84v 0.375000 \n", + " mean-nonzero[indegree over C->c responses] \\\n", + "e6p7yrp 1.285714 \n", + "e5ywqyk 1.125000 \n", + "e5qv9rj 1.800000 \n", + "e6jhojf 1.125000 \n", + "e6989ii 1.000000 \n", + "e69lgse 1.800000 \n", + "e5kwkg2 1.500000 \n", + "e6mehe7 1.500000 \n", + "e6m0hsd 1.125000 \n", + "e64r385 1.500000 \n", + "e5surbt 1.800000 \n", + "e58gxii 1.500000 \n", + "e64vc8y 2.250000 \n", + "e57504g 1.500000 \n", + "e5borjq 1.800000 \n", + "e64n9zv 1.125000 \n", + "e582ud3 3.000000 \n", + "e64i9cf 1.800000 \n", + "e6q9204 1.285714 \n", + "e5modd7 2.250000 \n", + "e5xhbyd 1.285714 \n", + "e5oaf7h 1.285714 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 1.125000 \n", + "e5d3zaa 1.500000 \n", + "e5gnjv9 4.500000 \n", + "e69gw2t 1.500000 \n", + "e5syrih 1.500000 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.285714 \n", + "... ... 
\n", + "e5smhzk 1.125000 \n", + "e5v91s0 2.250000 \n", + "e6n6di6 1.500000 \n", + "e6iqq30 2.250000 \n", + "e5bfad7 1.800000 \n", + "e6x5he5 1.600000 \n", + "e6l9uyf 2.250000 \n", + "e57hyr1 1.800000 \n", + "e5b8sj7 2.250000 \n", + "e6nlep7 9.000000 \n", + "e6ltazd 1.500000 \n", + "e57a6qq 1.285714 \n", + "e5qc7eb 1.800000 \n", + "e6hqt5y 3.000000 \n", + "e5ua84v 1.500000 \n", + "e65m7kq 1.800000 \n", + "e5ggtru 1.800000 \n", + "e5pmmig 1.125000 \n", + "e64l6vq 1.500000 \n", + "e6fjx0d 1.125000 \n", + "e5h3xyy 1.800000 \n", + "e589ri5 2.250000 \n", + "e5beuqa 1.500000 \n", + "e5lqoj1 1.800000 \n", + "e5kvch1 1.285714 \n", + "e6srvwm 1.800000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 2.250000 \n", + "e58n526 9.000000 \n", + "e69r2kg 1.500000 \n", "\n", - " 2nd-largest[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 0.0 \n", + " mean-nonzero[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 1.000000 \n", + "e5ywqyk 1.142857 \n", + "e5qv9rj 1.500000 \n", + "e6jhojf 1.142857 \n", + "e6989ii 1.142857 \n", + "e69lgse 1.500000 \n", + "e5kwkg2 1.200000 \n", + "e6mehe7 1.200000 \n", + "e6m0hsd 1.142857 \n", + "e64r385 1.400000 \n", + "e5surbt 1.000000 \n", + "e58gxii 1.400000 \n", + "e64vc8y 1.000000 \n", + "e57504g 1.400000 \n", + "e5borjq 1.750000 \n", + "e64n9zv 1.142857 \n", + "e582ud3 1.500000 \n", + "e64i9cf 1.250000 \n", + "e6q9204 1.333333 \n", + "e5modd7 2.666667 \n", + "e5xhbyd 1.333333 \n", + "e5oaf7h 1.333333 \n", + "e6nir3u 1.142857 \n", + "e6c3xdn 1.142857 \n", + "e5d3zaa 1.400000 \n", + "e5gnjv9 1.000000 \n", + "e69gw2t 1.400000 \n", + "e5syrih 1.600000 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.000000 \n", "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 4.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 2.0 \n", + "e5smhzk 1.000000 \n", + "e5v91s0 1.333333 \n", + "e6n6di6 1.600000 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.500000 \n", + "e6x5he5 1.250000 \n", + "e6l9uyf 1.333333 \n", + "e57hyr1 1.250000 \n", + "e5b8sj7 2.000000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 1.600000 \n", + "e57a6qq 1.333333 \n", + "e5qc7eb 1.750000 \n", + "e6hqt5y 2.500000 \n", + "e5ua84v 1.600000 \n", + "e65m7kq 1.250000 \n", + "e5ggtru 2.000000 \n", + "e5pmmig 1.142857 \n", + "e64l6vq 1.200000 \n", + "e6fjx0d 1.000000 \n", + "e5h3xyy 1.250000 \n", + "e589ri5 1.000000 \n", + "e5beuqa 1.400000 \n", + "e5lqoj1 1.000000 \n", + "e5kvch1 1.000000 \n", + "e6srvwm 1.500000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 1.666667 \n", + "e58n526 0.000000 \n", + "e69r2kg 1.400000 \n", "\n", - " 2nd-largest[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 2.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 0.0 \n", - "... ... \n", - "e65ca8k 1.0 \n", - "e6cdkpy 2.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 2.0 \n", + " mean-nonzero[indegree over c->c responses] \\\n", + "e6p7yrp 1.285714 \n", + "e5ywqyk 1.125000 \n", + "e5qv9rj 1.800000 \n", + "e6jhojf 1.125000 \n", + "e6989ii 1.125000 \n", + "e69lgse 1.800000 \n", + "e5kwkg2 1.500000 \n", + "e6mehe7 1.500000 \n", + "e6m0hsd 1.125000 \n", + "e64r385 1.500000 \n", + "e5surbt 1.800000 \n", + "e58gxii 1.500000 \n", + "e64vc8y 2.250000 \n", + "e57504g 1.500000 \n", + "e5borjq 1.800000 \n", + "e64n9zv 1.125000 \n", + "e582ud3 3.000000 \n", + "e64i9cf 1.800000 \n", + "e6q9204 1.285714 \n", + "e5modd7 2.250000 \n", + "e5xhbyd 1.285714 \n", + "e5oaf7h 1.285714 \n", + "e6nir3u 1.125000 \n", + "e6c3xdn 1.125000 \n", + "e5d3zaa 1.500000 \n", + "e5gnjv9 4.500000 \n", + "e69gw2t 1.500000 \n", + "e5syrih 1.500000 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.285714 \n", + "... ... 
\n", + "e5smhzk 1.125000 \n", + "e5v91s0 2.250000 \n", + "e6n6di6 1.500000 \n", + "e6iqq30 2.250000 \n", + "e5bfad7 1.800000 \n", + "e6x5he5 1.800000 \n", + "e6l9uyf 2.250000 \n", + "e57hyr1 1.800000 \n", + "e5b8sj7 2.250000 \n", + "e6nlep7 9.000000 \n", + "e6ltazd 1.500000 \n", + "e57a6qq 1.285714 \n", + "e5qc7eb 1.800000 \n", + "e6hqt5y 3.000000 \n", + "e5ua84v 1.500000 \n", + "e65m7kq 1.800000 \n", + "e5ggtru 1.800000 \n", + "e5pmmig 1.125000 \n", + "e64l6vq 1.500000 \n", + "e6fjx0d 1.125000 \n", + "e5h3xyy 1.800000 \n", + "e589ri5 2.250000 \n", + "e5beuqa 1.500000 \n", + "e5lqoj1 1.800000 \n", + "e5kvch1 1.285714 \n", + "e6srvwm 1.800000 \n", + "e5o65mk 1.125000 \n", + "e647cm8 2.250000 \n", + "e58n526 9.000000 \n", + "e69r2kg 1.500000 \n", "\n", - " 2nd-argmax[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 4.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 5.0 \n", - "e6w6fah 1.0 \n", - "... ... \n", - "e65ca8k 0.0 \n", - "e6cdkpy 3.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 4.0 \n", - "e5ua84v 3.0 \n", + " mean-nonzero[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.500000 \n", + "e5ywqyk 1.333333 \n", + "e5qv9rj 1.000000 \n", + "e6jhojf 1.750000 \n", + "e6989ii 1.000000 \n", + "e69lgse 1.200000 \n", + "e5kwkg2 1.000000 \n", + "e6mehe7 1.666667 \n", + "e6m0hsd 1.333333 \n", + "e64r385 1.166667 \n", + "e5surbt 1.000000 \n", + "e58gxii 1.400000 \n", + "e64vc8y 1.000000 \n", + "e57504g 1.166667 \n", + "e5borjq 1.000000 \n", + "e64n9zv 1.333333 \n", + "e582ud3 1.000000 \n", + "e64i9cf 1.333333 \n", + "e6q9204 1.200000 \n", + "e5modd7 1.000000 \n", + "e5xhbyd 1.500000 \n", + "e5oaf7h 1.333333 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 1.000000 \n", + "e5d3zaa 1.000000 \n", + "e5gnjv9 1.000000 \n", + "e69gw2t 1.250000 \n", + "e5syrih 1.142857 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 2.000000 \n", + "... ... 
\n", + "e5smhzk 1.500000 \n", + "e5v91s0 1.000000 \n", + "e6n6di6 1.142857 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.200000 \n", + "e6x5he5 4.000000 \n", + "e6l9uyf 1.000000 \n", + "e57hyr1 1.000000 \n", + "e5b8sj7 1.000000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 1.000000 \n", + "e57a6qq 1.200000 \n", + "e5qc7eb 1.000000 \n", + "e6hqt5y 1.000000 \n", + "e5ua84v 1.333333 \n", + "e65m7kq 1.000000 \n", + "e5ggtru 1.000000 \n", + "e5pmmig 1.500000 \n", + "e64l6vq 1.250000 \n", + "e6fjx0d 1.000000 \n", + "e5h3xyy 1.000000 \n", + "e589ri5 1.000000 \n", + "e5beuqa 1.400000 \n", + "e5lqoj1 1.500000 \n", + "e5kvch1 1.000000 \n", + "e6srvwm 1.000000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 1.000000 \n", + "e58n526 0.000000 \n", + "e69r2kg 1.166667 \n", "\n", - " 2nd-argmax[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 4.0 \n", - "e5mhgl5 3.0 \n", - "e6w6fah 1.0 \n", - "... ... \n", - "e65ca8k 2.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 0.0 \n", - "e5ua84v 4.0 \n", + " mean-nonzero[outdegree over C->C responses] \\\n", + "e6p7yrp 1.333333 \n", + "e5ywqyk 1.250000 \n", + "e5qv9rj 1.000000 \n", + "e6jhojf 1.600000 \n", + "e6989ii 1.000000 \n", + "e69lgse 1.142857 \n", + "e5kwkg2 1.000000 \n", + "e6mehe7 1.333333 \n", + "e6m0hsd 1.333333 \n", + "e64r385 1.285714 \n", + "e5surbt 1.000000 \n", + "e58gxii 1.333333 \n", + "e64vc8y 1.000000 \n", + "e57504g 1.285714 \n", + "e5borjq 1.125000 \n", + "e64n9zv 1.333333 \n", + "e582ud3 1.000000 \n", + "e64i9cf 1.166667 \n", + "e6q9204 1.400000 \n", + "e5modd7 1.000000 \n", + "e5xhbyd 1.500000 \n", + "e5oaf7h 1.333333 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 1.000000 \n", + "e5d3zaa 1.166667 \n", + "e5gnjv9 1.125000 \n", + "e69gw2t 1.400000 \n", + "e5syrih 1.285714 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.600000 \n", + "... ... 
\n", + "e5smhzk 1.333333 \n", + "e5v91s0 1.000000 \n", + "e6n6di6 1.125000 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.125000 \n", + "e6x5he5 1.600000 \n", + "e6l9uyf 1.125000 \n", + "e57hyr1 1.500000 \n", + "e5b8sj7 1.125000 \n", + "e6nlep7 1.000000 \n", + "e6ltazd 1.200000 \n", + "e57a6qq 1.200000 \n", + "e5qc7eb 1.166667 \n", + "e6hqt5y 1.125000 \n", + "e5ua84v 1.500000 \n", + "e65m7kq 1.125000 \n", + "e5ggtru 1.125000 \n", + "e5pmmig 1.750000 \n", + "e64l6vq 1.166667 \n", + "e6fjx0d 1.333333 \n", + "e5h3xyy 1.125000 \n", + "e589ri5 1.000000 \n", + "e5beuqa 1.800000 \n", + "e5lqoj1 1.333333 \n", + "e5kvch1 1.333333 \n", + "e6srvwm 1.125000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 1.000000 \n", + "e58n526 1.000000 \n", + "e69r2kg 1.125000 \n", "\n", - " norm.2nd-largest[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.250000 \n", - "e5ytz1d 0.166667 \n", - "e6ls80j 0.200000 \n", - "e5mhgl5 0.250000 \n", - "e6w6fah NaN \n", - "... ... \n", - "e65ca8k 0.000000 \n", - "e6cdkpy 0.200000 \n", - "e5wc4tj 0.500000 \n", - "e6ua0sb 0.285714 \n", - "e5ua84v 0.250000 \n", + " mean-nonzero[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.500000 \n", + "e5ywqyk 2.666667 \n", + "e5qv9rj 1.000000 \n", + "e6jhojf 2.000000 \n", + "e6989ii 3.500000 \n", + "e69lgse 1.200000 \n", + "e5kwkg2 1.000000 \n", + "e6mehe7 2.000000 \n", + "e6m0hsd 2.666667 \n", + "e64r385 1.166667 \n", + "e5surbt 1.000000 \n", + "e58gxii 1.400000 \n", + "e64vc8y 1.000000 \n", + "e57504g 1.166667 \n", + "e5borjq 1.000000 \n", + "e64n9zv 2.666667 \n", + "e582ud3 1.000000 \n", + "e64i9cf 1.666667 \n", + "e6q9204 1.600000 \n", + "e5modd7 1.000000 \n", + "e5xhbyd 2.000000 \n", + "e5oaf7h 2.666667 \n", + "e6nir3u 3.500000 \n", + "e6c3xdn 1.000000 \n", + "e5d3zaa 1.400000 \n", + "e5gnjv9 1.000000 \n", + "e69gw2t 1.750000 \n", + "e5syrih 1.142857 \n", + "e5sa2yf 4.000000 \n", + "e6ai7z5 2.000000 \n", + "... ... 
\n", + "e5smhzk 3.500000 \n", + "e5v91s0 1.000000 \n", + "e6n6di6 1.142857 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.200000 \n", + "e6x5he5 4.000000 \n", + "e6l9uyf 1.000000 \n", + "e57hyr1 1.000000 \n", + "e5b8sj7 1.000000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 1.600000 \n", + "e57a6qq 1.600000 \n", + "e5qc7eb 1.166667 \n", + "e6hqt5y 1.000000 \n", + "e5ua84v 1.333333 \n", + "e65m7kq 1.000000 \n", + "e5ggtru 1.000000 \n", + "e5pmmig 2.000000 \n", + "e64l6vq 1.500000 \n", + "e6fjx0d 3.500000 \n", + "e5h3xyy 1.000000 \n", + "e589ri5 1.000000 \n", + "e5beuqa 1.400000 \n", + "e5lqoj1 2.000000 \n", + "e5kvch1 1.200000 \n", + "e6srvwm 1.000000 \n", + "e5o65mk 3.500000 \n", + "e647cm8 1.000000 \n", + "e58n526 0.000000 \n", + "e69r2kg 1.166667 \n", "\n", - " norm.2nd-largest[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.125000 \n", - "e5ytz1d 0.333333 \n", - "e6ls80j 0.200000 \n", - "e5mhgl5 0.250000 \n", - "e6w6fah NaN \n", - "... ... \n", - "e65ca8k 0.142857 \n", - "e6cdkpy 0.400000 \n", - "e5wc4tj 0.125000 \n", - "e6ua0sb 0.142857 \n", - "e5ua84v 0.250000 \n", + " mean-nonzero[outdegree over C->c responses] \\\n", + "e6p7yrp 1.500000 \n", + "e5ywqyk 2.250000 \n", + "e5qv9rj 1.000000 \n", + "e6jhojf 1.800000 \n", + "e6989ii 4.000000 \n", + "e69lgse 1.285714 \n", + "e5kwkg2 1.000000 \n", + "e6mehe7 1.500000 \n", + "e6m0hsd 3.000000 \n", + "e64r385 1.285714 \n", + "e5surbt 1.000000 \n", + "e58gxii 1.500000 \n", + "e64vc8y 1.000000 \n", + "e57504g 1.285714 \n", + "e5borjq 1.125000 \n", + "e64n9zv 3.000000 \n", + "e582ud3 1.000000 \n", + "e64i9cf 1.500000 \n", + "e6q9204 1.800000 \n", + "e5modd7 1.000000 \n", + "e5xhbyd 2.250000 \n", + "e5oaf7h 3.000000 \n", + "e6nir3u 4.000000 \n", + "e6c3xdn 1.000000 \n", + "e5d3zaa 1.500000 \n", + "e5gnjv9 1.125000 \n", + "e69gw2t 1.800000 \n", + "e5syrih 1.285714 \n", + "e5sa2yf 4.500000 \n", + "e6ai7z5 1.800000 \n", + "... ... 
\n", + "e5smhzk 3.000000 \n", + "e5v91s0 1.000000 \n", + "e6n6di6 1.125000 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.125000 \n", + "e6x5he5 1.600000 \n", + "e6l9uyf 1.125000 \n", + "e57hyr1 1.500000 \n", + "e5b8sj7 1.125000 \n", + "e6nlep7 1.000000 \n", + "e6ltazd 1.800000 \n", + "e57a6qq 1.800000 \n", + "e5qc7eb 1.500000 \n", + "e6hqt5y 1.125000 \n", + "e5ua84v 1.500000 \n", + "e65m7kq 1.125000 \n", + "e5ggtru 1.125000 \n", + "e5pmmig 2.250000 \n", + "e64l6vq 1.500000 \n", + "e6fjx0d 3.000000 \n", + "e5h3xyy 1.125000 \n", + "e589ri5 1.000000 \n", + "e5beuqa 1.800000 \n", + "e5lqoj1 1.500000 \n", + "e5kvch1 1.500000 \n", + "e6srvwm 1.125000 \n", + "e5o65mk 4.000000 \n", + "e647cm8 1.000000 \n", + "e58n526 1.000000 \n", + "e69r2kg 1.125000 \n", "\n", - " mean[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.600000 \n", - "e5ytz1d 0.666667 \n", - "e6ls80j 0.833333 \n", - "e5mhgl5 1.142857 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 3.500000 \n", - "e6cdkpy 0.555556 \n", - "e5wc4tj 4.000000 \n", - "e6ua0sb 1.400000 \n", - "e5ua84v 1.333333 \n", + " mean[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.000000 \n", + "e5ywqyk 1.000000 \n", + "e5qv9rj 0.666667 \n", + "e6jhojf 1.400000 \n", + "e6989ii 1.000000 \n", + "e69lgse 0.857143 \n", + "e5kwkg2 0.666667 \n", + "e6mehe7 0.833333 \n", + "e6m0hsd 1.333333 \n", + "e64r385 1.000000 \n", + "e5surbt 0.444444 \n", + "e58gxii 1.166667 \n", + "e64vc8y 0.333333 \n", + "e57504g 1.000000 \n", + "e5borjq 0.875000 \n", + "e64n9zv 1.333333 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.666667 \n", + "e6q9204 1.200000 \n", + "e5modd7 0.888889 \n", + "e5xhbyd 1.500000 \n", + "e5oaf7h 1.333333 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.888889 \n", + "e5d3zaa 0.833333 \n", + "e5gnjv9 0.125000 \n", + "e69gw2t 1.000000 \n", + "e5syrih 1.142857 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.200000 \n", + "... ... 
\n", + "e5smhzk 1.000000 \n", + "e5v91s0 0.444444 \n", + "e6n6di6 1.000000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.750000 \n", + "e6x5he5 0.800000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 0.833333 \n", + "e5b8sj7 0.750000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 1.000000 \n", + "e57a6qq 1.200000 \n", + "e5qc7eb 1.000000 \n", + "e6hqt5y 0.625000 \n", + "e5ua84v 1.333333 \n", + "e65m7kq 0.625000 \n", + "e5ggtru 1.000000 \n", + "e5pmmig 1.500000 \n", + "e64l6vq 0.833333 \n", + "e6fjx0d 0.666667 \n", + "e5h3xyy 0.625000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 1.400000 \n", + "e5lqoj1 0.500000 \n", + "e5kvch1 0.833333 \n", + "e6srvwm 0.750000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 0.555556 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.875000 \n", + "\n", + " mean[indegree over C->C responses] \\\n", + "e6p7yrp 1.333333 \n", + "e5ywqyk 1.250000 \n", + "e5qv9rj 0.900000 \n", + "e6jhojf 1.600000 \n", + "e6989ii 1.000000 \n", + "e69lgse 1.142857 \n", + "e5kwkg2 0.900000 \n", + "e6mehe7 1.333333 \n", + "e6m0hsd 1.333333 \n", + "e64r385 1.125000 \n", + "e5surbt 0.900000 \n", + "e58gxii 1.333333 \n", + "e64vc8y 0.900000 \n", + "e57504g 1.125000 \n", + "e5borjq 1.125000 \n", + "e64n9zv 1.333333 \n", + "e582ud3 0.900000 \n", + "e64i9cf 1.166667 \n", + "e6q9204 1.166667 \n", + "e5modd7 0.900000 \n", + "e5xhbyd 1.500000 \n", + "e5oaf7h 1.333333 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.900000 \n", + "e5d3zaa 1.000000 \n", + "e5gnjv9 1.000000 \n", + "e69gw2t 1.400000 \n", + "e5syrih 1.285714 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.600000 \n", + "... ... 
\n", + "e5smhzk 1.333333 \n", + "e5v91s0 0.900000 \n", + "e6n6di6 1.000000 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.125000 \n", + "e6x5he5 1.600000 \n", + "e6l9uyf 1.000000 \n", + "e57hyr1 1.285714 \n", + "e5b8sj7 1.125000 \n", + "e6nlep7 0.900000 \n", + "e6ltazd 1.000000 \n", + "e57a6qq 1.200000 \n", + "e5qc7eb 1.166667 \n", + "e6hqt5y 1.000000 \n", + "e5ua84v 1.285714 \n", + "e65m7kq 1.000000 \n", + "e5ggtru 1.000000 \n", + "e5pmmig 1.400000 \n", + "e64l6vq 1.166667 \n", + "e6fjx0d 1.000000 \n", + "e5h3xyy 1.000000 \n", + "e589ri5 0.900000 \n", + "e5beuqa 1.500000 \n", + "e5lqoj1 1.142857 \n", + "e5kvch1 1.142857 \n", + "e6srvwm 1.000000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 1.000000 \n", + "e58n526 0.900000 \n", + "e69r2kg 1.125000 \n", "\n", " mean[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.888889 \n", - "e5ytz1d 0.666667 \n", - "e6ls80j 0.555556 \n", - "e5mhgl5 0.888889 \n", - "e6w6fah 0.000000 \n", + "e6p7yrp 0.666667 \n", + "e5ywqyk 0.888889 \n", + "e5qv9rj 0.666667 \n", + "e6jhojf 0.888889 \n", + "e6989ii 0.777778 \n", + "e69lgse 0.666667 \n", + "e5kwkg2 0.666667 \n", + "e6mehe7 0.666667 \n", + "e6m0hsd 0.888889 \n", + "e64r385 0.777778 \n", + "e5surbt 0.444444 \n", + "e58gxii 0.777778 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.777778 \n", + "e5borjq 0.777778 \n", + "e64n9zv 0.888889 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.555556 \n", + "e6q9204 0.888889 \n", + "e5modd7 0.888889 \n", + "e5xhbyd 0.888889 \n", + "e5oaf7h 0.888889 \n", + "e6nir3u 0.777778 \n", + "e6c3xdn 0.888889 \n", + "e5d3zaa 0.777778 \n", + "e5gnjv9 0.111111 \n", + "e69gw2t 0.777778 \n", + "e5syrih 0.888889 \n", + "e5sa2yf 0.888889 \n", + "e6ai7z5 0.666667 \n", "... ... 
\n", - "e65ca8k 0.777778 \n", - "e6cdkpy 0.555556 \n", - "e5wc4tj 0.888889 \n", - "e6ua0sb 0.777778 \n", + "e5smhzk 0.777778 \n", + "e5v91s0 0.444444 \n", + "e6n6di6 0.888889 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.666667 \n", + "e6x5he5 0.444444 \n", + "e6l9uyf 0.444444 \n", + "e57hyr1 0.555556 \n", + "e5b8sj7 0.666667 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.888889 \n", + "e57a6qq 0.888889 \n", + "e5qc7eb 0.777778 \n", + "e6hqt5y 0.555556 \n", "e5ua84v 0.888889 \n", + "e65m7kq 0.555556 \n", + "e5ggtru 0.888889 \n", + "e5pmmig 0.888889 \n", + "e64l6vq 0.666667 \n", + "e6fjx0d 0.777778 \n", + "e5h3xyy 0.555556 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.777778 \n", + "e5lqoj1 0.444444 \n", + "e5kvch1 0.666667 \n", + "e6srvwm 0.666667 \n", + "e5o65mk 0.777778 \n", + "e647cm8 0.555556 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.777778 \n", "\n", - " mean-nonzero[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 2.000000 \n", - "e5ytz1d 1.000000 \n", - "e6ls80j 1.250000 \n", - "e5mhgl5 1.333333 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 7.000000 \n", - "e6cdkpy 1.000000 \n", - "e5wc4tj 4.000000 \n", - "e6ua0sb 1.400000 \n", - "e5ua84v 1.333333 \n", + " mean[indegree over C->c responses] \\\n", + "e6p7yrp 0.9 \n", + "e5ywqyk 0.9 \n", + "e5qv9rj 0.9 \n", + "e6jhojf 0.9 \n", + "e6989ii 0.8 \n", + "e69lgse 0.9 \n", + "e5kwkg2 0.9 \n", + "e6mehe7 0.9 \n", + "e6m0hsd 0.9 \n", + "e64r385 0.9 \n", + "e5surbt 0.9 \n", + "e58gxii 0.9 \n", + "e64vc8y 0.9 \n", + "e57504g 0.9 \n", + "e5borjq 0.9 \n", + "e64n9zv 0.9 \n", + "e582ud3 0.9 \n", + "e64i9cf 0.9 \n", + "e6q9204 0.9 \n", + "e5modd7 0.9 \n", + "e5xhbyd 0.9 \n", + "e5oaf7h 0.9 \n", + "e6nir3u 0.8 \n", + "e6c3xdn 0.9 \n", + "e5d3zaa 0.9 \n", + "e5gnjv9 0.9 \n", + "e69gw2t 0.9 \n", + "e5syrih 0.9 \n", + "e5sa2yf 0.9 \n", + "e6ai7z5 0.9 \n", + "... ... 
\n", + "e5smhzk 0.9 \n", + "e5v91s0 0.9 \n", + "e6n6di6 0.9 \n", + "e6iqq30 0.9 \n", + "e5bfad7 0.9 \n", + "e6x5he5 0.8 \n", + "e6l9uyf 0.9 \n", + "e57hyr1 0.9 \n", + "e5b8sj7 0.9 \n", + "e6nlep7 0.9 \n", + "e6ltazd 0.9 \n", + "e57a6qq 0.9 \n", + "e5qc7eb 0.9 \n", + "e6hqt5y 0.9 \n", + "e5ua84v 0.9 \n", + "e65m7kq 0.9 \n", + "e5ggtru 0.9 \n", + "e5pmmig 0.9 \n", + "e64l6vq 0.9 \n", + "e6fjx0d 0.9 \n", + "e5h3xyy 0.9 \n", + "e589ri5 0.9 \n", + "e5beuqa 0.9 \n", + "e5lqoj1 0.9 \n", + "e5kvch1 0.9 \n", + "e6srvwm 0.9 \n", + "e5o65mk 0.8 \n", + "e647cm8 0.9 \n", + "e58n526 0.9 \n", + "e69r2kg 0.9 \n", "\n", - " mean-nonzero[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.333333 \n", - "e5ytz1d 2.000000 \n", - "e6ls80j 1.666667 \n", - "e5mhgl5 1.333333 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 1.000000 \n", - "e6cdkpy 1.666667 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.750000 \n", - "e5ua84v 1.600000 \n", + " mean[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 0.666667 \n", + "e5ywqyk 0.888889 \n", + "e5qv9rj 0.666667 \n", + "e6jhojf 0.888889 \n", + "e6989ii 0.888889 \n", + "e69lgse 0.666667 \n", + "e5kwkg2 0.666667 \n", + "e6mehe7 0.666667 \n", + "e6m0hsd 0.888889 \n", + "e64r385 0.777778 \n", + "e5surbt 0.444444 \n", + "e58gxii 0.777778 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.777778 \n", + "e5borjq 0.777778 \n", + "e64n9zv 0.888889 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.555556 \n", + "e6q9204 0.888889 \n", + "e5modd7 0.888889 \n", + "e5xhbyd 0.888889 \n", + "e5oaf7h 0.888889 \n", + "e6nir3u 0.888889 \n", + "e6c3xdn 0.888889 \n", + "e5d3zaa 0.777778 \n", + "e5gnjv9 0.111111 \n", + "e69gw2t 0.777778 \n", + "e5syrih 0.888889 \n", + "e5sa2yf 0.888889 \n", + "e6ai7z5 0.666667 \n", + "... ... 
\n", + "e5smhzk 0.777778 \n", + "e5v91s0 0.444444 \n", + "e6n6di6 0.888889 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.666667 \n", + "e6x5he5 0.555556 \n", + "e6l9uyf 0.444444 \n", + "e57hyr1 0.555556 \n", + "e5b8sj7 0.666667 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.888889 \n", + "e57a6qq 0.888889 \n", + "e5qc7eb 0.777778 \n", + "e6hqt5y 0.555556 \n", + "e5ua84v 0.888889 \n", + "e65m7kq 0.555556 \n", + "e5ggtru 0.888889 \n", + "e5pmmig 0.888889 \n", + "e64l6vq 0.666667 \n", + "e6fjx0d 0.777778 \n", + "e5h3xyy 0.555556 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.777778 \n", + "e5lqoj1 0.444444 \n", + "e5kvch1 0.666667 \n", + "e6srvwm 0.666667 \n", + "e5o65mk 0.777778 \n", + "e647cm8 0.555556 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.777778 \n", "\n", - " prop-nonzero[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.800000 \n", - "e5ytz1d 0.666667 \n", - "e6ls80j 0.666667 \n", - "e5mhgl5 0.857143 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 0.500000 \n", - "e6cdkpy 0.555556 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.000000 \n", - "e5ua84v 1.000000 \n", + " mean[indegree over c->c responses] \\\n", + "e6p7yrp 0.9 \n", + "e5ywqyk 0.9 \n", + "e5qv9rj 0.9 \n", + "e6jhojf 0.9 \n", + "e6989ii 0.9 \n", + "e69lgse 0.9 \n", + "e5kwkg2 0.9 \n", + "e6mehe7 0.9 \n", + "e6m0hsd 0.9 \n", + "e64r385 0.9 \n", + "e5surbt 0.9 \n", + "e58gxii 0.9 \n", + "e64vc8y 0.9 \n", + "e57504g 0.9 \n", + "e5borjq 0.9 \n", + "e64n9zv 0.9 \n", + "e582ud3 0.9 \n", + "e64i9cf 0.9 \n", + "e6q9204 0.9 \n", + "e5modd7 0.9 \n", + "e5xhbyd 0.9 \n", + "e5oaf7h 0.9 \n", + "e6nir3u 0.9 \n", + "e6c3xdn 0.9 \n", + "e5d3zaa 0.9 \n", + "e5gnjv9 0.9 \n", + "e69gw2t 0.9 \n", + "e5syrih 0.9 \n", + "e5sa2yf 0.9 \n", + "e6ai7z5 0.9 \n", + "... ... 
\n", + "e5smhzk 0.9 \n", + "e5v91s0 0.9 \n", + "e6n6di6 0.9 \n", + "e6iqq30 0.9 \n", + "e5bfad7 0.9 \n", + "e6x5he5 0.9 \n", + "e6l9uyf 0.9 \n", + "e57hyr1 0.9 \n", + "e5b8sj7 0.9 \n", + "e6nlep7 0.9 \n", + "e6ltazd 0.9 \n", + "e57a6qq 0.9 \n", + "e5qc7eb 0.9 \n", + "e6hqt5y 0.9 \n", + "e5ua84v 0.9 \n", + "e65m7kq 0.9 \n", + "e5ggtru 0.9 \n", + "e5pmmig 0.9 \n", + "e64l6vq 0.9 \n", + "e6fjx0d 0.9 \n", + "e5h3xyy 0.9 \n", + "e589ri5 0.9 \n", + "e5beuqa 0.9 \n", + "e5lqoj1 0.9 \n", + "e5kvch1 0.9 \n", + "e6srvwm 0.9 \n", + "e5o65mk 0.9 \n", + "e647cm8 0.9 \n", + "e58n526 0.9 \n", + "e69r2kg 0.9 \n", "\n", - " prop-nonzero[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.666667 \n", - "e5ytz1d 0.333333 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 0.666667 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 0.777778 \n", - "e6cdkpy 0.333333 \n", - "e5wc4tj 0.888889 \n", - "e6ua0sb 0.444444 \n", - "e5ua84v 0.555556 \n", + " mean[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 1.000000 \n", + "e5ywqyk 1.000000 \n", + "e5qv9rj 0.666667 \n", + "e6jhojf 1.400000 \n", + "e6989ii 1.000000 \n", + "e69lgse 0.857143 \n", + "e5kwkg2 0.666667 \n", + "e6mehe7 0.833333 \n", + "e6m0hsd 1.333333 \n", + "e64r385 1.000000 \n", + "e5surbt 0.444444 \n", + "e58gxii 1.166667 \n", + "e64vc8y 0.333333 \n", + "e57504g 1.000000 \n", + "e5borjq 0.875000 \n", + "e64n9zv 1.333333 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.666667 \n", + "e6q9204 1.200000 \n", + "e5modd7 0.888889 \n", + "e5xhbyd 1.500000 \n", + "e5oaf7h 1.333333 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.888889 \n", + "e5d3zaa 0.833333 \n", + "e5gnjv9 0.125000 \n", + "e69gw2t 1.000000 \n", + "e5syrih 1.142857 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.200000 \n", + "... ... 
\n", + "e5smhzk 1.000000 \n", + "e5v91s0 0.444444 \n", + "e6n6di6 1.000000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.750000 \n", + "e6x5he5 0.800000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 0.833333 \n", + "e5b8sj7 0.750000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 1.000000 \n", + "e57a6qq 1.200000 \n", + "e5qc7eb 1.000000 \n", + "e6hqt5y 0.625000 \n", + "e5ua84v 1.333333 \n", + "e65m7kq 0.625000 \n", + "e5ggtru 1.000000 \n", + "e5pmmig 1.500000 \n", + "e64l6vq 0.833333 \n", + "e6fjx0d 0.666667 \n", + "e5h3xyy 0.625000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 1.400000 \n", + "e5lqoj1 0.500000 \n", + "e5kvch1 0.833333 \n", + "e6srvwm 0.750000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 0.555556 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.875000 \n", + "\n", + " mean[outdegree over C->C responses] \\\n", + "e6p7yrp 1.333333 \n", + "e5ywqyk 1.250000 \n", + "e5qv9rj 0.900000 \n", + "e6jhojf 1.600000 \n", + "e6989ii 1.000000 \n", + "e69lgse 1.142857 \n", + "e5kwkg2 0.900000 \n", + "e6mehe7 1.333333 \n", + "e6m0hsd 1.333333 \n", + "e64r385 1.125000 \n", + "e5surbt 0.900000 \n", + "e58gxii 1.333333 \n", + "e64vc8y 0.900000 \n", + "e57504g 1.125000 \n", + "e5borjq 1.125000 \n", + "e64n9zv 1.333333 \n", + "e582ud3 0.900000 \n", + "e64i9cf 1.166667 \n", + "e6q9204 1.166667 \n", + "e5modd7 0.900000 \n", + "e5xhbyd 1.500000 \n", + "e5oaf7h 1.333333 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.900000 \n", + "e5d3zaa 1.000000 \n", + "e5gnjv9 1.000000 \n", + "e69gw2t 1.400000 \n", + "e5syrih 1.285714 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.600000 \n", + "... ... 
\n", + "e5smhzk 1.333333 \n", + "e5v91s0 0.900000 \n", + "e6n6di6 1.000000 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.125000 \n", + "e6x5he5 1.600000 \n", + "e6l9uyf 1.000000 \n", + "e57hyr1 1.285714 \n", + "e5b8sj7 1.125000 \n", + "e6nlep7 0.900000 \n", + "e6ltazd 1.000000 \n", + "e57a6qq 1.200000 \n", + "e5qc7eb 1.166667 \n", + "e6hqt5y 1.000000 \n", + "e5ua84v 1.285714 \n", + "e65m7kq 1.000000 \n", + "e5ggtru 1.000000 \n", + "e5pmmig 1.400000 \n", + "e64l6vq 1.166667 \n", + "e6fjx0d 1.000000 \n", + "e5h3xyy 1.000000 \n", + "e589ri5 0.900000 \n", + "e5beuqa 1.500000 \n", + "e5lqoj1 1.142857 \n", + "e5kvch1 1.142857 \n", + "e6srvwm 1.000000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 1.000000 \n", + "e58n526 0.900000 \n", + "e69r2kg 1.125000 \n", + "\n", + " mean[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 1.000000 \n", + "e5ywqyk 2.000000 \n", + "e5qv9rj 0.666667 \n", + "e6jhojf 1.600000 \n", + "e6989ii 3.500000 \n", + "e69lgse 0.857143 \n", + "e5kwkg2 0.666667 \n", + "e6mehe7 1.000000 \n", + "e6m0hsd 2.666667 \n", + "e64r385 1.000000 \n", + "e5surbt 0.444444 \n", + "e58gxii 1.166667 \n", + "e64vc8y 0.333333 \n", + "e57504g 1.000000 \n", + "e5borjq 0.875000 \n", + "e64n9zv 2.666667 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.833333 \n", + "e6q9204 1.600000 \n", + "e5modd7 0.888889 \n", + "e5xhbyd 2.000000 \n", + "e5oaf7h 2.666667 \n", + "e6nir3u 3.500000 \n", + "e6c3xdn 0.888889 \n", + "e5d3zaa 1.166667 \n", + "e5gnjv9 0.125000 \n", + "e69gw2t 1.400000 \n", + "e5syrih 1.142857 \n", + "e5sa2yf 4.000000 \n", + "e6ai7z5 1.200000 \n", + "... ... 
\n", + "e5smhzk 2.333333 \n", + "e5v91s0 0.444444 \n", + "e6n6di6 1.000000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.750000 \n", + "e6x5he5 0.800000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 0.833333 \n", + "e5b8sj7 0.750000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 1.600000 \n", + "e57a6qq 1.600000 \n", + "e5qc7eb 1.166667 \n", + "e6hqt5y 0.625000 \n", + "e5ua84v 1.333333 \n", + "e65m7kq 0.625000 \n", + "e5ggtru 1.000000 \n", + "e5pmmig 2.000000 \n", + "e64l6vq 1.000000 \n", + "e6fjx0d 2.333333 \n", + "e5h3xyy 0.625000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 1.400000 \n", + "e5lqoj1 0.666667 \n", + "e5kvch1 1.000000 \n", + "e6srvwm 0.750000 \n", + "e5o65mk 3.500000 \n", + "e647cm8 0.555556 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.875000 \n", + "\n", + " mean[outdegree over C->c responses] \\\n", + "e6p7yrp 1.500000 \n", + "e5ywqyk 2.250000 \n", + "e5qv9rj 0.900000 \n", + "e6jhojf 1.800000 \n", + "e6989ii 4.000000 \n", + "e69lgse 1.285714 \n", + "e5kwkg2 0.900000 \n", + "e6mehe7 1.500000 \n", + "e6m0hsd 3.000000 \n", + "e64r385 1.125000 \n", + "e5surbt 0.900000 \n", + "e58gxii 1.500000 \n", + "e64vc8y 0.900000 \n", + "e57504g 1.125000 \n", + "e5borjq 1.125000 \n", + "e64n9zv 3.000000 \n", + "e582ud3 0.900000 \n", + "e64i9cf 1.500000 \n", + "e6q9204 1.500000 \n", + "e5modd7 0.900000 \n", + "e5xhbyd 2.250000 \n", + "e5oaf7h 3.000000 \n", + "e6nir3u 4.000000 \n", + "e6c3xdn 0.900000 \n", + "e5d3zaa 1.285714 \n", + "e5gnjv9 1.000000 \n", + "e69gw2t 1.800000 \n", + "e5syrih 1.285714 \n", + "e5sa2yf 4.500000 \n", + "e6ai7z5 1.800000 \n", + "... ... 
\n", + "e5smhzk 3.000000 \n", + "e5v91s0 0.900000 \n", + "e6n6di6 1.000000 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.125000 \n", + "e6x5he5 1.600000 \n", + "e6l9uyf 1.000000 \n", + "e57hyr1 1.285714 \n", + "e5b8sj7 1.125000 \n", + "e6nlep7 0.900000 \n", + "e6ltazd 1.500000 \n", + "e57a6qq 1.800000 \n", + "e5qc7eb 1.500000 \n", + "e6hqt5y 1.000000 \n", + "e5ua84v 1.285714 \n", + "e65m7kq 1.000000 \n", + "e5ggtru 1.000000 \n", + "e5pmmig 1.800000 \n", + "e64l6vq 1.500000 \n", + "e6fjx0d 2.250000 \n", + "e5h3xyy 1.000000 \n", + "e589ri5 0.900000 \n", + "e5beuqa 1.500000 \n", + "e5lqoj1 1.285714 \n", + "e5kvch1 1.285714 \n", + "e6srvwm 1.000000 \n", + "e5o65mk 4.000000 \n", + "e647cm8 1.000000 \n", + "e58n526 0.900000 \n", + "e69r2kg 1.125000 \n", + "\n", + " norm.2nd-largest[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.166667 \n", + "e5ywqyk 0.250000 \n", + "e5qv9rj 0.166667 \n", + "e6jhojf 0.142857 \n", + "e6989ii 0.500000 \n", + "e69lgse 0.166667 \n", + "e5kwkg2 0.166667 \n", + "e6mehe7 0.200000 \n", + "e6m0hsd 0.250000 \n", + "e64r385 0.285714 \n", + "e5surbt 0.250000 \n", + "e58gxii 0.285714 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.285714 \n", + "e5borjq 0.285714 \n", + "e64n9zv 0.250000 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.250000 \n", + "e6q9204 0.333333 \n", + "e5modd7 0.250000 \n", + "e5xhbyd 0.333333 \n", + "e5oaf7h 0.250000 \n", + "e6nir3u 0.500000 \n", + "e6c3xdn 0.125000 \n", + "e5d3zaa 0.400000 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 0.400000 \n", + "e5syrih 0.250000 \n", + "e5sa2yf 0.500000 \n", + "e6ai7z5 0.166667 \n", + "... ... 
\n", + "e5smhzk 0.333333 \n", + "e5v91s0 0.250000 \n", + "e6n6di6 0.250000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.333333 \n", + "e6x5he5 0.250000 \n", + "e6l9uyf 0.250000 \n", + "e57hyr1 0.200000 \n", + "e5b8sj7 0.333333 \n", + "e6nlep7 NaN \n", + "e6ltazd 0.200000 \n", + "e57a6qq 0.333333 \n", + "e5qc7eb 0.333333 \n", + "e6hqt5y 0.200000 \n", + "e5ua84v 0.250000 \n", + "e65m7kq 0.200000 \n", + "e5ggtru 0.250000 \n", + "e5pmmig 0.333333 \n", + "e64l6vq 0.200000 \n", + "e6fjx0d 0.500000 \n", + "e5h3xyy 0.200000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.142857 \n", + "e5lqoj1 0.333333 \n", + "e5kvch1 0.200000 \n", + "e6srvwm 0.333333 \n", + "e5o65mk 0.500000 \n", + "e647cm8 0.200000 \n", + "e58n526 NaN \n", + "e69r2kg 0.285714 \n", "\n", - " prop-multiple[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.000000 \n", - "e5ytz1d 0.000000 \n", - "e6ls80j 0.250000 \n", - "e5mhgl5 0.333333 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 1.000000 \n", - "e6cdkpy 0.000000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 0.400000 \n", - "e5ua84v 0.333333 \n", + " norm.2nd-largest[indegree over C->C responses] \\\n", + "e6p7yrp 0.125000 \n", + "e5ywqyk 0.400000 \n", + "e5qv9rj 0.333333 \n", + "e6jhojf 0.125000 \n", + "e6989ii 0.500000 \n", + "e69lgse 0.375000 \n", + "e5kwkg2 0.222222 \n", + "e6mehe7 0.250000 \n", + "e6m0hsd 0.250000 \n", + "e64r385 0.222222 \n", + "e5surbt 0.111111 \n", + "e58gxii 0.250000 \n", + "e64vc8y 0.111111 \n", + "e57504g 0.222222 \n", + "e5borjq 0.222222 \n", + "e64n9zv 0.250000 \n", + "e582ud3 0.222222 \n", + "e64i9cf 0.285714 \n", + "e6q9204 0.285714 \n", + "e5modd7 0.222222 \n", + "e5xhbyd 0.333333 \n", + "e5oaf7h 0.250000 \n", + "e6nir3u 0.500000 \n", + "e6c3xdn 0.111111 \n", + "e5d3zaa 0.285714 \n", + "e5gnjv9 0.111111 \n", + "e69gw2t 0.285714 \n", + "e5syrih 0.222222 \n", + "e5sa2yf 0.500000 \n", + "e6ai7z5 0.250000 \n", + "... ... 
\n", + "e5smhzk 0.250000 \n", + "e5v91s0 0.222222 \n", + "e6n6di6 0.222222 \n", + "e6iqq30 0.111111 \n", + "e5bfad7 0.222222 \n", + "e6x5he5 0.125000 \n", + "e6l9uyf 0.222222 \n", + "e57hyr1 0.222222 \n", + "e5b8sj7 0.333333 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.166667 \n", + "e57a6qq 0.333333 \n", + "e5qc7eb 0.428571 \n", + "e6hqt5y 0.444444 \n", + "e5ua84v 0.222222 \n", + "e65m7kq 0.333333 \n", + "e5ggtru 0.222222 \n", + "e5pmmig 0.285714 \n", + "e64l6vq 0.285714 \n", + "e6fjx0d 0.250000 \n", + "e5h3xyy 0.222222 \n", + "e589ri5 0.111111 \n", + "e5beuqa 0.222222 \n", + "e5lqoj1 0.125000 \n", + "e5kvch1 0.250000 \n", + "e6srvwm 0.222222 \n", + "e5o65mk 0.500000 \n", + "e647cm8 0.333333 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.222222 \n", "\n", - " prop-multiple[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.166667 \n", - "e5ytz1d 0.666667 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 0.333333 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 0.000000 \n", - "e6cdkpy 0.666667 \n", - "e5wc4tj 0.000000 \n", - "e6ua0sb 0.250000 \n", - "e5ua84v 0.400000 \n", + " norm.2nd-largest[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.166667 \n", + "e5ywqyk 0.125000 \n", + "e5qv9rj 0.166667 \n", + "e6jhojf 0.125000 \n", + "e6989ii 0.142857 \n", + "e69lgse 0.166667 \n", + "e5kwkg2 0.166667 \n", + "e6mehe7 0.166667 \n", + "e6m0hsd 0.125000 \n", + "e64r385 0.285714 \n", + "e5surbt 0.250000 \n", + "e58gxii 0.285714 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.285714 \n", + "e5borjq 0.285714 \n", + "e64n9zv 0.125000 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.200000 \n", + "e6q9204 0.250000 \n", + "e5modd7 0.250000 \n", + "e5xhbyd 0.250000 \n", + "e5oaf7h 0.250000 \n", + "e6nir3u 0.142857 \n", + "e6c3xdn 0.125000 \n", + "e5d3zaa 0.285714 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 0.285714 \n", + "e5syrih 0.250000 \n", + "e5sa2yf 0.125000 \n", + "e6ai7z5 0.166667 \n", + "... ... 
\n", + "e5smhzk 0.142857 \n", + "e5v91s0 0.250000 \n", + "e6n6di6 0.250000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.333333 \n", + "e6x5he5 0.250000 \n", + "e6l9uyf 0.250000 \n", + "e57hyr1 0.200000 \n", + "e5b8sj7 0.333333 \n", + "e6nlep7 NaN \n", + "e6ltazd 0.125000 \n", + "e57a6qq 0.250000 \n", + "e5qc7eb 0.285714 \n", + "e6hqt5y 0.200000 \n", + "e5ua84v 0.250000 \n", + "e65m7kq 0.200000 \n", + "e5ggtru 0.250000 \n", + "e5pmmig 0.125000 \n", + "e64l6vq 0.166667 \n", + "e6fjx0d 0.142857 \n", + "e5h3xyy 0.200000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.142857 \n", + "e5lqoj1 0.250000 \n", + "e5kvch1 0.166667 \n", + "e6srvwm 0.333333 \n", + "e5o65mk 0.142857 \n", + "e647cm8 0.200000 \n", + "e58n526 NaN \n", + "e69r2kg 0.285714 \n", "\n", - " entropy[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.386294 \n", - "e5ytz1d 1.791759 \n", - "e6ls80j 1.332179 \n", - "e5mhgl5 1.732868 \n", - "e6w6fah NaN \n", - "... ... \n", - "e65ca8k 0.000000 \n", - "e6cdkpy 1.609438 \n", - "e5wc4tj 0.693147 \n", - "e6ua0sb 1.549826 \n", - "e5ua84v 1.732868 \n", + " norm.2nd-largest[indegree over C->c responses] \\\n", + "e6p7yrp 0.111111 \n", + "e5ywqyk 0.111111 \n", + "e5qv9rj 0.333333 \n", + "e6jhojf 0.111111 \n", + "e6989ii 0.125000 \n", + "e69lgse 0.333333 \n", + "e5kwkg2 0.222222 \n", + "e6mehe7 0.222222 \n", + "e6m0hsd 0.111111 \n", + "e64r385 0.222222 \n", + "e5surbt 0.111111 \n", + "e58gxii 0.222222 \n", + "e64vc8y 0.111111 \n", + "e57504g 0.222222 \n", + "e5borjq 0.222222 \n", + "e64n9zv 0.111111 \n", + "e582ud3 0.222222 \n", + "e64i9cf 0.222222 \n", + "e6q9204 0.222222 \n", + "e5modd7 0.222222 \n", + "e5xhbyd 0.222222 \n", + "e5oaf7h 0.222222 \n", + "e6nir3u 0.125000 \n", + "e6c3xdn 0.111111 \n", + "e5d3zaa 0.222222 \n", + "e5gnjv9 0.111111 \n", + "e69gw2t 0.222222 \n", + "e5syrih 0.222222 \n", + "e5sa2yf 0.111111 \n", + "e6ai7z5 0.111111 \n", + "... ... 
\n", + "e5smhzk 0.111111 \n", + "e5v91s0 0.222222 \n", + "e6n6di6 0.222222 \n", + "e6iqq30 0.111111 \n", + "e5bfad7 0.222222 \n", + "e6x5he5 0.125000 \n", + "e6l9uyf 0.222222 \n", + "e57hyr1 0.222222 \n", + "e5b8sj7 0.333333 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.111111 \n", + "e57a6qq 0.222222 \n", + "e5qc7eb 0.222222 \n", + "e6hqt5y 0.444444 \n", + "e5ua84v 0.222222 \n", + "e65m7kq 0.222222 \n", + "e5ggtru 0.222222 \n", + "e5pmmig 0.111111 \n", + "e64l6vq 0.222222 \n", + "e6fjx0d 0.111111 \n", + "e5h3xyy 0.222222 \n", + "e589ri5 0.111111 \n", + "e5beuqa 0.222222 \n", + "e5lqoj1 0.111111 \n", + "e5kvch1 0.111111 \n", + "e6srvwm 0.222222 \n", + "e5o65mk 0.125000 \n", + "e647cm8 0.333333 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.222222 \n", "\n", - " entropy[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.667462 \n", - "e5ytz1d 1.011404 \n", - "e6ls80j 0.950271 \n", - "e5mhgl5 1.732868 \n", - "e6w6fah NaN \n", - "... ... \n", - "e65ca8k 1.945910 \n", - "e6cdkpy 1.054920 \n", - "e5wc4tj 2.079442 \n", - "e6ua0sb 1.153742 \n", - "e5ua84v 1.494175 \n", + " norm.2nd-largest[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 0.166667 \n", + "e5ywqyk 0.125000 \n", + "e5qv9rj 0.166667 \n", + "e6jhojf 0.125000 \n", + "e6989ii 0.125000 \n", + "e69lgse 0.166667 \n", + "e5kwkg2 0.166667 \n", + "e6mehe7 0.166667 \n", + "e6m0hsd 0.125000 \n", + "e64r385 0.285714 \n", + "e5surbt 0.250000 \n", + "e58gxii 0.285714 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.285714 \n", + "e5borjq 0.285714 \n", + "e64n9zv 0.125000 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.200000 \n", + "e6q9204 0.250000 \n", + "e5modd7 0.250000 \n", + "e5xhbyd 0.250000 \n", + "e5oaf7h 0.250000 \n", + "e6nir3u 0.125000 \n", + "e6c3xdn 0.125000 \n", + "e5d3zaa 0.285714 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 0.285714 \n", + "e5syrih 0.250000 \n", + "e5sa2yf 0.125000 \n", + "e6ai7z5 0.166667 \n", + "... ... 
\n", + "e5smhzk 0.142857 \n", + "e5v91s0 0.250000 \n", + "e6n6di6 0.250000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.333333 \n", + "e6x5he5 0.200000 \n", + "e6l9uyf 0.250000 \n", + "e57hyr1 0.200000 \n", + "e5b8sj7 0.333333 \n", + "e6nlep7 NaN \n", + "e6ltazd 0.125000 \n", + "e57a6qq 0.250000 \n", + "e5qc7eb 0.285714 \n", + "e6hqt5y 0.200000 \n", + "e5ua84v 0.250000 \n", + "e65m7kq 0.200000 \n", + "e5ggtru 0.250000 \n", + "e5pmmig 0.125000 \n", + "e64l6vq 0.166667 \n", + "e6fjx0d 0.142857 \n", + "e5h3xyy 0.200000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.142857 \n", + "e5lqoj1 0.250000 \n", + "e5kvch1 0.166667 \n", + "e6srvwm 0.333333 \n", + "e5o65mk 0.142857 \n", + "e647cm8 0.200000 \n", + "e58n526 NaN \n", + "e69r2kg 0.285714 \n", "\n", - " 2nd-largest / max[outdegree over C->c mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 0.5 \n", - "e5mhgl5 1.0 \n", - "e6w6fah NaN \n", - "... ... \n", - "e65ca8k 0.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 1.0 \n", + " norm.2nd-largest[indegree over c->c responses] \\\n", + "e6p7yrp 0.111111 \n", + "e5ywqyk 0.111111 \n", + "e5qv9rj 0.333333 \n", + "e6jhojf 0.111111 \n", + "e6989ii 0.111111 \n", + "e69lgse 0.333333 \n", + "e5kwkg2 0.222222 \n", + "e6mehe7 0.222222 \n", + "e6m0hsd 0.111111 \n", + "e64r385 0.222222 \n", + "e5surbt 0.111111 \n", + "e58gxii 0.222222 \n", + "e64vc8y 0.111111 \n", + "e57504g 0.222222 \n", + "e5borjq 0.222222 \n", + "e64n9zv 0.111111 \n", + "e582ud3 0.222222 \n", + "e64i9cf 0.222222 \n", + "e6q9204 0.222222 \n", + "e5modd7 0.222222 \n", + "e5xhbyd 0.222222 \n", + "e5oaf7h 0.222222 \n", + "e6nir3u 0.111111 \n", + "e6c3xdn 0.111111 \n", + "e5d3zaa 0.222222 \n", + "e5gnjv9 0.111111 \n", + "e69gw2t 0.222222 \n", + "e5syrih 0.222222 \n", + "e5sa2yf 0.111111 \n", + "e6ai7z5 0.111111 \n", + "... ... 
\n", + "e5smhzk 0.111111 \n", + "e5v91s0 0.222222 \n", + "e6n6di6 0.222222 \n", + "e6iqq30 0.111111 \n", + "e5bfad7 0.222222 \n", + "e6x5he5 0.222222 \n", + "e6l9uyf 0.222222 \n", + "e57hyr1 0.222222 \n", + "e5b8sj7 0.333333 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.111111 \n", + "e57a6qq 0.222222 \n", + "e5qc7eb 0.222222 \n", + "e6hqt5y 0.444444 \n", + "e5ua84v 0.222222 \n", + "e65m7kq 0.222222 \n", + "e5ggtru 0.222222 \n", + "e5pmmig 0.111111 \n", + "e64l6vq 0.222222 \n", + "e6fjx0d 0.111111 \n", + "e5h3xyy 0.222222 \n", + "e589ri5 0.111111 \n", + "e5beuqa 0.222222 \n", + "e5lqoj1 0.111111 \n", + "e5kvch1 0.111111 \n", + "e6srvwm 0.222222 \n", + "e5o65mk 0.111111 \n", + "e647cm8 0.333333 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.222222 \n", "\n", - " 2nd-largest / max[indegree over C->c mid-thread responses] \\\n", - "e5hm9mp 0.333333 \n", - "e5ytz1d 0.666667 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 1.000000 \n", - "e6w6fah NaN \n", + " norm.2nd-largest[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.166667 \n", + "e5ywqyk 0.250000 \n", + "e5qv9rj 0.166667 \n", + "e6jhojf 0.142857 \n", + "e6989ii 0.500000 \n", + "e69lgse 0.166667 \n", + "e5kwkg2 0.166667 \n", + "e6mehe7 0.200000 \n", + "e6m0hsd 0.250000 \n", + "e64r385 0.142857 \n", + "e5surbt 0.250000 \n", + "e58gxii 0.142857 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.142857 \n", + "e5borjq 0.142857 \n", + "e64n9zv 0.250000 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.250000 \n", + "e6q9204 0.166667 \n", + "e5modd7 0.125000 \n", + "e5xhbyd 0.333333 \n", + "e5oaf7h 0.250000 \n", + "e6nir3u 0.500000 \n", + "e6c3xdn 0.125000 \n", + "e5d3zaa 0.200000 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 0.200000 \n", + "e5syrih 0.125000 \n", + "e5sa2yf 0.500000 \n", + "e6ai7z5 0.166667 \n", "... ... 
\n", - "e65ca8k 1.000000 \n", - "e6cdkpy 1.000000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 0.250000 \n", - "e5ua84v 0.666667 \n", - "\n", - " max[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 2.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 0.0 \n", - "... ... \n", - "e65ca8k 1.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 2.0 \n", + "e5smhzk 0.333333 \n", + "e5v91s0 0.250000 \n", + "e6n6di6 0.125000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.166667 \n", + "e6x5he5 0.000000 \n", + "e6l9uyf 0.250000 \n", + "e57hyr1 0.200000 \n", + "e5b8sj7 0.166667 \n", + "e6nlep7 NaN \n", + "e6ltazd 0.200000 \n", + "e57a6qq 0.166667 \n", + "e5qc7eb 0.166667 \n", + "e6hqt5y 0.200000 \n", + "e5ua84v 0.250000 \n", + "e65m7kq 0.200000 \n", + "e5ggtru 0.125000 \n", + "e5pmmig 0.333333 \n", + "e64l6vq 0.200000 \n", + "e6fjx0d 0.500000 \n", + "e5h3xyy 0.200000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.142857 \n", + "e5lqoj1 0.333333 \n", + "e5kvch1 0.200000 \n", + "e6srvwm 0.166667 \n", + "e5o65mk 0.500000 \n", + "e647cm8 0.200000 \n", + "e58n526 NaN \n", + "e69r2kg 0.142857 \n", "\n", - " max[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 3.0 \n", - "e5ytz1d 3.0 \n", - "e6ls80j 3.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 1.0 \n", - "e6cdkpy 2.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 4.0 \n", - "e5ua84v 4.0 \n", + " norm.2nd-largest[outdegree over C->C responses] \\\n", + "e6p7yrp 0.125000 \n", + "e5ywqyk 0.200000 \n", + "e5qv9rj 0.111111 \n", + "e6jhojf 0.125000 \n", + "e6989ii 0.500000 \n", + "e69lgse 0.125000 \n", + "e5kwkg2 0.111111 \n", + "e6mehe7 0.125000 \n", + "e6m0hsd 0.250000 \n", + "e64r385 0.222222 \n", + "e5surbt 0.111111 \n", + "e58gxii 0.125000 \n", + "e64vc8y 0.111111 \n", + "e57504g 0.222222 \n", + "e5borjq 0.111111 \n", + "e64n9zv 0.250000 \n", + "e582ud3 0.111111 \n", + "e64i9cf 0.142857 \n", + "e6q9204 0.142857 \n", + "e5modd7 0.111111 \n", + "e5xhbyd 0.333333 \n", + "e5oaf7h 0.250000 \n", + "e6nir3u 0.500000 \n", + "e6c3xdn 0.111111 \n", + "e5d3zaa 0.142857 \n", + "e5gnjv9 0.111111 \n", + "e69gw2t 0.285714 \n", + "e5syrih 0.222222 \n", + "e5sa2yf 0.500000 \n", + "e6ai7z5 0.125000 \n", + "... ... \n", + "e5smhzk 0.250000 \n", + "e5v91s0 0.111111 \n", + "e6n6di6 0.111111 \n", + "e6iqq30 0.111111 \n", + "e5bfad7 0.111111 \n", + "e6x5he5 0.125000 \n", + "e6l9uyf 0.111111 \n", + "e57hyr1 0.222222 \n", + "e5b8sj7 0.111111 \n", + "e6nlep7 0.111111 \n", + "e6ltazd 0.166667 \n", + "e57a6qq 0.166667 \n", + "e5qc7eb 0.142857 \n", + "e6hqt5y 0.111111 \n", + "e5ua84v 0.222222 \n", + "e65m7kq 0.111111 \n", + "e5ggtru 0.111111 \n", + "e5pmmig 0.285714 \n", + "e64l6vq 0.142857 \n", + "e6fjx0d 0.250000 \n", + "e5h3xyy 0.111111 \n", + "e589ri5 0.111111 \n", + "e5beuqa 0.222222 \n", + "e5lqoj1 0.125000 \n", + "e5kvch1 0.250000 \n", + "e6srvwm 0.111111 \n", + "e5o65mk 0.500000 \n", + "e647cm8 0.111111 \n", + "e58n526 0.111111 \n", + "e69r2kg 0.111111 \n", "\n", - " argmax[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 3.0 \n", - "e6ls80j 2.0 \n", - "e5mhgl5 3.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 1.0 \n", - "e6cdkpy 2.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 0.0 \n", + " norm.2nd-largest[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.166667 \n", + "e5ywqyk 0.375000 \n", + "e5qv9rj 0.166667 \n", + "e6jhojf 0.250000 \n", + "e6989ii 0.428571 \n", + "e69lgse 0.166667 \n", + "e5kwkg2 0.166667 \n", + "e6mehe7 0.333333 \n", + "e6m0hsd 0.375000 \n", + "e64r385 0.142857 \n", + "e5surbt 0.250000 \n", + "e58gxii 0.142857 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.142857 \n", + "e5borjq 0.142857 \n", + "e64n9zv 0.375000 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.200000 \n", + "e6q9204 0.250000 \n", + "e5modd7 0.125000 \n", + "e5xhbyd 0.250000 \n", + "e5oaf7h 0.375000 \n", + "e6nir3u 0.428571 \n", + "e6c3xdn 0.125000 \n", + "e5d3zaa 0.285714 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 0.285714 \n", + "e5syrih 0.125000 \n", + "e5sa2yf 0.500000 \n", + "e6ai7z5 0.166667 \n", + "... ... \n", + "e5smhzk 0.428571 \n", + "e5v91s0 0.250000 \n", + "e6n6di6 0.125000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.166667 \n", + "e6x5he5 0.000000 \n", + "e6l9uyf 0.250000 \n", + "e57hyr1 0.200000 \n", + "e5b8sj7 0.166667 \n", + "e6nlep7 NaN \n", + "e6ltazd 0.250000 \n", + "e57a6qq 0.250000 \n", + "e5qc7eb 0.142857 \n", + "e6hqt5y 0.200000 \n", + "e5ua84v 0.250000 \n", + "e65m7kq 0.200000 \n", + "e5ggtru 0.125000 \n", + "e5pmmig 0.250000 \n", + "e64l6vq 0.166667 \n", + "e6fjx0d 0.428571 \n", + "e5h3xyy 0.200000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.142857 \n", + "e5lqoj1 0.500000 \n", + "e5kvch1 0.166667 \n", + "e6srvwm 0.166667 \n", + "e5o65mk 0.428571 \n", + "e647cm8 0.200000 \n", + "e58n526 NaN \n", + "e69r2kg 0.142857 \n", "\n", - " argmax[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 0.0 \n", - "e6w6fah 0.0 \n", + " norm.2nd-largest[outdegree over C->c responses] \\\n", + "e6p7yrp 0.222222 \n", + "e5ywqyk 0.333333 \n", + "e5qv9rj 0.111111 \n", + "e6jhojf 0.222222 
\n", + "e6989ii 0.500000 \n", + "e69lgse 0.222222 \n", + "e5kwkg2 0.111111 \n", + "e6mehe7 0.222222 \n", + "e6m0hsd 0.333333 \n", + "e64r385 0.222222 \n", + "e5surbt 0.111111 \n", + "e58gxii 0.222222 \n", + "e64vc8y 0.111111 \n", + "e57504g 0.222222 \n", + "e5borjq 0.111111 \n", + "e64n9zv 0.333333 \n", + "e582ud3 0.111111 \n", + "e64i9cf 0.222222 \n", + "e6q9204 0.222222 \n", + "e5modd7 0.111111 \n", + "e5xhbyd 0.333333 \n", + "e5oaf7h 0.333333 \n", + "e6nir3u 0.500000 \n", + "e6c3xdn 0.111111 \n", + "e5d3zaa 0.222222 \n", + "e5gnjv9 0.111111 \n", + "e69gw2t 0.333333 \n", + "e5syrih 0.222222 \n", + "e5sa2yf 0.444444 \n", + "e6ai7z5 0.111111 \n", "... ... \n", - "e65ca8k 1.0 \n", - "e6cdkpy 0.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 0.0 \n", - "\n", - " norm.max[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.285714 \n", - "e5ytz1d 0.166667 \n", - "e6ls80j 0.400000 \n", - "e5mhgl5 0.285714 \n", - "e6w6fah 0.000000 \n", - "... ... \n", - "e65ca8k 1.000000 \n", - "e6cdkpy 0.200000 \n", - "e5wc4tj 0.500000 \n", - "e6ua0sb 0.285714 \n", - "e5ua84v 0.250000 \n", + "e5smhzk 0.444444 \n", + "e5v91s0 0.111111 \n", + "e6n6di6 0.111111 \n", + "e6iqq30 0.111111 \n", + "e5bfad7 0.111111 \n", + "e6x5he5 0.125000 \n", + "e6l9uyf 0.111111 \n", + "e57hyr1 0.222222 \n", + "e5b8sj7 0.111111 \n", + "e6nlep7 0.111111 \n", + "e6ltazd 0.333333 \n", + "e57a6qq 0.333333 \n", + "e5qc7eb 0.222222 \n", + "e6hqt5y 0.111111 \n", + "e5ua84v 0.222222 \n", + "e65m7kq 0.111111 \n", + "e5ggtru 0.111111 \n", + "e5pmmig 0.333333 \n", + "e64l6vq 0.222222 \n", + "e6fjx0d 0.444444 \n", + "e5h3xyy 0.111111 \n", + "e589ri5 0.111111 \n", + "e5beuqa 0.222222 \n", + "e5lqoj1 0.222222 \n", + "e5kvch1 0.222222 \n", + "e6srvwm 0.111111 \n", + "e5o65mk 0.500000 \n", + "e647cm8 0.111111 \n", + "e58n526 0.111111 \n", + "e69r2kg 0.111111 \n", "\n", " norm.max[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.428571 \n", - "e5ytz1d 0.500000 \n", - "e6ls80j 0.600000 \n", - 
"e5mhgl5 0.285714 \n", - "e6w6fah 0.000000 \n", + "e6p7yrp 0.500000 \n", + "e5ywqyk 0.500000 \n", + "e5qv9rj 0.500000 \n", + "e6jhojf 0.428571 \n", + "e6989ii 0.500000 \n", + "e69lgse 0.500000 \n", + "e5kwkg2 0.333333 \n", + "e6mehe7 0.400000 \n", + "e6m0hsd 0.500000 \n", + "e64r385 0.285714 \n", + "e5surbt 0.250000 \n", + "e58gxii 0.428571 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.285714 \n", + "e5borjq 0.428571 \n", + "e64n9zv 0.500000 \n", + "e582ud3 0.666667 \n", + "e64i9cf 0.500000 \n", + "e6q9204 0.500000 \n", + "e5modd7 0.500000 \n", + "e5xhbyd 0.333333 \n", + "e5oaf7h 0.750000 \n", + "e6nir3u 0.500000 \n", + "e6c3xdn 0.250000 \n", + "e5d3zaa 0.400000 \n", + "e5gnjv9 1.000000 \n", + "e69gw2t 0.400000 \n", + "e5syrih 0.500000 \n", + "e5sa2yf 0.500000 \n", + "e6ai7z5 0.333333 \n", "... ... \n", - "e65ca8k 1.000000 \n", - "e6cdkpy 0.400000 \n", - "e5wc4tj 0.500000 \n", - "e6ua0sb 0.571429 \n", + "e5smhzk 0.333333 \n", + "e5v91s0 0.500000 \n", + "e6n6di6 0.500000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.333333 \n", + "e6x5he5 0.250000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 0.400000 \n", + "e5b8sj7 0.666667 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.800000 \n", + "e57a6qq 0.500000 \n", + "e5qc7eb 0.500000 \n", + "e6hqt5y 0.800000 \n", "e5ua84v 0.500000 \n", + "e65m7kq 0.600000 \n", + "e5ggtru 0.625000 \n", + "e5pmmig 0.333333 \n", + "e64l6vq 0.400000 \n", + "e6fjx0d 0.500000 \n", + "e5h3xyy 0.400000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.571429 \n", + "e5lqoj1 0.333333 \n", + "e5kvch1 0.400000 \n", + "e6srvwm 0.333333 \n", + "e5o65mk 0.500000 \n", + "e647cm8 0.600000 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.428571 \n", "\n", - " 2nd-largest[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 2.0 \n", + " norm.max[indegree over C->C responses] \\\n", + "e6p7yrp 0.625000 \n", + "e5ywqyk 0.400000 \n", + "e5qv9rj 0.333333 \n", + "e6jhojf 0.500000 \n", + "e6989ii 0.500000 \n", + "e69lgse 0.375000 \n", + "e5kwkg2 0.333333 \n", + "e6mehe7 0.500000 \n", + "e6m0hsd 0.500000 \n", + "e64r385 0.222222 \n", + "e5surbt 0.555556 \n", + "e58gxii 0.500000 \n", + "e64vc8y 0.666667 \n", + "e57504g 0.222222 \n", + "e5borjq 0.333333 \n", + "e64n9zv 0.500000 \n", + "e582ud3 0.666667 \n", + "e64i9cf 0.571429 \n", + "e6q9204 0.428571 \n", + "e5modd7 0.444444 \n", + "e5xhbyd 0.333333 \n", + "e5oaf7h 0.750000 \n", + "e6nir3u 0.500000 \n", + "e6c3xdn 0.222222 \n", + "e5d3zaa 0.285714 \n", + "e5gnjv9 0.888889 \n", + "e69gw2t 0.285714 \n", + "e5syrih 0.444444 \n", + "e5sa2yf 0.500000 \n", + "e6ai7z5 0.375000 \n", + "... ... \n", + "e5smhzk 0.500000 \n", + "e5v91s0 0.555556 \n", + "e6n6di6 0.444444 \n", + "e6iqq30 0.666667 \n", + "e5bfad7 0.444444 \n", + "e6x5he5 0.500000 \n", + "e6l9uyf 0.555556 \n", + "e57hyr1 0.444444 \n", + "e5b8sj7 0.444444 \n", + "e6nlep7 1.000000 \n", + "e6ltazd 0.666667 \n", + "e57a6qq 0.500000 \n", + "e5qc7eb 0.428571 \n", + "e6hqt5y 0.444444 \n", + "e5ua84v 0.444444 \n", + "e65m7kq 0.444444 \n", + "e5ggtru 0.555556 \n", + "e5pmmig 0.285714 \n", + "e64l6vq 0.428571 \n", + "e6fjx0d 0.500000 \n", + "e5h3xyy 0.444444 \n", + "e589ri5 0.666667 \n", + "e5beuqa 0.444444 \n", + "e5lqoj1 0.625000 \n", + "e5kvch1 0.375000 \n", + "e6srvwm 0.333333 \n", + "e5o65mk 0.500000 \n", + "e647cm8 0.444444 \n", + "e58n526 1.000000 \n", + "e69r2kg 0.333333 \n", "\n", - " 2nd-largest[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 2.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 2.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 2.0 \n", + " norm.max[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.166667 \n", + "e5ywqyk 0.250000 \n", + "e5qv9rj 0.500000 \n", + "e6jhojf 0.250000 \n", + "e6989ii 0.142857 \n", + "e69lgse 0.500000 \n", + "e5kwkg2 0.333333 \n", + "e6mehe7 0.333333 \n", + "e6m0hsd 0.250000 \n", + "e64r385 0.285714 \n", + "e5surbt 0.250000 \n", + "e58gxii 0.285714 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.285714 \n", + "e5borjq 0.428571 \n", + "e64n9zv 0.250000 \n", + "e582ud3 0.666667 \n", + "e64i9cf 0.400000 \n", + "e6q9204 0.250000 \n", + "e5modd7 0.500000 \n", + "e5xhbyd 0.250000 \n", + "e5oaf7h 0.250000 \n", + "e6nir3u 0.142857 \n", + "e6c3xdn 0.250000 \n", + "e5d3zaa 0.285714 \n", + "e5gnjv9 1.000000 \n", + "e69gw2t 0.285714 \n", + "e5syrih 0.375000 \n", + "e5sa2yf 0.125000 \n", + "e6ai7z5 0.166667 \n", + "... ... \n", + "e5smhzk 0.142857 \n", + "e5v91s0 0.500000 \n", + "e6n6di6 0.375000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.333333 \n", + "e6x5he5 0.250000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 0.400000 \n", + "e5b8sj7 0.500000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.500000 \n", + "e57a6qq 0.250000 \n", + "e5qc7eb 0.428571 \n", + "e6hqt5y 0.800000 \n", + "e5ua84v 0.375000 \n", + "e65m7kq 0.400000 \n", + "e5ggtru 0.500000 \n", + "e5pmmig 0.250000 \n", + "e64l6vq 0.333333 \n", + "e6fjx0d 0.142857 \n", + "e5h3xyy 0.400000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.428571 \n", + "e5lqoj1 0.250000 \n", + "e5kvch1 0.166667 \n", + "e6srvwm 0.333333 \n", + "e5o65mk 0.142857 \n", + "e647cm8 0.600000 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.285714 \n", "\n", - " 2nd-argmax[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 4.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 1.0 \n", - "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 3.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 4.0 \n", - "e5ua84v 3.0 \n", + " norm.max[indegree over C->c responses] \\\n", + "e6p7yrp 0.333333 \n", + "e5ywqyk 0.222222 \n", + "e5qv9rj 0.333333 \n", + "e6jhojf 0.222222 \n", + "e6989ii 0.125000 \n", + "e69lgse 0.333333 \n", + "e5kwkg2 0.333333 \n", + "e6mehe7 0.333333 \n", + "e6m0hsd 0.222222 \n", + "e64r385 0.222222 \n", + "e5surbt 0.555556 \n", + "e58gxii 0.222222 \n", + "e64vc8y 0.666667 \n", + "e57504g 0.222222 \n", + "e5borjq 0.333333 \n", + "e64n9zv 0.222222 \n", + "e582ud3 0.666667 \n", + "e64i9cf 0.444444 \n", + "e6q9204 0.222222 \n", + "e5modd7 0.444444 \n", + "e5xhbyd 0.222222 \n", + "e5oaf7h 0.222222 \n", + "e6nir3u 0.125000 \n", + "e6c3xdn 0.222222 \n", + "e5d3zaa 0.222222 \n", + "e5gnjv9 0.888889 \n", + "e69gw2t 0.222222 \n", + "e5syrih 0.333333 \n", + "e5sa2yf 0.111111 \n", + "e6ai7z5 0.333333 \n", + "... ... \n", + "e5smhzk 0.222222 \n", + "e5v91s0 0.555556 \n", + "e6n6di6 0.333333 \n", + "e6iqq30 0.666667 \n", + "e5bfad7 0.333333 \n", + "e6x5he5 0.500000 \n", + "e6l9uyf 0.555556 \n", + "e57hyr1 0.444444 \n", + "e5b8sj7 0.333333 \n", + "e6nlep7 1.000000 \n", + "e6ltazd 0.444444 \n", + "e57a6qq 0.222222 \n", + "e5qc7eb 0.333333 \n", + "e6hqt5y 0.444444 \n", + "e5ua84v 0.333333 \n", + "e65m7kq 0.444444 \n", + "e5ggtru 0.444444 \n", + "e5pmmig 0.222222 \n", + "e64l6vq 0.333333 \n", + "e6fjx0d 0.222222 \n", + "e5h3xyy 0.444444 \n", + "e589ri5 0.666667 \n", + "e5beuqa 0.333333 \n", + "e5lqoj1 0.555556 \n", + "e5kvch1 0.333333 \n", + "e6srvwm 0.333333 \n", + "e5o65mk 0.125000 \n", + "e647cm8 0.444444 \n", + "e58n526 1.000000 \n", + "e69r2kg 0.222222 \n", "\n", - " 2nd-argmax[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 4.0 \n", - "e5mhgl5 3.0 \n", - "e6w6fah 1.0 \n", - "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 0.0 \n", - "e5ua84v 3.0 \n", + " norm.max[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 0.166667 \n", + "e5ywqyk 0.250000 \n", + "e5qv9rj 0.500000 \n", + "e6jhojf 0.250000 \n", + "e6989ii 0.250000 \n", + "e69lgse 0.500000 \n", + "e5kwkg2 0.333333 \n", + "e6mehe7 0.333333 \n", + "e6m0hsd 0.250000 \n", + "e64r385 0.285714 \n", + "e5surbt 0.250000 \n", + "e58gxii 0.285714 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.285714 \n", + "e5borjq 0.428571 \n", + "e64n9zv 0.250000 \n", + "e582ud3 0.666667 \n", + "e64i9cf 0.400000 \n", + "e6q9204 0.250000 \n", + "e5modd7 0.500000 \n", + "e5xhbyd 0.250000 \n", + "e5oaf7h 0.250000 \n", + "e6nir3u 0.250000 \n", + "e6c3xdn 0.250000 \n", + "e5d3zaa 0.285714 \n", + "e5gnjv9 1.000000 \n", + "e69gw2t 0.285714 \n", + "e5syrih 0.375000 \n", + "e5sa2yf 0.125000 \n", + "e6ai7z5 0.166667 \n", + "... ... \n", + "e5smhzk 0.142857 \n", + "e5v91s0 0.500000 \n", + "e6n6di6 0.375000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.333333 \n", + "e6x5he5 0.400000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 0.400000 \n", + "e5b8sj7 0.500000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.500000 \n", + "e57a6qq 0.250000 \n", + "e5qc7eb 0.428571 \n", + "e6hqt5y 0.800000 \n", + "e5ua84v 0.375000 \n", + "e65m7kq 0.400000 \n", + "e5ggtru 0.500000 \n", + "e5pmmig 0.250000 \n", + "e64l6vq 0.333333 \n", + "e6fjx0d 0.142857 \n", + "e5h3xyy 0.400000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.428571 \n", + "e5lqoj1 0.250000 \n", + "e5kvch1 0.166667 \n", + "e6srvwm 0.333333 \n", + "e5o65mk 0.142857 \n", + "e647cm8 0.600000 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.285714 \n", "\n", - " norm.2nd-largest[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.285714 \n", - "e5ytz1d 0.166667 \n", - "e6ls80j 0.200000 \n", - "e5mhgl5 0.142857 \n", - "e6w6fah NaN \n", - "... ... 
\n", - "e65ca8k 0.000000 \n", - "e6cdkpy 0.200000 \n", - "e5wc4tj 0.500000 \n", - "e6ua0sb 0.285714 \n", - "e5ua84v 0.250000 \n", + " norm.max[indegree over c->c responses] \\\n", + "e6p7yrp 0.333333 \n", + "e5ywqyk 0.222222 \n", + "e5qv9rj 0.333333 \n", + "e6jhojf 0.222222 \n", + "e6989ii 0.222222 \n", + "e69lgse 0.333333 \n", + "e5kwkg2 0.333333 \n", + "e6mehe7 0.333333 \n", + "e6m0hsd 0.222222 \n", + "e64r385 0.222222 \n", + "e5surbt 0.555556 \n", + "e58gxii 0.222222 \n", + "e64vc8y 0.666667 \n", + "e57504g 0.222222 \n", + "e5borjq 0.333333 \n", + "e64n9zv 0.222222 \n", + "e582ud3 0.666667 \n", + "e64i9cf 0.444444 \n", + "e6q9204 0.222222 \n", + "e5modd7 0.444444 \n", + "e5xhbyd 0.222222 \n", + "e5oaf7h 0.222222 \n", + "e6nir3u 0.222222 \n", + "e6c3xdn 0.222222 \n", + "e5d3zaa 0.222222 \n", + "e5gnjv9 0.888889 \n", + "e69gw2t 0.222222 \n", + "e5syrih 0.333333 \n", + "e5sa2yf 0.111111 \n", + "e6ai7z5 0.333333 \n", + "... ... \n", + "e5smhzk 0.222222 \n", + "e5v91s0 0.555556 \n", + "e6n6di6 0.333333 \n", + "e6iqq30 0.666667 \n", + "e5bfad7 0.333333 \n", + "e6x5he5 0.444444 \n", + "e6l9uyf 0.555556 \n", + "e57hyr1 0.444444 \n", + "e5b8sj7 0.333333 \n", + "e6nlep7 1.000000 \n", + "e6ltazd 0.444444 \n", + "e57a6qq 0.222222 \n", + "e5qc7eb 0.333333 \n", + "e6hqt5y 0.444444 \n", + "e5ua84v 0.333333 \n", + "e65m7kq 0.444444 \n", + "e5ggtru 0.444444 \n", + "e5pmmig 0.222222 \n", + "e64l6vq 0.333333 \n", + "e6fjx0d 0.222222 \n", + "e5h3xyy 0.444444 \n", + "e589ri5 0.666667 \n", + "e5beuqa 0.333333 \n", + "e5lqoj1 0.555556 \n", + "e5kvch1 0.333333 \n", + "e6srvwm 0.333333 \n", + "e5o65mk 0.222222 \n", + "e647cm8 0.444444 \n", + "e58n526 1.000000 \n", + "e69r2kg 0.222222 \n", "\n", - " norm.2nd-largest[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.142857 \n", - "e5ytz1d 0.333333 \n", - "e6ls80j 0.200000 \n", - "e5mhgl5 0.285714 \n", - "e6w6fah NaN \n", - "... ... 
\n", - "e65ca8k 0.000000 \n", - "e6cdkpy 0.400000 \n", - "e5wc4tj 0.500000 \n", - "e6ua0sb 0.142857 \n", - "e5ua84v 0.250000 \n", + " norm.max[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.500000 \n", + "e5ywqyk 0.500000 \n", + "e5qv9rj 0.166667 \n", + "e6jhojf 0.571429 \n", + "e6989ii 0.500000 \n", + "e69lgse 0.333333 \n", + "e5kwkg2 0.166667 \n", + "e6mehe7 0.600000 \n", + "e6m0hsd 0.500000 \n", + "e64r385 0.285714 \n", + "e5surbt 0.250000 \n", + "e58gxii 0.428571 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.285714 \n", + "e5borjq 0.142857 \n", + "e64n9zv 0.500000 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.500000 \n", + "e6q9204 0.333333 \n", + "e5modd7 0.125000 \n", + "e5xhbyd 0.333333 \n", + "e5oaf7h 0.500000 \n", + "e6nir3u 0.500000 \n", + "e6c3xdn 0.125000 \n", + "e5d3zaa 0.200000 \n", + "e5gnjv9 1.000000 \n", + "e69gw2t 0.400000 \n", + "e5syrih 0.250000 \n", + "e5sa2yf 0.500000 \n", + "e6ai7z5 0.666667 \n", + "... ... \n", + "e5smhzk 0.666667 \n", + "e5v91s0 0.250000 \n", + "e6n6di6 0.250000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.333333 \n", + "e6x5he5 1.000000 \n", + "e6l9uyf 0.250000 \n", + "e57hyr1 0.200000 \n", + "e5b8sj7 0.166667 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.200000 \n", + "e57a6qq 0.333333 \n", + "e5qc7eb 0.166667 \n", + "e6hqt5y 0.200000 \n", + "e5ua84v 0.250000 \n", + "e65m7kq 0.200000 \n", + "e5ggtru 0.125000 \n", + "e5pmmig 0.333333 \n", + "e64l6vq 0.400000 \n", + "e6fjx0d 0.500000 \n", + "e5h3xyy 0.200000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.428571 \n", + "e5lqoj1 0.666667 \n", + "e5kvch1 0.200000 \n", + "e6srvwm 0.166667 \n", + "e5o65mk 0.500000 \n", + "e647cm8 0.200000 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.285714 \n", "\n", - " mean[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.400000 \n", - "e5ytz1d 0.666667 \n", - "e6ls80j 0.833333 \n", - "e5mhgl5 1.000000 \n", - "e6w6fah 0.000000 \n", - "... ... 
\n", - "e65ca8k 0.500000 \n", - "e6cdkpy 0.555556 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.400000 \n", - "e5ua84v 1.333333 \n", + " norm.max[outdegree over C->C responses] \\\n", + "e6p7yrp 0.375000 \n", + "e5ywqyk 0.400000 \n", + "e5qv9rj 0.111111 \n", + "e6jhojf 0.500000 \n", + "e6989ii 0.500000 \n", + "e69lgse 0.250000 \n", + "e5kwkg2 0.111111 \n", + "e6mehe7 0.375000 \n", + "e6m0hsd 0.500000 \n", + "e64r385 0.222222 \n", + "e5surbt 0.111111 \n", + "e58gxii 0.375000 \n", + "e64vc8y 0.111111 \n", + "e57504g 0.222222 \n", + "e5borjq 0.222222 \n", + "e64n9zv 0.500000 \n", + "e582ud3 0.111111 \n", + "e64i9cf 0.285714 \n", + "e6q9204 0.428571 \n", + "e5modd7 0.111111 \n", + "e5xhbyd 0.333333 \n", + "e5oaf7h 0.500000 \n", + "e6nir3u 0.500000 \n", + "e6c3xdn 0.111111 \n", + "e5d3zaa 0.285714 \n", + "e5gnjv9 0.222222 \n", + "e69gw2t 0.285714 \n", + "e5syrih 0.222222 \n", + "e5sa2yf 0.500000 \n", + "e6ai7z5 0.500000 \n", + "... ... \n", + "e5smhzk 0.500000 \n", + "e5v91s0 0.111111 \n", + "e6n6di6 0.222222 \n", + "e6iqq30 0.111111 \n", + "e5bfad7 0.222222 \n", + "e6x5he5 0.500000 \n", + "e6l9uyf 0.222222 \n", + "e57hyr1 0.222222 \n", + "e5b8sj7 0.222222 \n", + "e6nlep7 0.111111 \n", + "e6ltazd 0.333333 \n", + "e57a6qq 0.333333 \n", + "e5qc7eb 0.285714 \n", + "e6hqt5y 0.222222 \n", + "e5ua84v 0.333333 \n", + "e65m7kq 0.222222 \n", + "e5ggtru 0.222222 \n", + "e5pmmig 0.428571 \n", + "e64l6vq 0.285714 \n", + "e6fjx0d 0.500000 \n", + "e5h3xyy 0.222222 \n", + "e589ri5 0.111111 \n", + "e5beuqa 0.444444 \n", + "e5lqoj1 0.375000 \n", + "e5kvch1 0.250000 \n", + "e6srvwm 0.222222 \n", + "e5o65mk 0.500000 \n", + "e647cm8 0.111111 \n", + "e58n526 0.111111 \n", + "e69r2kg 0.222222 \n", "\n", - " mean[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.400000 \n", - "e5ytz1d 0.666667 \n", - "e6ls80j 0.833333 \n", - "e5mhgl5 1.000000 \n", - "e6w6fah 0.000000 \n", - "... ... 
\n", - "e65ca8k 0.500000 \n", - "e6cdkpy 0.555556 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.400000 \n", - "e5ua84v 1.333333 \n", + " norm.max[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.500000 \n", + "e5ywqyk 0.500000 \n", + "e5qv9rj 0.166667 \n", + "e6jhojf 0.500000 \n", + "e6989ii 0.571429 \n", + "e69lgse 0.333333 \n", + "e5kwkg2 0.166667 \n", + "e6mehe7 0.500000 \n", + "e6m0hsd 0.500000 \n", + "e64r385 0.285714 \n", + "e5surbt 0.250000 \n", + "e58gxii 0.428571 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.285714 \n", + "e5borjq 0.142857 \n", + "e64n9zv 0.375000 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.600000 \n", + "e6q9204 0.375000 \n", + "e5modd7 0.125000 \n", + "e5xhbyd 0.375000 \n", + "e5oaf7h 0.500000 \n", + "e6nir3u 0.571429 \n", + "e6c3xdn 0.125000 \n", + "e5d3zaa 0.285714 \n", + "e5gnjv9 1.000000 \n", + "e69gw2t 0.428571 \n", + "e5syrih 0.250000 \n", + "e5sa2yf 0.500000 \n", + "e6ai7z5 0.666667 \n", + "... ... \n", + "e5smhzk 0.571429 \n", + "e5v91s0 0.250000 \n", + "e6n6di6 0.250000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.333333 \n", + "e6x5he5 1.000000 \n", + "e6l9uyf 0.250000 \n", + "e57hyr1 0.200000 \n", + "e5b8sj7 0.166667 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.375000 \n", + "e57a6qq 0.375000 \n", + "e5qc7eb 0.285714 \n", + "e6hqt5y 0.200000 \n", + "e5ua84v 0.250000 \n", + "e65m7kq 0.200000 \n", + "e5ggtru 0.125000 \n", + "e5pmmig 0.375000 \n", + "e64l6vq 0.500000 \n", + "e6fjx0d 0.571429 \n", + "e5h3xyy 0.200000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.428571 \n", + "e5lqoj1 0.500000 \n", + "e5kvch1 0.333333 \n", + "e6srvwm 0.166667 \n", + "e5o65mk 0.571429 \n", + "e647cm8 0.200000 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.285714 \n", "\n", - " mean-nonzero[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.750000 \n", - "e5ytz1d 1.000000 \n", - "e6ls80j 1.250000 \n", - "e5mhgl5 1.166667 \n", - "e6w6fah 0.000000 \n", + " norm.max[outdegree over C->c responses] \\\n", + "e6p7yrp 0.333333 \n", + "e5ywqyk 
0.444444 \n", + "e5qv9rj 0.111111 \n", + "e6jhojf 0.444444 \n", + "e6989ii 0.500000 \n", + "e69lgse 0.222222 \n", + "e5kwkg2 0.111111 \n", + "e6mehe7 0.333333 \n", + "e6m0hsd 0.555556 \n", + "e64r385 0.222222 \n", + "e5surbt 0.111111 \n", + "e58gxii 0.333333 \n", + "e64vc8y 0.111111 \n", + "e57504g 0.222222 \n", + "e5borjq 0.222222 \n", + "e64n9zv 0.444444 \n", + "e582ud3 0.111111 \n", + "e64i9cf 0.333333 \n", + "e6q9204 0.444444 \n", + "e5modd7 0.111111 \n", + "e5xhbyd 0.333333 \n", + "e5oaf7h 0.555556 \n", + "e6nir3u 0.500000 \n", + "e6c3xdn 0.111111 \n", + "e5d3zaa 0.333333 \n", + "e5gnjv9 0.222222 \n", + "e69gw2t 0.333333 \n", + "e5syrih 0.222222 \n", + "e5sa2yf 0.555556 \n", + "e6ai7z5 0.555556 \n", + "... ... \n", + "e5smhzk 0.444444 \n", + "e5v91s0 0.111111 \n", + "e6n6di6 0.222222 \n", + "e6iqq30 0.111111 \n", + "e5bfad7 0.222222 \n", + "e6x5he5 0.500000 \n", + "e6l9uyf 0.222222 \n", + "e57hyr1 0.222222 \n", + "e5b8sj7 0.222222 \n", + "e6nlep7 0.111111 \n", + "e6ltazd 0.333333 \n", + "e57a6qq 0.333333 \n", + "e5qc7eb 0.222222 \n", + "e6hqt5y 0.222222 \n", + "e5ua84v 0.333333 \n", + "e65m7kq 0.222222 \n", + "e5ggtru 0.222222 \n", + "e5pmmig 0.333333 \n", + "e64l6vq 0.333333 \n", + "e6fjx0d 0.444444 \n", + "e5h3xyy 0.222222 \n", + "e589ri5 0.111111 \n", + "e5beuqa 0.444444 \n", + "e5lqoj1 0.333333 \n", + "e5kvch1 0.222222 \n", + "e6srvwm 0.222222 \n", + "e5o65mk 0.500000 \n", + "e647cm8 0.111111 \n", + "e58n526 0.111111 \n", + "e69r2kg 0.222222 \n", + "\n", + " prop-multiple[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.250000 \n", + "e5ywqyk 0.333333 \n", + "e5qv9rj 0.250000 \n", + "e6jhojf 0.200000 \n", + "e6989ii 0.000000 \n", + "e69lgse 0.250000 \n", + "e5kwkg2 0.200000 \n", + "e6mehe7 0.250000 \n", + "e6m0hsd 0.333333 \n", + "e64r385 0.400000 \n", + "e5surbt 0.000000 \n", + "e58gxii 0.500000 \n", + "e64vc8y 0.000000 \n", + "e57504g 0.400000 \n", + "e5borjq 0.500000 \n", + "e64n9zv 0.333333 \n", + "e582ud3 0.500000 \n", + "e64i9cf 0.333333 
\n", + "e6q9204 0.666667 \n", + "e5modd7 1.000000 \n", + "e5xhbyd 0.500000 \n", + "e5oaf7h 0.500000 \n", + "e6nir3u 0.000000 \n", + "e6c3xdn 0.142857 \n", + "e5d3zaa 0.666667 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 0.666667 \n", + "e5syrih 1.000000 \n", + "e5sa2yf 0.000000 \n", + "e6ai7z5 0.200000 \n", "... ... \n", - "e65ca8k 1.000000 \n", - "e6cdkpy 1.000000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.400000 \n", - "e5ua84v 1.333333 \n", + "e5smhzk 0.000000 \n", + "e5v91s0 0.333333 \n", + "e6n6di6 0.500000 \n", + "e6iqq30 0.000000 \n", + "e5bfad7 0.500000 \n", + "e6x5he5 0.000000 \n", + "e6l9uyf 0.333333 \n", + "e57hyr1 0.250000 \n", + "e5b8sj7 1.000000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.500000 \n", + "e57a6qq 0.666667 \n", + "e5qc7eb 0.666667 \n", + "e6hqt5y 0.500000 \n", + "e5ua84v 0.500000 \n", + "e65m7kq 0.333333 \n", + "e5ggtru 0.666667 \n", + "e5pmmig 0.500000 \n", + "e64l6vq 0.250000 \n", + "e6fjx0d 0.000000 \n", + "e5h3xyy 0.250000 \n", + "e589ri5 0.000000 \n", + "e5beuqa 0.250000 \n", + "e5lqoj1 0.000000 \n", + "e5kvch1 0.250000 \n", + "e6srvwm 0.500000 \n", + "e5o65mk 0.000000 \n", + "e647cm8 0.333333 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.500000 \n", "\n", - " mean-nonzero[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.400000 \n", - "e5ytz1d 2.000000 \n", - "e6ls80j 1.666667 \n", - "e5mhgl5 1.400000 \n", - "e6w6fah 0.000000 \n", - "... ... 
\n", - "e65ca8k 1.000000 \n", - "e6cdkpy 1.666667 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.750000 \n", - "e5ua84v 2.000000 \n", + " prop-multiple[indegree over C->C responses] \\\n", + "e6p7yrp 0.250000 \n", + "e5ywqyk 0.666667 \n", + "e5qv9rj 0.400000 \n", + "e6jhojf 0.200000 \n", + "e6989ii 0.000000 \n", + "e69lgse 0.500000 \n", + "e5kwkg2 0.333333 \n", + "e6mehe7 0.500000 \n", + "e6m0hsd 0.333333 \n", + "e64r385 0.500000 \n", + "e5surbt 0.200000 \n", + "e58gxii 0.500000 \n", + "e64vc8y 0.250000 \n", + "e57504g 0.500000 \n", + "e5borjq 0.600000 \n", + "e64n9zv 0.333333 \n", + "e582ud3 0.666667 \n", + "e64i9cf 0.666667 \n", + "e6q9204 0.500000 \n", + "e5modd7 0.750000 \n", + "e5xhbyd 0.500000 \n", + "e5oaf7h 0.500000 \n", + "e6nir3u 0.000000 \n", + "e6c3xdn 0.125000 \n", + "e5d3zaa 0.750000 \n", + "e5gnjv9 0.500000 \n", + "e69gw2t 0.750000 \n", + "e5syrih 0.750000 \n", + "e5sa2yf 0.000000 \n", + "e6ai7z5 0.400000 \n", + "... ... \n", + "e5smhzk 0.333333 \n", + "e5v91s0 0.500000 \n", + "e6n6di6 0.400000 \n", + "e6iqq30 0.250000 \n", + "e5bfad7 0.750000 \n", + "e6x5he5 0.200000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 0.400000 \n", + "e5b8sj7 1.000000 \n", + "e6nlep7 1.000000 \n", + "e6ltazd 0.333333 \n", + "e57a6qq 0.666667 \n", + "e5qc7eb 0.666667 \n", + "e6hqt5y 0.666667 \n", + "e5ua84v 0.400000 \n", + "e65m7kq 0.500000 \n", + "e5ggtru 0.500000 \n", + "e5pmmig 0.400000 \n", + "e64l6vq 0.500000 \n", + "e6fjx0d 0.333333 \n", + "e5h3xyy 0.400000 \n", + "e589ri5 0.250000 \n", + "e5beuqa 0.400000 \n", + "e5lqoj1 0.250000 \n", + "e5kvch1 0.400000 \n", + "e6srvwm 0.600000 \n", + "e5o65mk 0.000000 \n", + "e647cm8 0.500000 \n", + "e58n526 1.000000 \n", + "e69r2kg 0.600000 \n", "\n", - " prop-nonzero[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.800000 \n", - "e5ytz1d 0.666667 \n", - "e6ls80j 0.666667 \n", - "e5mhgl5 0.857143 \n", - "e6w6fah 0.000000 \n", + " prop-multiple[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.000000 \n", + "e5ywqyk 
0.142857 \n", + "e5qv9rj 0.250000 \n", + "e6jhojf 0.142857 \n", + "e6989ii 0.000000 \n", + "e69lgse 0.250000 \n", + "e5kwkg2 0.200000 \n", + "e6mehe7 0.200000 \n", + "e6m0hsd 0.142857 \n", + "e64r385 0.400000 \n", + "e5surbt 0.000000 \n", + "e58gxii 0.400000 \n", + "e64vc8y 0.000000 \n", + "e57504g 0.400000 \n", + "e5borjq 0.500000 \n", + "e64n9zv 0.142857 \n", + "e582ud3 0.500000 \n", + "e64i9cf 0.250000 \n", + "e6q9204 0.333333 \n", + "e5modd7 1.000000 \n", + "e5xhbyd 0.333333 \n", + "e5oaf7h 0.333333 \n", + "e6nir3u 0.000000 \n", + "e6c3xdn 0.142857 \n", + "e5d3zaa 0.400000 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 0.400000 \n", + "e5syrih 0.400000 \n", + "e5sa2yf 0.000000 \n", + "e6ai7z5 0.000000 \n", "... ... \n", - "e65ca8k 0.500000 \n", - "e6cdkpy 0.555556 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 1.000000 \n", - "e5ua84v 1.000000 \n", - "\n", - " prop-nonzero[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.000000 \n", - "e5ytz1d 0.333333 \n", - "e6ls80j 0.500000 \n", - "e5mhgl5 0.714286 \n", - "e6w6fah 0.000000 \n", - "... ... 
\n", - "e65ca8k 0.500000 \n", - "e6cdkpy 0.333333 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 0.800000 \n", - "e5ua84v 0.666667 \n", + "e5smhzk 0.000000 \n", + "e5v91s0 0.333333 \n", + "e6n6di6 0.400000 \n", + "e6iqq30 0.000000 \n", + "e5bfad7 0.500000 \n", + "e6x5he5 0.000000 \n", + "e6l9uyf 0.333333 \n", + "e57hyr1 0.250000 \n", + "e5b8sj7 0.666667 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.200000 \n", + "e57a6qq 0.333333 \n", + "e5qc7eb 0.500000 \n", + "e6hqt5y 0.500000 \n", + "e5ua84v 0.400000 \n", + "e65m7kq 0.250000 \n", + "e5ggtru 0.500000 \n", + "e5pmmig 0.142857 \n", + "e64l6vq 0.200000 \n", + "e6fjx0d 0.000000 \n", + "e5h3xyy 0.250000 \n", + "e589ri5 0.000000 \n", + "e5beuqa 0.200000 \n", + "e5lqoj1 0.000000 \n", + "e5kvch1 0.000000 \n", + "e6srvwm 0.500000 \n", + "e5o65mk 0.000000 \n", + "e647cm8 0.333333 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.400000 \n", "\n", - " prop-multiple[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.750000 \n", - "e5ytz1d 0.000000 \n", - "e6ls80j 0.250000 \n", - "e5mhgl5 0.166667 \n", - "e6w6fah 0.000000 \n", - "... ... 
\n", - "e65ca8k 0.000000 \n", - "e6cdkpy 0.000000 \n", - "e5wc4tj 0.000000 \n", - "e6ua0sb 0.400000 \n", - "e5ua84v 0.333333 \n", + " prop-multiple[indegree over C->c responses] \\\n", + "e6p7yrp 0.142857 \n", + "e5ywqyk 0.125000 \n", + "e5qv9rj 0.400000 \n", + "e6jhojf 0.125000 \n", + "e6989ii 0.000000 \n", + "e69lgse 0.400000 \n", + "e5kwkg2 0.333333 \n", + "e6mehe7 0.333333 \n", + "e6m0hsd 0.125000 \n", + "e64r385 0.500000 \n", + "e5surbt 0.200000 \n", + "e58gxii 0.500000 \n", + "e64vc8y 0.250000 \n", + "e57504g 0.500000 \n", + "e5borjq 0.600000 \n", + "e64n9zv 0.125000 \n", + "e582ud3 0.666667 \n", + "e64i9cf 0.400000 \n", + "e6q9204 0.285714 \n", + "e5modd7 0.750000 \n", + "e5xhbyd 0.285714 \n", + "e5oaf7h 0.285714 \n", + "e6nir3u 0.000000 \n", + "e6c3xdn 0.125000 \n", + "e5d3zaa 0.500000 \n", + "e5gnjv9 0.500000 \n", + "e69gw2t 0.500000 \n", + "e5syrih 0.333333 \n", + "e5sa2yf 0.000000 \n", + "e6ai7z5 0.142857 \n", + "... ... \n", + "e5smhzk 0.125000 \n", + "e5v91s0 0.500000 \n", + "e6n6di6 0.333333 \n", + "e6iqq30 0.250000 \n", + "e5bfad7 0.600000 \n", + "e6x5he5 0.200000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 0.400000 \n", + "e5b8sj7 0.750000 \n", + "e6nlep7 1.000000 \n", + "e6ltazd 0.166667 \n", + "e57a6qq 0.285714 \n", + "e5qc7eb 0.600000 \n", + "e6hqt5y 0.666667 \n", + "e5ua84v 0.333333 \n", + "e65m7kq 0.400000 \n", + "e5ggtru 0.400000 \n", + "e5pmmig 0.125000 \n", + "e64l6vq 0.333333 \n", + "e6fjx0d 0.125000 \n", + "e5h3xyy 0.400000 \n", + "e589ri5 0.250000 \n", + "e5beuqa 0.333333 \n", + "e5lqoj1 0.200000 \n", + "e5kvch1 0.142857 \n", + "e6srvwm 0.600000 \n", + "e5o65mk 0.000000 \n", + "e647cm8 0.500000 \n", + "e58n526 1.000000 \n", + "e69r2kg 0.500000 \n", "\n", - " prop-multiple[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.200000 \n", - "e5ytz1d 0.666667 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 0.400000 \n", - "e6w6fah 0.000000 \n", + " prop-multiple[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 0.000000 \n", + "e5ywqyk 
0.142857 \n", + "e5qv9rj 0.250000 \n", + "e6jhojf 0.142857 \n", + "e6989ii 0.142857 \n", + "e69lgse 0.250000 \n", + "e5kwkg2 0.200000 \n", + "e6mehe7 0.200000 \n", + "e6m0hsd 0.142857 \n", + "e64r385 0.400000 \n", + "e5surbt 0.000000 \n", + "e58gxii 0.400000 \n", + "e64vc8y 0.000000 \n", + "e57504g 0.400000 \n", + "e5borjq 0.500000 \n", + "e64n9zv 0.142857 \n", + "e582ud3 0.500000 \n", + "e64i9cf 0.250000 \n", + "e6q9204 0.333333 \n", + "e5modd7 1.000000 \n", + "e5xhbyd 0.333333 \n", + "e5oaf7h 0.333333 \n", + "e6nir3u 0.142857 \n", + "e6c3xdn 0.142857 \n", + "e5d3zaa 0.400000 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 0.400000 \n", + "e5syrih 0.400000 \n", + "e5sa2yf 0.000000 \n", + "e6ai7z5 0.000000 \n", "... ... \n", - "e65ca8k 0.000000 \n", - "e6cdkpy 0.666667 \n", - "e5wc4tj 0.000000 \n", - "e6ua0sb 0.250000 \n", - "e5ua84v 0.500000 \n", + "e5smhzk 0.000000 \n", + "e5v91s0 0.333333 \n", + "e6n6di6 0.400000 \n", + "e6iqq30 0.000000 \n", + "e5bfad7 0.500000 \n", + "e6x5he5 0.250000 \n", + "e6l9uyf 0.333333 \n", + "e57hyr1 0.250000 \n", + "e5b8sj7 0.666667 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.200000 \n", + "e57a6qq 0.333333 \n", + "e5qc7eb 0.500000 \n", + "e6hqt5y 0.500000 \n", + "e5ua84v 0.400000 \n", + "e65m7kq 0.250000 \n", + "e5ggtru 0.500000 \n", + "e5pmmig 0.142857 \n", + "e64l6vq 0.200000 \n", + "e6fjx0d 0.000000 \n", + "e5h3xyy 0.250000 \n", + "e589ri5 0.000000 \n", + "e5beuqa 0.200000 \n", + "e5lqoj1 0.000000 \n", + "e5kvch1 0.000000 \n", + "e6srvwm 0.500000 \n", + "e5o65mk 0.000000 \n", + "e647cm8 0.333333 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.400000 \n", "\n", - " entropy[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.351784 \n", - "e5ytz1d 1.791759 \n", - "e6ls80j 1.332179 \n", - "e5mhgl5 1.747868 \n", - "e6w6fah NaN \n", - "... ... 
\n", - "e65ca8k 0.000000 \n", - "e6cdkpy 1.609438 \n", - "e5wc4tj 0.693147 \n", - "e6ua0sb 1.549826 \n", - "e5ua84v 1.732868 \n", + " prop-multiple[indegree over c->c responses] \\\n", + "e6p7yrp 0.142857 \n", + "e5ywqyk 0.125000 \n", + "e5qv9rj 0.400000 \n", + "e6jhojf 0.125000 \n", + "e6989ii 0.125000 \n", + "e69lgse 0.400000 \n", + "e5kwkg2 0.333333 \n", + "e6mehe7 0.333333 \n", + "e6m0hsd 0.125000 \n", + "e64r385 0.500000 \n", + "e5surbt 0.200000 \n", + "e58gxii 0.500000 \n", + "e64vc8y 0.250000 \n", + "e57504g 0.500000 \n", + "e5borjq 0.600000 \n", + "e64n9zv 0.125000 \n", + "e582ud3 0.666667 \n", + "e64i9cf 0.400000 \n", + "e6q9204 0.285714 \n", + "e5modd7 0.750000 \n", + "e5xhbyd 0.285714 \n", + "e5oaf7h 0.285714 \n", + "e6nir3u 0.125000 \n", + "e6c3xdn 0.125000 \n", + "e5d3zaa 0.500000 \n", + "e5gnjv9 0.500000 \n", + "e69gw2t 0.500000 \n", + "e5syrih 0.333333 \n", + "e5sa2yf 0.000000 \n", + "e6ai7z5 0.142857 \n", + "... ... \n", + "e5smhzk 0.125000 \n", + "e5v91s0 0.500000 \n", + "e6n6di6 0.333333 \n", + "e6iqq30 0.250000 \n", + "e5bfad7 0.600000 \n", + "e6x5he5 0.400000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 0.400000 \n", + "e5b8sj7 0.750000 \n", + "e6nlep7 1.000000 \n", + "e6ltazd 0.166667 \n", + "e57a6qq 0.285714 \n", + "e5qc7eb 0.600000 \n", + "e6hqt5y 0.666667 \n", + "e5ua84v 0.333333 \n", + "e65m7kq 0.400000 \n", + "e5ggtru 0.400000 \n", + "e5pmmig 0.125000 \n", + "e64l6vq 0.333333 \n", + "e6fjx0d 0.125000 \n", + "e5h3xyy 0.400000 \n", + "e589ri5 0.250000 \n", + "e5beuqa 0.333333 \n", + "e5lqoj1 0.200000 \n", + "e5kvch1 0.142857 \n", + "e6srvwm 0.600000 \n", + "e5o65mk 0.125000 \n", + "e647cm8 0.500000 \n", + "e58n526 1.000000 \n", + "e69r2kg 0.500000 \n", "\n", - " entropy[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.475076 \n", - "e5ytz1d 1.011404 \n", - "e6ls80j 0.950271 \n", - "e5mhgl5 1.549826 \n", - "e6w6fah NaN \n", - "... ... 
\n", - "e65ca8k 0.000000 \n", - "e6cdkpy 1.054920 \n", - "e5wc4tj 0.693147 \n", - "e6ua0sb 1.153742 \n", - "e5ua84v 1.213008 \n", + " prop-multiple[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.250000 \n", + "e5ywqyk 0.333333 \n", + "e5qv9rj 0.000000 \n", + "e6jhojf 0.250000 \n", + "e6989ii 0.000000 \n", + "e69lgse 0.200000 \n", + "e5kwkg2 0.000000 \n", + "e6mehe7 0.333333 \n", + "e6m0hsd 0.333333 \n", + "e64r385 0.166667 \n", + "e5surbt 0.000000 \n", + "e58gxii 0.200000 \n", + "e64vc8y 0.000000 \n", + "e57504g 0.166667 \n", + "e5borjq 0.000000 \n", + "e64n9zv 0.333333 \n", + "e582ud3 0.000000 \n", + "e64i9cf 0.333333 \n", + "e6q9204 0.200000 \n", + "e5modd7 0.000000 \n", + "e5xhbyd 0.500000 \n", + "e5oaf7h 0.333333 \n", + "e6nir3u 0.000000 \n", + "e6c3xdn 0.000000 \n", + "e5d3zaa 0.000000 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 0.250000 \n", + "e5syrih 0.142857 \n", + "e5sa2yf 0.000000 \n", + "e6ai7z5 0.333333 \n", + "... ... \n", + "e5smhzk 0.500000 \n", + "e5v91s0 0.000000 \n", + "e6n6di6 0.142857 \n", + "e6iqq30 0.000000 \n", + "e5bfad7 0.200000 \n", + "e6x5he5 1.000000 \n", + "e6l9uyf 0.000000 \n", + "e57hyr1 0.000000 \n", + "e5b8sj7 0.000000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.000000 \n", + "e57a6qq 0.200000 \n", + "e5qc7eb 0.000000 \n", + "e6hqt5y 0.000000 \n", + "e5ua84v 0.333333 \n", + "e65m7kq 0.000000 \n", + "e5ggtru 0.000000 \n", + "e5pmmig 0.500000 \n", + "e64l6vq 0.250000 \n", + "e6fjx0d 0.000000 \n", + "e5h3xyy 0.000000 \n", + "e589ri5 0.000000 \n", + "e5beuqa 0.200000 \n", + "e5lqoj1 0.500000 \n", + "e5kvch1 0.000000 \n", + "e6srvwm 0.000000 \n", + "e5o65mk 0.000000 \n", + "e647cm8 0.000000 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.166667 \n", "\n", - " 2nd-largest / max[outdegree over C->C mid-thread responses] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 0.5 \n", - "e5mhgl5 0.5 \n", - "e6w6fah NaN \n", - "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 1.0 \n", + " prop-multiple[outdegree over C->C responses] \\\n", + "e6p7yrp 0.166667 \n", + "e5ywqyk 0.250000 \n", + "e5qv9rj 0.000000 \n", + "e6jhojf 0.200000 \n", + "e6989ii 0.000000 \n", + "e69lgse 0.142857 \n", + "e5kwkg2 0.000000 \n", + "e6mehe7 0.166667 \n", + "e6m0hsd 0.333333 \n", + "e64r385 0.285714 \n", + "e5surbt 0.000000 \n", + "e58gxii 0.166667 \n", + "e64vc8y 0.000000 \n", + "e57504g 0.285714 \n", + "e5borjq 0.125000 \n", + "e64n9zv 0.333333 \n", + "e582ud3 0.000000 \n", + "e64i9cf 0.166667 \n", + "e6q9204 0.200000 \n", + "e5modd7 0.000000 \n", + "e5xhbyd 0.500000 \n", + "e5oaf7h 0.333333 \n", + "e6nir3u 0.000000 \n", + "e6c3xdn 0.000000 \n", + "e5d3zaa 0.166667 \n", + "e5gnjv9 0.125000 \n", + "e69gw2t 0.400000 \n", + "e5syrih 0.285714 \n", + "e5sa2yf 0.000000 \n", + "e6ai7z5 0.200000 \n", + "... ... \n", + "e5smhzk 0.333333 \n", + "e5v91s0 0.000000 \n", + "e6n6di6 0.125000 \n", + "e6iqq30 0.000000 \n", + "e5bfad7 0.125000 \n", + "e6x5he5 0.200000 \n", + "e6l9uyf 0.125000 \n", + "e57hyr1 0.500000 \n", + "e5b8sj7 0.125000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.200000 \n", + "e57a6qq 0.200000 \n", + "e5qc7eb 0.166667 \n", + "e6hqt5y 0.125000 \n", + "e5ua84v 0.333333 \n", + "e65m7kq 0.125000 \n", + "e5ggtru 0.125000 \n", + "e5pmmig 0.500000 \n", + "e64l6vq 0.166667 \n", + "e6fjx0d 0.333333 \n", + "e5h3xyy 0.125000 \n", + "e589ri5 0.000000 \n", + "e5beuqa 0.400000 \n", + "e5lqoj1 0.166667 \n", + "e5kvch1 0.333333 \n", + "e6srvwm 0.125000 \n", + "e5o65mk 0.000000 \n", + "e647cm8 0.000000 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.125000 \n", "\n", - " 2nd-largest / max[indegree over C->C mid-thread responses] \\\n", - "e5hm9mp 0.333333 \n", - "e5ytz1d 0.666667 \n", - "e6ls80j 0.333333 \n", - "e5mhgl5 1.000000 \n", - "e6w6fah NaN \n", - "... ... 
\n", - "e65ca8k 0.000000 \n", - "e6cdkpy 1.000000 \n", - "e5wc4tj 1.000000 \n", - "e6ua0sb 0.250000 \n", - "e5ua84v 0.500000 \n", + " prop-multiple[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.250000 \n", + "e5ywqyk 0.666667 \n", + "e5qv9rj 0.000000 \n", + "e6jhojf 0.500000 \n", + "e6989ii 1.000000 \n", + "e69lgse 0.200000 \n", + "e5kwkg2 0.000000 \n", + "e6mehe7 0.666667 \n", + "e6m0hsd 0.666667 \n", + "e64r385 0.166667 \n", + "e5surbt 0.000000 \n", + "e58gxii 0.200000 \n", + "e64vc8y 0.000000 \n", + "e57504g 0.166667 \n", + "e5borjq 0.000000 \n", + "e64n9zv 1.000000 \n", + "e582ud3 0.000000 \n", + "e64i9cf 0.333333 \n", + "e6q9204 0.400000 \n", + "e5modd7 0.000000 \n", + "e5xhbyd 0.750000 \n", + "e5oaf7h 0.666667 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.000000 \n", + "e5d3zaa 0.400000 \n", + "e5gnjv9 0.000000 \n", + "e69gw2t 0.500000 \n", + "e5syrih 0.142857 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 0.333333 \n", + "... ... \n", + "e5smhzk 1.000000 \n", + "e5v91s0 0.000000 \n", + "e6n6di6 0.142857 \n", + "e6iqq30 0.000000 \n", + "e5bfad7 0.200000 \n", + "e6x5he5 1.000000 \n", + "e6l9uyf 0.000000 \n", + "e57hyr1 0.000000 \n", + "e5b8sj7 0.000000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.400000 \n", + "e57a6qq 0.400000 \n", + "e5qc7eb 0.166667 \n", + "e6hqt5y 0.000000 \n", + "e5ua84v 0.333333 \n", + "e65m7kq 0.000000 \n", + "e5ggtru 0.000000 \n", + "e5pmmig 0.750000 \n", + "e64l6vq 0.250000 \n", + "e6fjx0d 1.000000 \n", + "e5h3xyy 0.000000 \n", + "e589ri5 0.000000 \n", + "e5beuqa 0.200000 \n", + "e5lqoj1 1.000000 \n", + "e5kvch1 0.200000 \n", + "e6srvwm 0.000000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 0.000000 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.166667 \n", "\n", - " is-present[reciprocity motif over mid-thread] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 1.0 \n", - "e6cdkpy 0.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 1.0 \n", + " prop-multiple[outdegree over C->c responses] \\\n", + "e6p7yrp 0.333333 \n", + "e5ywqyk 0.500000 \n", + "e5qv9rj 0.000000 \n", + "e6jhojf 0.400000 \n", + "e6989ii 1.000000 \n", + "e69lgse 0.285714 \n", + "e5kwkg2 0.000000 \n", + "e6mehe7 0.333333 \n", + "e6m0hsd 0.666667 \n", + "e64r385 0.285714 \n", + "e5surbt 0.000000 \n", + "e58gxii 0.333333 \n", + "e64vc8y 0.000000 \n", + "e57504g 0.285714 \n", + "e5borjq 0.125000 \n", + "e64n9zv 1.000000 \n", + "e582ud3 0.000000 \n", + "e64i9cf 0.333333 \n", + "e6q9204 0.400000 \n", + "e5modd7 0.000000 \n", + "e5xhbyd 0.750000 \n", + "e5oaf7h 0.666667 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.000000 \n", + "e5d3zaa 0.333333 \n", + "e5gnjv9 0.125000 \n", + "e69gw2t 0.400000 \n", + "e5syrih 0.285714 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 0.200000 \n", + "... ... \n", + "e5smhzk 0.666667 \n", + "e5v91s0 0.000000 \n", + "e6n6di6 0.125000 \n", + "e6iqq30 0.000000 \n", + "e5bfad7 0.125000 \n", + "e6x5he5 0.200000 \n", + "e6l9uyf 0.125000 \n", + "e57hyr1 0.500000 \n", + "e5b8sj7 0.125000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.400000 \n", + "e57a6qq 0.400000 \n", + "e5qc7eb 0.500000 \n", + "e6hqt5y 0.125000 \n", + "e5ua84v 0.333333 \n", + "e65m7kq 0.125000 \n", + "e5ggtru 0.125000 \n", + "e5pmmig 0.750000 \n", + "e64l6vq 0.333333 \n", + "e6fjx0d 0.666667 \n", + "e5h3xyy 0.125000 \n", + "e589ri5 0.000000 \n", + "e5beuqa 0.400000 \n", + "e5lqoj1 0.333333 \n", + "e5kvch1 0.500000 \n", + "e6srvwm 0.125000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 0.000000 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.125000 \n", "\n", - " count[reciprocity motif over mid-thread] \\\n", - "e5hm9mp 3.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 6.0 \n", - "e6cdkpy 0.0 \n", - "e5wc4tj 7.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 3.0 \n", + " prop-nonzero[indegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.666667 \n", + "e5ywqyk 0.750000 \n", + "e5qv9rj 0.444444 \n", + "e6jhojf 1.000000 \n", + "e6989ii 1.000000 \n", + "e69lgse 0.571429 \n", + "e5kwkg2 0.555556 \n", + "e6mehe7 0.666667 \n", + "e6m0hsd 1.000000 \n", + "e64r385 0.714286 \n", + "e5surbt 0.444444 \n", + "e58gxii 0.666667 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.714286 \n", + "e5borjq 0.500000 \n", + "e64n9zv 1.000000 \n", + "e582ud3 0.222222 \n", + "e64i9cf 0.500000 \n", + "e6q9204 0.600000 \n", + "e5modd7 0.333333 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 0.666667 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.777778 \n", + "e5d3zaa 0.500000 \n", + "e5gnjv9 0.125000 \n", + "e69gw2t 0.600000 \n", + "e5syrih 0.428571 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.000000 \n", + "... ... \n", + "e5smhzk 1.000000 \n", + "e5v91s0 0.333333 \n", + "e6n6di6 0.500000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.500000 \n", + "e6x5he5 0.800000 \n", + "e6l9uyf 0.375000 \n", + "e57hyr1 0.666667 \n", + "e5b8sj7 0.250000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.400000 \n", + "e57a6qq 0.600000 \n", + "e5qc7eb 0.500000 \n", + "e6hqt5y 0.250000 \n", + "e5ua84v 0.666667 \n", + "e65m7kq 0.375000 \n", + "e5ggtru 0.375000 \n", + "e5pmmig 1.000000 \n", + "e64l6vq 0.666667 \n", + "e6fjx0d 0.666667 \n", + "e5h3xyy 0.500000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.800000 \n", + "e5lqoj1 0.500000 \n", + "e5kvch1 0.666667 \n", + "e6srvwm 0.500000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 0.333333 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.500000 \n", "\n", - " is-present[external reciprocity motif over mid-thread] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 1.0 \n", + " prop-nonzero[indegree over C->C responses] \\\n", + "e6p7yrp 0.666667 \n", + "e5ywqyk 0.750000 \n", + "e5qv9rj 0.500000 \n", + "e6jhojf 1.000000 \n", + "e6989ii 1.000000 \n", + "e69lgse 0.571429 \n", + "e5kwkg2 0.600000 \n", + "e6mehe7 0.666667 \n", + "e6m0hsd 1.000000 \n", + "e64r385 0.750000 \n", + "e5surbt 0.500000 \n", + "e58gxii 0.666667 \n", + "e64vc8y 0.400000 \n", + "e57504g 0.750000 \n", + "e5borjq 0.625000 \n", + "e64n9zv 1.000000 \n", + "e582ud3 0.300000 \n", + "e64i9cf 0.500000 \n", + "e6q9204 0.666667 \n", + "e5modd7 0.400000 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 0.666667 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.800000 \n", + "e5d3zaa 0.571429 \n", + "e5gnjv9 0.222222 \n", + "e69gw2t 0.800000 \n", + "e5syrih 0.571429 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.000000 \n", + "... ... \n", + "e5smhzk 1.000000 \n", + "e5v91s0 0.400000 \n", + "e6n6di6 0.555556 \n", + "e6iqq30 0.444444 \n", + "e5bfad7 0.500000 \n", + "e6x5he5 1.000000 \n", + "e6l9uyf 0.444444 \n", + "e57hyr1 0.714286 \n", + "e5b8sj7 0.375000 \n", + "e6nlep7 0.100000 \n", + "e6ltazd 0.500000 \n", + "e57a6qq 0.600000 \n", + "e5qc7eb 0.500000 \n", + "e6hqt5y 0.333333 \n", + "e5ua84v 0.714286 \n", + "e65m7kq 0.444444 \n", + "e5ggtru 0.444444 \n", + "e5pmmig 1.000000 \n", + "e64l6vq 0.666667 \n", + "e6fjx0d 0.750000 \n", + "e5h3xyy 0.555556 \n", + "e589ri5 0.400000 \n", + "e5beuqa 0.833333 \n", + "e5lqoj1 0.571429 \n", + "e5kvch1 0.714286 \n", + "e6srvwm 0.555556 \n", + "e5o65mk 1.000000 \n", + "e647cm8 0.444444 \n", + "e58n526 0.100000 \n", + "e69r2kg 0.625000 \n", "\n", - " count[external reciprocity motif over mid-thread] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 4.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 4.0 \n", - "e5ua84v 2.0 \n", + " prop-nonzero[indegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.666667 \n", + "e5ywqyk 0.777778 \n", + "e5qv9rj 0.444444 \n", + "e6jhojf 0.777778 \n", + "e6989ii 0.777778 \n", + "e69lgse 0.444444 \n", + "e5kwkg2 0.555556 \n", + "e6mehe7 0.555556 \n", + "e6m0hsd 0.777778 \n", + "e64r385 0.555556 \n", + "e5surbt 0.444444 \n", + "e58gxii 0.555556 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.555556 \n", + "e5borjq 0.444444 \n", + "e64n9zv 0.777778 \n", + "e582ud3 0.222222 \n", + "e64i9cf 0.444444 \n", + "e6q9204 0.666667 \n", + "e5modd7 0.333333 \n", + "e5xhbyd 0.666667 \n", + "e5oaf7h 0.666667 \n", + "e6nir3u 0.777778 \n", + "e6c3xdn 0.777778 \n", + "e5d3zaa 0.555556 \n", + "e5gnjv9 0.111111 \n", + "e69gw2t 0.555556 \n", + "e5syrih 0.555556 \n", + "e5sa2yf 0.888889 \n", + "e6ai7z5 0.666667 \n", + "... ... \n", + "e5smhzk 0.777778 \n", + "e5v91s0 0.333333 \n", + "e6n6di6 0.555556 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.444444 \n", + "e6x5he5 0.444444 \n", + "e6l9uyf 0.333333 \n", + "e57hyr1 0.444444 \n", + "e5b8sj7 0.333333 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.555556 \n", + "e57a6qq 0.666667 \n", + "e5qc7eb 0.444444 \n", + "e6hqt5y 0.222222 \n", + "e5ua84v 0.555556 \n", + "e65m7kq 0.444444 \n", + "e5ggtru 0.444444 \n", + "e5pmmig 0.777778 \n", + "e64l6vq 0.555556 \n", + "e6fjx0d 0.777778 \n", + "e5h3xyy 0.444444 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.555556 \n", + "e5lqoj1 0.444444 \n", + "e5kvch1 0.666667 \n", + "e6srvwm 0.444444 \n", + "e5o65mk 0.777778 \n", + "e647cm8 0.333333 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.555556 \n", "\n", - " is-present[dyadic interaction motif over mid-thread] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 0.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 1.0 \n", + " prop-nonzero[indegree over C->c responses] \\\n", + "e6p7yrp 0.7 \n", + "e5ywqyk 0.8 \n", + "e5qv9rj 0.5 \n", + "e6jhojf 0.8 \n", + "e6989ii 0.8 \n", + "e69lgse 0.5 \n", + "e5kwkg2 0.6 \n", + "e6mehe7 0.6 \n", + "e6m0hsd 0.8 \n", + "e64r385 0.6 \n", + "e5surbt 0.5 \n", + "e58gxii 0.6 \n", + "e64vc8y 0.4 \n", + "e57504g 0.6 \n", + "e5borjq 0.5 \n", + "e64n9zv 0.8 \n", + "e582ud3 0.3 \n", + "e64i9cf 0.5 \n", + "e6q9204 0.7 \n", + "e5modd7 0.4 \n", + "e5xhbyd 0.7 \n", + "e5oaf7h 0.7 \n", + "e6nir3u 0.8 \n", + "e6c3xdn 0.8 \n", + "e5d3zaa 0.6 \n", + "e5gnjv9 0.2 \n", + "e69gw2t 0.6 \n", + "e5syrih 0.6 \n", + "e5sa2yf 0.9 \n", + "e6ai7z5 0.7 \n", + "... ... \n", + "e5smhzk 0.8 \n", + "e5v91s0 0.4 \n", + "e6n6di6 0.6 \n", + "e6iqq30 0.4 \n", + "e5bfad7 0.5 \n", + "e6x5he5 0.5 \n", + "e6l9uyf 0.4 \n", + "e57hyr1 0.5 \n", + "e5b8sj7 0.4 \n", + "e6nlep7 0.1 \n", + "e6ltazd 0.6 \n", + "e57a6qq 0.7 \n", + "e5qc7eb 0.5 \n", + "e6hqt5y 0.3 \n", + "e5ua84v 0.6 \n", + "e65m7kq 0.5 \n", + "e5ggtru 0.5 \n", + "e5pmmig 0.8 \n", + "e64l6vq 0.6 \n", + "e6fjx0d 0.8 \n", + "e5h3xyy 0.5 \n", + "e589ri5 0.4 \n", + "e5beuqa 0.6 \n", + "e5lqoj1 0.5 \n", + "e5kvch1 0.7 \n", + "e6srvwm 0.5 \n", + "e5o65mk 0.8 \n", + "e647cm8 0.4 \n", + "e58n526 0.1 \n", + "e69r2kg 0.6 \n", "\n", - " count[dyadic interaction motif over mid-thread] \\\n", - "e5hm9mp 2.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 0.0 \n", - "e5wc4tj 1.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 3.0 \n", + " prop-nonzero[indegree over c->c mid-thread responses] \\\n", + "e6p7yrp 0.666667 \n", + "e5ywqyk 0.777778 \n", + "e5qv9rj 0.444444 \n", + "e6jhojf 0.777778 \n", + "e6989ii 0.777778 \n", + "e69lgse 0.444444 \n", + "e5kwkg2 0.555556 \n", + "e6mehe7 0.555556 \n", + "e6m0hsd 0.777778 \n", + "e64r385 0.555556 \n", + "e5surbt 0.444444 \n", + "e58gxii 0.555556 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.555556 \n", + "e5borjq 0.444444 \n", + "e64n9zv 0.777778 \n", + "e582ud3 0.222222 \n", + "e64i9cf 0.444444 \n", + "e6q9204 0.666667 \n", + "e5modd7 0.333333 \n", + "e5xhbyd 0.666667 \n", + "e5oaf7h 0.666667 \n", + "e6nir3u 0.777778 \n", + "e6c3xdn 0.777778 \n", + "e5d3zaa 0.555556 \n", + "e5gnjv9 0.111111 \n", + "e69gw2t 0.555556 \n", + "e5syrih 0.555556 \n", + "e5sa2yf 0.888889 \n", + "e6ai7z5 0.666667 \n", + "... ... \n", + "e5smhzk 0.777778 \n", + "e5v91s0 0.333333 \n", + "e6n6di6 0.555556 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.444444 \n", + "e6x5he5 0.444444 \n", + "e6l9uyf 0.333333 \n", + "e57hyr1 0.444444 \n", + "e5b8sj7 0.333333 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 0.555556 \n", + "e57a6qq 0.666667 \n", + "e5qc7eb 0.444444 \n", + "e6hqt5y 0.222222 \n", + "e5ua84v 0.555556 \n", + "e65m7kq 0.444444 \n", + "e5ggtru 0.444444 \n", + "e5pmmig 0.777778 \n", + "e64l6vq 0.555556 \n", + "e6fjx0d 0.777778 \n", + "e5h3xyy 0.444444 \n", + "e589ri5 0.333333 \n", + "e5beuqa 0.555556 \n", + "e5lqoj1 0.444444 \n", + "e5kvch1 0.666667 \n", + "e6srvwm 0.444444 \n", + "e5o65mk 0.777778 \n", + "e647cm8 0.333333 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.555556 \n", "\n", - " is-present[incoming triads over mid-thread] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 1.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 1.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 1.0 \n", + " prop-nonzero[indegree over c->c responses] \\\n", + "e6p7yrp 0.7 \n", + "e5ywqyk 0.8 \n", + "e5qv9rj 0.5 \n", + "e6jhojf 0.8 \n", + "e6989ii 0.8 \n", + "e69lgse 0.5 \n", + "e5kwkg2 0.6 \n", + "e6mehe7 0.6 \n", + "e6m0hsd 0.8 \n", + "e64r385 0.6 \n", + "e5surbt 0.5 \n", + "e58gxii 0.6 \n", + "e64vc8y 0.4 \n", + "e57504g 0.6 \n", + "e5borjq 0.5 \n", + "e64n9zv 0.8 \n", + "e582ud3 0.3 \n", + "e64i9cf 0.5 \n", + "e6q9204 0.7 \n", + "e5modd7 0.4 \n", + "e5xhbyd 0.7 \n", + "e5oaf7h 0.7 \n", + "e6nir3u 0.8 \n", + "e6c3xdn 0.8 \n", + "e5d3zaa 0.6 \n", + "e5gnjv9 0.2 \n", + "e69gw2t 0.6 \n", + "e5syrih 0.6 \n", + "e5sa2yf 0.9 \n", + "e6ai7z5 0.7 \n", + "... ... \n", + "e5smhzk 0.8 \n", + "e5v91s0 0.4 \n", + "e6n6di6 0.6 \n", + "e6iqq30 0.4 \n", + "e5bfad7 0.5 \n", + "e6x5he5 0.5 \n", + "e6l9uyf 0.4 \n", + "e57hyr1 0.5 \n", + "e5b8sj7 0.4 \n", + "e6nlep7 0.1 \n", + "e6ltazd 0.6 \n", + "e57a6qq 0.7 \n", + "e5qc7eb 0.5 \n", + "e6hqt5y 0.3 \n", + "e5ua84v 0.6 \n", + "e65m7kq 0.5 \n", + "e5ggtru 0.5 \n", + "e5pmmig 0.8 \n", + "e64l6vq 0.6 \n", + "e6fjx0d 0.8 \n", + "e5h3xyy 0.5 \n", + "e589ri5 0.4 \n", + "e5beuqa 0.6 \n", + "e5lqoj1 0.5 \n", + "e5kvch1 0.7 \n", + "e6srvwm 0.5 \n", + "e5o65mk 0.8 \n", + "e647cm8 0.4 \n", + "e58n526 0.1 \n", + "e69r2kg 0.6 \n", "\n", - " count[incoming triads over mid-thread] \\\n", - "e5hm9mp 3.0 \n", - "e5ytz1d 4.0 \n", - "e6ls80j 3.0 \n", - "e5mhgl5 2.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 2.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 6.0 \n", - "e5ua84v 7.0 \n", + " prop-nonzero[outdegree over C->C mid-thread responses] \\\n", + "e6p7yrp 0.666667 \n", + "e5ywqyk 0.750000 \n", + "e5qv9rj 0.666667 \n", + "e6jhojf 0.800000 \n", + "e6989ii 1.000000 \n", + "e69lgse 0.714286 \n", + "e5kwkg2 0.666667 \n", + "e6mehe7 0.500000 \n", + "e6m0hsd 1.000000 \n", + "e64r385 0.857143 \n", + "e5surbt 0.444444 \n", + "e58gxii 0.833333 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.857143 \n", + "e5borjq 0.875000 \n", + "e64n9zv 1.000000 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.500000 \n", + "e6q9204 1.000000 \n", + "e5modd7 0.888889 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 1.000000 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.888889 \n", + "e5d3zaa 0.833333 \n", + "e5gnjv9 0.125000 \n", + "e69gw2t 0.800000 \n", + "e5syrih 1.000000 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 0.600000 \n", + "... ... \n", + "e5smhzk 0.666667 \n", + "e5v91s0 0.444444 \n", + "e6n6di6 0.875000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.625000 \n", + "e6x5he5 0.200000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 0.833333 \n", + "e5b8sj7 0.750000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 1.000000 \n", + "e57a6qq 1.000000 \n", + "e5qc7eb 1.000000 \n", + "e6hqt5y 0.625000 \n", + "e5ua84v 1.000000 \n", + "e65m7kq 0.625000 \n", + "e5ggtru 1.000000 \n", + "e5pmmig 1.000000 \n", + "e64l6vq 0.666667 \n", + "e6fjx0d 0.666667 \n", + "e5h3xyy 0.625000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 1.000000 \n", + "e5lqoj1 0.333333 \n", + "e5kvch1 0.833333 \n", + "e6srvwm 0.750000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 0.555556 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.750000 \n", "\n", - " is-present[outgoing triads over mid-thread] \\\n", - "e5hm9mp 1.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 0.0 \n", + " prop-nonzero[outdegree over C->C responses] \\\n", + "e6p7yrp 1.000000 \n", + "e5ywqyk 1.000000 \n", + "e5qv9rj 0.900000 \n", + "e6jhojf 1.000000 
\n", + "e6989ii 1.000000 \n", + "e69lgse 1.000000 \n", + "e5kwkg2 0.900000 \n", + "e6mehe7 1.000000 \n", + "e6m0hsd 1.000000 \n", + "e64r385 0.875000 \n", + "e5surbt 0.900000 \n", + "e58gxii 1.000000 \n", + "e64vc8y 0.900000 \n", + "e57504g 0.875000 \n", + "e5borjq 1.000000 \n", + "e64n9zv 1.000000 \n", + "e582ud3 0.900000 \n", + "e64i9cf 1.000000 \n", + "e6q9204 0.833333 \n", + "e5modd7 0.900000 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 1.000000 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.900000 \n", + "e5d3zaa 0.857143 \n", + "e5gnjv9 0.888889 \n", + "e69gw2t 1.000000 \n", + "e5syrih 1.000000 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.000000 \n", "... ... \n", - "e65ca8k 0.0 \n", - "e6cdkpy 0.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 1.0 \n", - "e5ua84v 1.0 \n", + "e5smhzk 1.000000 \n", + "e5v91s0 0.900000 \n", + "e6n6di6 0.888889 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.000000 \n", + "e6x5he5 1.000000 \n", + "e6l9uyf 0.888889 \n", + "e57hyr1 0.857143 \n", + "e5b8sj7 1.000000 \n", + "e6nlep7 0.900000 \n", + "e6ltazd 0.833333 \n", + "e57a6qq 1.000000 \n", + "e5qc7eb 1.000000 \n", + "e6hqt5y 0.888889 \n", + "e5ua84v 0.857143 \n", + "e65m7kq 0.888889 \n", + "e5ggtru 0.888889 \n", + "e5pmmig 0.800000 \n", + "e64l6vq 1.000000 \n", + "e6fjx0d 0.750000 \n", + "e5h3xyy 0.888889 \n", + "e589ri5 0.900000 \n", + "e5beuqa 0.833333 \n", + "e5lqoj1 0.857143 \n", + "e5kvch1 0.857143 \n", + "e6srvwm 0.888889 \n", + "e5o65mk 1.000000 \n", + "e647cm8 1.000000 \n", + "e58n526 0.900000 \n", + "e69r2kg 1.000000 \n", "\n", - " count[outgoing triads over mid-thread] \n", - "e5hm9mp 3.0 \n", - "e5ytz1d 0.0 \n", - "e6ls80j 1.0 \n", - "e5mhgl5 1.0 \n", - "e6w6fah 0.0 \n", - "... ... 
\n", - "e65ca8k 0.0 \n", - "e6cdkpy 0.0 \n", - "e5wc4tj 0.0 \n", - "e6ua0sb 2.0 \n", - "e5ua84v 2.0 \n", + " prop-nonzero[outdegree over C->c mid-thread responses] \\\n", + "e6p7yrp 0.666667 \n", + "e5ywqyk 0.750000 \n", + "e5qv9rj 0.666667 \n", + "e6jhojf 0.800000 \n", + "e6989ii 1.000000 \n", + "e69lgse 0.714286 \n", + "e5kwkg2 0.666667 \n", + "e6mehe7 0.500000 \n", + "e6m0hsd 1.000000 \n", + "e64r385 0.857143 \n", + "e5surbt 0.444444 \n", + "e58gxii 0.833333 \n", + "e64vc8y 0.333333 \n", + "e57504g 0.857143 \n", + "e5borjq 0.875000 \n", + "e64n9zv 1.000000 \n", + "e582ud3 0.333333 \n", + "e64i9cf 0.500000 \n", + "e6q9204 1.000000 \n", + "e5modd7 0.888889 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 1.000000 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.888889 \n", + "e5d3zaa 0.833333 \n", + "e5gnjv9 0.125000 \n", + "e69gw2t 0.800000 \n", + "e5syrih 1.000000 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 0.600000 \n", + "... ... \n", + "e5smhzk 0.666667 \n", + "e5v91s0 0.444444 \n", + "e6n6di6 0.875000 \n", + "e6iqq30 0.333333 \n", + "e5bfad7 0.625000 \n", + "e6x5he5 0.200000 \n", + "e6l9uyf 0.500000 \n", + "e57hyr1 0.833333 \n", + "e5b8sj7 0.750000 \n", + "e6nlep7 0.000000 \n", + "e6ltazd 1.000000 \n", + "e57a6qq 1.000000 \n", + "e5qc7eb 1.000000 \n", + "e6hqt5y 0.625000 \n", + "e5ua84v 1.000000 \n", + "e65m7kq 0.625000 \n", + "e5ggtru 1.000000 \n", + "e5pmmig 1.000000 \n", + "e64l6vq 0.666667 \n", + "e6fjx0d 0.666667 \n", + "e5h3xyy 0.625000 \n", + "e589ri5 0.333333 \n", + "e5beuqa 1.000000 \n", + "e5lqoj1 0.333333 \n", + "e5kvch1 0.833333 \n", + "e6srvwm 0.750000 \n", + "e5o65mk 1.000000 \n", + "e647cm8 0.555556 \n", + "e58n526 0.000000 \n", + "e69r2kg 0.750000 \n", + "\n", + " prop-nonzero[outdegree over C->c responses] \n", + "e6p7yrp 1.000000 \n", + "e5ywqyk 1.000000 \n", + "e5qv9rj 0.900000 \n", + "e6jhojf 1.000000 \n", + "e6989ii 1.000000 \n", + "e69lgse 1.000000 \n", + "e5kwkg2 0.900000 \n", + "e6mehe7 1.000000 \n", + "e6m0hsd 1.000000 \n", + "e64r385 0.875000 \n", + 
"e5surbt 0.900000 \n", + "e58gxii 1.000000 \n", + "e64vc8y 0.900000 \n", + "e57504g 0.875000 \n", + "e5borjq 1.000000 \n", + "e64n9zv 1.000000 \n", + "e582ud3 0.900000 \n", + "e64i9cf 1.000000 \n", + "e6q9204 0.833333 \n", + "e5modd7 0.900000 \n", + "e5xhbyd 1.000000 \n", + "e5oaf7h 1.000000 \n", + "e6nir3u 1.000000 \n", + "e6c3xdn 0.900000 \n", + "e5d3zaa 0.857143 \n", + "e5gnjv9 0.888889 \n", + "e69gw2t 1.000000 \n", + "e5syrih 1.000000 \n", + "e5sa2yf 1.000000 \n", + "e6ai7z5 1.000000 \n", + "... ... \n", + "e5smhzk 1.000000 \n", + "e5v91s0 0.900000 \n", + "e6n6di6 0.888889 \n", + "e6iqq30 1.000000 \n", + "e5bfad7 1.000000 \n", + "e6x5he5 1.000000 \n", + "e6l9uyf 0.888889 \n", + "e57hyr1 0.857143 \n", + "e5b8sj7 1.000000 \n", + "e6nlep7 0.900000 \n", + "e6ltazd 0.833333 \n", + "e57a6qq 1.000000 \n", + "e5qc7eb 1.000000 \n", + "e6hqt5y 0.888889 \n", + "e5ua84v 0.857143 \n", + "e65m7kq 0.888889 \n", + "e5ggtru 0.888889 \n", + "e5pmmig 0.800000 \n", + "e64l6vq 1.000000 \n", + "e6fjx0d 0.750000 \n", + "e5h3xyy 0.888889 \n", + "e589ri5 0.900000 \n", + "e5beuqa 0.833333 \n", + "e5lqoj1 0.857143 \n", + "e5kvch1 0.857143 \n", + "e6srvwm 0.888889 \n", + "e5o65mk 1.000000 \n", + "e647cm8 1.000000 \n", + "e58n526 0.900000 \n", + "e69r2kg 1.000000 \n", "\n", "[10000 rows x 140 columns]" ] }, - "execution_count": 21, + "execution_count": 27, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "threads_corpus.get_vector_matrix(\"hyperconvo\").to_dataframe()" + "threads_corpus.get_vector_matrix('hyperconvo').to_dataframe()" ] }, { @@ -4752,16 +18766,16 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 28, "metadata": {}, "outputs": [], "source": [ - "df = threads_corpus.get_vector_matrix(\"hyperconvo\").to_dataframe()" + "df = threads_corpus.get_vector_matrix('hyperconvo').to_dataframe()" ] }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 40, "metadata": {}, "outputs": [], "source": [ @@ -4771,7 +18785,7 @@ }, { 
"cell_type": "code", - "execution_count": 24, + "execution_count": 43, "metadata": {}, "outputs": [], "source": [ @@ -4780,25 +18794,25 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 44, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "['max[indegree over c->c responses]',\n", - " 'argmax[indegree over c->c responses]',\n", - " 'norm.max[indegree over c->c responses]',\n", - " '2nd-largest[indegree over c->c responses]',\n", + "['2nd-argmax[indegree over C->C mid-thread responses]',\n", + " '2nd-argmax[indegree over C->C responses]',\n", + " '2nd-argmax[indegree over C->c mid-thread responses]',\n", + " '2nd-argmax[indegree over C->c responses]',\n", + " '2nd-argmax[indegree over c->c mid-thread responses]',\n", " '2nd-argmax[indegree over c->c responses]',\n", - " 'norm.2nd-largest[indegree over c->c responses]',\n", - " 'mean[indegree over c->c responses]',\n", - " 'mean-nonzero[indegree over c->c responses]',\n", - " 'prop-nonzero[indegree over c->c responses]',\n", - " 'prop-multiple[indegree over c->c responses]']" + " '2nd-argmax[outdegree over C->C mid-thread responses]',\n", + " '2nd-argmax[outdegree over C->C responses]',\n", + " '2nd-argmax[outdegree over C->c mid-thread responses]',\n", + " '2nd-argmax[outdegree over C->c responses]']" ] }, - "execution_count": 25, + "execution_count": 44, "metadata": {}, "output_type": "execute_result" } @@ -4816,38 +18830,38 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 45, "metadata": {}, "outputs": [], "source": [ - "C_C_indegree_feats = [x for x in feat_names if \"indegree over C->C responses\" in x]\n", - "C_C_mid_outdegree_feats = [x for x in feat_names if \"outdegree over C->C mid-thread responses\" in x]\n", - "motif_count_feats = [x for x in feat_names if (\"count\" in x) and (\"mid\" not in x)]" + "C_C_indegree_feats = [x for x in feat_names if 'indegree over C->C responses' in x]\n", + "C_C_mid_outdegree_feats = [x for x in feat_names if 
'outdegree over C->C mid-thread responses' in x]\n", + "motif_count_feats = [x for x in feat_names if ('count' in x) and ('mid' not in x)]" ] }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 48, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "['max[indegree over C->C responses]',\n", - " 'argmax[indegree over C->C responses]',\n", - " 'norm.max[indegree over C->C responses]',\n", + "['2nd-argmax[indegree over C->C responses]',\n", + " '2nd-largest / max[indegree over C->C responses]',\n", " '2nd-largest[indegree over C->C responses]',\n", - " '2nd-argmax[indegree over C->C responses]',\n", - " 'norm.2nd-largest[indegree over C->C responses]',\n", - " 'mean[indegree over C->C responses]',\n", + " 'argmax[indegree over C->C responses]',\n", + " 'entropy[indegree over C->C responses]',\n", + " 'max[indegree over C->C responses]',\n", " 'mean-nonzero[indegree over C->C responses]',\n", - " 'prop-nonzero[indegree over C->C responses]',\n", + " 'mean[indegree over C->C responses]',\n", + " 'norm.2nd-largest[indegree over C->C responses]',\n", + " 'norm.max[indegree over C->C responses]',\n", " 'prop-multiple[indegree over C->C responses]',\n", - " 'entropy[indegree over C->C responses]',\n", - " '2nd-largest / max[indegree over C->C responses]']" + " 'prop-nonzero[indegree over C->C responses]']" ] }, - "execution_count": 27, + "execution_count": 48, "metadata": {}, "output_type": "execute_result" } @@ -4858,20 +18872,20 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 49, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "['count[reciprocity motif]',\n", + "['count[dyadic interaction motif]',\n", " 'count[external reciprocity motif]',\n", - " 'count[dyadic interaction motif]',\n", " 'count[incoming triads]',\n", - " 'count[outgoing triads]']" + " 'count[outgoing triads]',\n", + " 'count[reciprocity motif]']" ] }, - "execution_count": 28, + "execution_count": 49, "metadata": {}, "output_type": 
"execute_result" } @@ -4893,7 +18907,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 51, "metadata": { "scrolled": true }, @@ -5004,7 +19018,7 @@ "prop-nonzero[indegree over C->C responses] 0.666667 1.000000" ] }, - "execution_count": 29, + "execution_count": 51, "metadata": {}, "output_type": "execute_result" } @@ -5022,7 +19036,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 52, "metadata": {}, "outputs": [ { @@ -5131,7 +19145,7 @@ "prop-nonzero[outdegree over C->C mid-thread res... 0.833333 1.000000" ] }, - "execution_count": 30, + "execution_count": 52, "metadata": {}, "output_type": "execute_result" } @@ -5149,7 +19163,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 53, "metadata": {}, "outputs": [ { @@ -5216,7 +19230,7 @@ "count[reciprocity motif] 3.0 8.0" ] }, - "execution_count": 31, + "execution_count": 53, "metadata": {}, "output_type": "execute_result" } @@ -5234,7 +19248,7 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 54, "metadata": {}, "outputs": [], "source": [ @@ -5247,7 +19261,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 55, "metadata": {}, "outputs": [], "source": [ @@ -5257,20 +19271,20 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 56, "metadata": {}, "outputs": [], "source": [ - "svd = TruncatedSVD(\n", - " n_components=7, algorithm=\"arpack\"\n", - ") # deals with an issue where the randomized alg hangs\n", + "svd = TruncatedSVD(n_components=7, algorithm='arpack') # deals with an issue where the randomized alg hangs\n", "svd.fit(feat_mtx)\n", - "U, s, V = svd.transform(feat_mtx) / svd.singular_values_, svd.singular_values_, svd.components_.T" + "U, s, V = svd.transform(feat_mtx) / svd.singular_values_, \\\n", + " svd.singular_values_, \\\n", + " svd.components_.T" ] }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 58, "metadata": {}, "outputs": [], 
"source": [ @@ -5289,7 +19303,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 59, "metadata": {}, "outputs": [ { @@ -5368,7 +19382,7 @@ "6 -0.605281 0.017527" ] }, - "execution_count": 36, + "execution_count": 59, "metadata": {}, "output_type": "execute_result" } @@ -5386,7 +19400,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 60, "metadata": {}, "outputs": [ { @@ -5418,17 +19432,37 @@ " 5\n", " 6\n", " \n", - " \n", - " \n", + " \n", + " \n", + " \n", + " 2nd-argmax[indegree over C->C responses]\n", + " 0.139340\n", + " 0.113322\n", + " 0.106261\n", + " -0.316196\n", + " 0.261848\n", + " 0.887307\n", + " -0.024356\n", + " \n", + " \n", + " 2nd-largest / max[indegree over C->C responses]\n", + " -0.189063\n", + " -0.433123\n", + " -0.472973\n", + " -0.252028\n", + " 0.338874\n", + " 0.598865\n", + " 0.126351\n", + " \n", " \n", - " max[indegree over C->C responses]\n", - " 0.574040\n", - " 0.331249\n", - " 0.451891\n", - " 0.298591\n", - " -0.098847\n", - " -0.345627\n", - " -0.371700\n", + " 2nd-largest[indegree over C->C responses]\n", + " 0.264775\n", + " -0.595095\n", + " -0.177363\n", + " -0.032607\n", + " 0.369844\n", + " 0.636419\n", + " 0.037712\n", " \n", " \n", " argmax[indegree over C->C responses]\n", @@ -5441,44 +19475,34 @@ " 0.001900\n", " \n", " \n", - " norm.max[indegree over C->C responses]\n", - " 0.136265\n", - " 0.633611\n", - " -0.054505\n", - " 0.660700\n", - " 0.089491\n", - " -0.360409\n", - " -0.050698\n", - " \n", - " \n", - " 2nd-largest[indegree over C->C responses]\n", - " 0.264775\n", - " -0.595095\n", - " -0.177363\n", - " -0.032607\n", - " 0.369844\n", - " 0.636419\n", - " 0.037712\n", + " entropy[indegree over C->C responses]\n", + " -0.001602\n", + " -0.493754\n", + " 0.383985\n", + " -0.657503\n", + " -0.376240\n", + " 0.178189\n", + " 0.056053\n", " \n", " \n", - " 2nd-argmax[indegree over C->C responses]\n", - " 0.139340\n", - " 0.113322\n", - " 0.106261\n", - " 
-0.316196\n", - " 0.261848\n", - " 0.887307\n", - " -0.024356\n", + " max[indegree over C->C responses]\n", + " 0.574040\n", + " 0.331249\n", + " 0.451891\n", + " 0.298591\n", + " -0.098847\n", + " -0.345627\n", + " -0.371700\n", " \n", " \n", - " norm.2nd-largest[indegree over C->C responses]\n", - " -0.206815\n", - " -0.210187\n", - " -0.667660\n", - " 0.126509\n", - " 0.418324\n", - " 0.523214\n", - " 0.050272\n", + " mean-nonzero[indegree over C->C responses]\n", + " 0.561474\n", + " 0.246293\n", + " 0.073209\n", + " 0.492266\n", + " 0.272666\n", + " -0.249315\n", + " -0.489793\n", " \n", " \n", " mean[indegree over C->C responses]\n", @@ -5491,24 +19515,24 @@ " -0.522766\n", " \n", " \n", - " mean-nonzero[indegree over C->C responses]\n", - " 0.561474\n", - " 0.246293\n", - " 0.073209\n", - " 0.492266\n", - " 0.272666\n", - " -0.249315\n", - " -0.489793\n", + " norm.2nd-largest[indegree over C->C responses]\n", + " -0.206815\n", + " -0.210187\n", + " -0.667660\n", + " 0.126509\n", + " 0.418324\n", + " 0.523214\n", + " 0.050272\n", " \n", " \n", - " prop-nonzero[indegree over C->C responses]\n", - " -0.945359\n", - " -0.101535\n", - " 0.238357\n", - " -0.078078\n", - " -0.107471\n", - " 0.146344\n", - " -0.010486\n", + " norm.max[indegree over C->C responses]\n", + " 0.136265\n", + " 0.633611\n", + " -0.054505\n", + " 0.660700\n", + " 0.089491\n", + " -0.360409\n", + " -0.050698\n", " \n", " \n", " prop-multiple[indegree over C->C responses]\n", @@ -5521,24 +19545,14 @@ " -0.415719\n", " \n", " \n", - " entropy[indegree over C->C responses]\n", - " -0.001602\n", - " -0.493754\n", - " 0.383985\n", - " -0.657503\n", - " -0.376240\n", - " 0.178189\n", - " 0.056053\n", - " \n", - " \n", - " 2nd-largest / max[indegree over C->C responses]\n", - " -0.189063\n", - " -0.433123\n", - " -0.472973\n", - " -0.252028\n", - " 0.338874\n", - " 0.598865\n", - " 0.126351\n", + " prop-nonzero[indegree over C->C responses]\n", + " -0.945359\n", + " -0.101535\n", + " 0.238357\n", 
+ " -0.078078\n", + " -0.107471\n", + " 0.146344\n", + " -0.010486\n", " \n", " \n", "\n", @@ -5546,49 +19560,49 @@ ], "text/plain": [ " 0 1 2 \\\n", - "max[indegree over C->C responses] 0.574040 0.331249 0.451891 \n", - "argmax[indegree over C->C responses] 0.021994 -0.548696 -0.072024 \n", - "norm.max[indegree over C->C responses] 0.136265 0.633611 -0.054505 \n", - "2nd-largest[indegree over C->C responses] 0.264775 -0.595095 -0.177363 \n", "2nd-argmax[indegree over C->C responses] 0.139340 0.113322 0.106261 \n", - "norm.2nd-largest[indegree over C->C responses] -0.206815 -0.210187 -0.667660 \n", - "mean[indegree over C->C responses] -0.303707 -0.230767 0.639219 \n", + "2nd-largest / max[indegree over C->C responses] -0.189063 -0.433123 -0.472973 \n", + "2nd-largest[indegree over C->C responses] 0.264775 -0.595095 -0.177363 \n", + "argmax[indegree over C->C responses] 0.021994 -0.548696 -0.072024 \n", + "entropy[indegree over C->C responses] -0.001602 -0.493754 0.383985 \n", + "max[indegree over C->C responses] 0.574040 0.331249 0.451891 \n", "mean-nonzero[indegree over C->C responses] 0.561474 0.246293 0.073209 \n", - "prop-nonzero[indegree over C->C responses] -0.945359 -0.101535 0.238357 \n", + "mean[indegree over C->C responses] -0.303707 -0.230767 0.639219 \n", + "norm.2nd-largest[indegree over C->C responses] -0.206815 -0.210187 -0.667660 \n", + "norm.max[indegree over C->C responses] 0.136265 0.633611 -0.054505 \n", "prop-multiple[indegree over C->C responses] 0.355365 -0.438412 -0.063833 \n", - "entropy[indegree over C->C responses] -0.001602 -0.493754 0.383985 \n", - "2nd-largest / max[indegree over C->C responses] -0.189063 -0.433123 -0.472973 \n", + "prop-nonzero[indegree over C->C responses] -0.945359 -0.101535 0.238357 \n", "\n", " 3 4 5 \\\n", - "max[indegree over C->C responses] 0.298591 -0.098847 -0.345627 \n", - "argmax[indegree over C->C responses] 0.348774 -0.487366 -0.578004 \n", - "norm.max[indegree over C->C responses] 0.660700 0.089491 
-0.360409 \n", - "2nd-largest[indegree over C->C responses] -0.032607 0.369844 0.636419 \n", "2nd-argmax[indegree over C->C responses] -0.316196 0.261848 0.887307 \n", - "norm.2nd-largest[indegree over C->C responses] 0.126509 0.418324 0.523214 \n", - "mean[indegree over C->C responses] 0.266533 0.317526 -0.027597 \n", + "2nd-largest / max[indegree over C->C responses] -0.252028 0.338874 0.598865 \n", + "2nd-largest[indegree over C->C responses] -0.032607 0.369844 0.636419 \n", + "argmax[indegree over C->C responses] 0.348774 -0.487366 -0.578004 \n", + "entropy[indegree over C->C responses] -0.657503 -0.376240 0.178189 \n", + "max[indegree over C->C responses] 0.298591 -0.098847 -0.345627 \n", "mean-nonzero[indegree over C->C responses] 0.492266 0.272666 -0.249315 \n", - "prop-nonzero[indegree over C->C responses] -0.078078 -0.107471 0.146344 \n", + "mean[indegree over C->C responses] 0.266533 0.317526 -0.027597 \n", + "norm.2nd-largest[indegree over C->C responses] 0.126509 0.418324 0.523214 \n", + "norm.max[indegree over C->C responses] 0.660700 0.089491 -0.360409 \n", "prop-multiple[indegree over C->C responses] 0.156168 0.692934 -0.008221 \n", - "entropy[indegree over C->C responses] -0.657503 -0.376240 0.178189 \n", - "2nd-largest / max[indegree over C->C responses] -0.252028 0.338874 0.598865 \n", + "prop-nonzero[indegree over C->C responses] -0.078078 -0.107471 0.146344 \n", "\n", " 6 \n", - "max[indegree over C->C responses] -0.371700 \n", - "argmax[indegree over C->C responses] 0.001900 \n", - "norm.max[indegree over C->C responses] -0.050698 \n", - "2nd-largest[indegree over C->C responses] 0.037712 \n", "2nd-argmax[indegree over C->C responses] -0.024356 \n", - "norm.2nd-largest[indegree over C->C responses] 0.050272 \n", - "mean[indegree over C->C responses] -0.522766 \n", + "2nd-largest / max[indegree over C->C responses] 0.126351 \n", + "2nd-largest[indegree over C->C responses] 0.037712 \n", + "argmax[indegree over C->C responses] 0.001900 \n", + 
"entropy[indegree over C->C responses] 0.056053 \n", + "max[indegree over C->C responses] -0.371700 \n", "mean-nonzero[indegree over C->C responses] -0.489793 \n", - "prop-nonzero[indegree over C->C responses] -0.010486 \n", + "mean[indegree over C->C responses] -0.522766 \n", + "norm.2nd-largest[indegree over C->C responses] 0.050272 \n", + "norm.max[indegree over C->C responses] -0.050698 \n", "prop-multiple[indegree over C->C responses] -0.415719 \n", - "entropy[indegree over C->C responses] 0.056053 \n", - "2nd-largest / max[indegree over C->C responses] 0.126351 " + "prop-nonzero[indegree over C->C responses] -0.010486 " ] }, - "execution_count": 37, + "execution_count": 60, "metadata": {}, "output_type": "execute_result" } @@ -5606,18 +19620,16 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 62, "metadata": {}, "outputs": [], "source": [ - "subreddits = [\n", - " convo.meta[\"original_convo_meta\"][\"subreddit\"] for convo in threads_corpus.iter_conversations()\n", - "]" + "subreddits = [convo.meta['original_convo_meta']['subreddit'] for convo in threads_corpus.iter_conversations()]" ] }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 63, "metadata": {}, "outputs": [ { @@ -5626,7 +19638,7 @@ "10000" ] }, - "execution_count": 39, + "execution_count": 63, "metadata": {}, "output_type": "execute_result" } @@ -5637,28 +19649,29 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 64, "metadata": {}, "outputs": [], "source": [ - "U_df[\"subreddit\"] = subreddits" + "U_df['subreddit'] = subreddits" ] }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 65, "metadata": {}, "outputs": [], "source": [ - "subreddit_means = U_df.groupby(\"subreddit\").mean()\n", + "subreddit_means = U_df.groupby('subreddit').mean()\n", "subreddit_df = pd.DataFrame(\n", - " data=Normalizer().fit_transform(subreddit_means.values), index=subreddit_means.index\n", - ")" + " 
data=Normalizer().fit_transform(subreddit_means.values),\n", + " index = subreddit_means.index\n", + " )" ] }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 66, "metadata": {}, "outputs": [ { @@ -5774,7 +19787,7 @@ "AskReddit -0.316899 " ] }, - "execution_count": 42, + "execution_count": 66, "metadata": {}, "output_type": "execute_result" } @@ -5792,37 +19805,40 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": 67, "metadata": {}, "outputs": [], "source": [ "tsne = TSNE(random_state=2018)\n", - "tsne_df = pd.DataFrame(data=tsne.fit_transform(subreddit_df.values), index=subreddit_df.index)" + "tsne_df = pd.DataFrame(data=tsne.fit_transform(subreddit_df.values),\n", + " index=subreddit_df.index)" ] }, { "cell_type": "code", - "execution_count": 44, + "execution_count": 68, "metadata": { "scrolled": false }, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAA4sAAAMtCAYAAAA2VuSQAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOzdd1xX1f/A8deHvacKqCgooOAEceDIkQMHOdNQE3JUjtyj5UDLva2vlSmYmmam5sitOHAPTAUXDixxoaIoQ+D8/uDHzY+Ao9y+n48Hj/zce+65594P0OfNOff91imlFEIIIYQQQgghxH0MXvQAhBBCCCGEEEK8fCRYFEIIIYQQQgiRiwSLQgghhBBCCCFykWBRCCGEEEIIIUQuEiwKIYQQQgghhMhFgkUhhBBCCCGEELlIsCiEEEIIIYQQIhejFz2Ah8nKyuLixYtYW1uj0+le9HCEEEIIIYQQL4hSitu3b1O4cGEMDGTO63l4qYPFixcv4urq+qKHIYQQQgghhHhJXLhwgaJFi77oYbwRXupg0draGsj+hrCxsXnBoxFCCCGEEEK8KLdu3cLV1VWLEcSz91IHizlLT21sbCRYFEIIIYQQQsjjac+RLPYVQgghhBBCCJGLBItCCCGEEEIIIXKRYFEIIYQQQgghRC4SLAohhBBCCCGEyEWCRSGEEEIIIYQQuUiwKIQQQgghhBAiFwkWhRBCCCGEEELkIsGiEEIIIYQQQohcJFgUQgghhBBCCJGLBItCCCGEEEIIIXKRYFEIIYQQQgghRC4SLAohhBBCCCGEyEWCRSGEEEIIIYQQuUiwKIQQQgghhBAiFwkWhRBCCCGEEELkIsGiEEIIIYQQQohcJFgUQgghhBBCCJGLBItCCCGEEEIIIXKRYFEIIYQQQgghRC4SLAohhBBCCCGEyEWCRSGEEEIIIYQQuUiwKIQQQgghhBAiFwkWhRBCPDWhoaG0aNHiqfer0+lYvnz5U+9XCCGEEPkzetEDEEIIIR4lISEBe3v7Fz0MIYQQ4o0iwaIQQoiXVnp6OiYmJjg7O7/ooQghhBBvHFmGKoQQ4oktWbKEcuXKYW5ujqOjI/
Xr1+fOnTu52u3bt4+CBQsybtw4OnfuTLNmzfT237t3j0KFCjF79mwA6tSpQ69evejbty8FChSgUaNGgP4y1HPnzqHT6Vi8eDG1atXC3NycypUrc/LkSfbt24e/vz9WVlY0btyYq1evPtsbIYQQQrzGJFgUQgjxRBISEggODqZz587ExsYSGRlJq1atUErptdu8eTMNGjTg66+/ZsiQIXTt2pW1a9eSkJCgtVm1ahV3796lXbt22ra5c+diYmJCVFQU3333Xb7jGD58OF9++SUHDx7EyMiI9u3bM3jwYKZNm8b27ds5ffo0w4YNe/o3QAghhHhDyDJUIYQQjyUzS7H37HV27T1MRkYGzVu0xM3NDYBy5crptV22bBmdOnXixx9/1ALB6tWrU6pUKebNm8fgwYMBCA8P591338XKyko71tPTk/Hjxz9yPAMHDtRmHvv06UNwcDCbNm2iRo0aAHTp0oWIiIj/etlCCCHEG0tmFoUQQjzS2qMJ1By3meBZu5l2KB2z4hXw8i5DrYZBzJo1ixs3bmht9+zZw7vvvsu8efP0ZgwBunbtSnh4OACXL19mzZo1dO7cWa9NpUqVHmtM5cuX1/7t5OQE6AetTk5OXLly5ckuVAghhBAaCRaFEEI81NqjCXSff5CEpFQAdAaGFGr3FQXbjODIHWtGT5hCqVKlOHv2LAAlS5akdOnSzJkzh3v37un11alTJ86cOcOuXbuYP38+7u7u1KpVS6+NpaXlY43L2NhY+7dOp8tzW1ZW1pNfsBBCCCEACRaFEEI8RGaWImxlDOqB7TqdDtOiPtjX6oBTyFRMTExYtmwZAAUKFGDz5s2cPn2atm3b6gWMjo6OtGjRgvDwcCIiIvjggw+e49UIIYQQ4klIsCiEECJfe89e12YUc6RdPEHSrsWkJZzi3q0rxO3bwpUrV/H29tbaFCpUiM2bN3P8+HGCg4PJyMjQ9nXt2pW5c+cSGxtLSEjIc7sWIYQQQjwZCRaFEELk68rt1FzbDEwsSL1wlCtLRvD3Dx9xc/s8QvsPpXHjxnrtnJ2d2bx5M0eOHKFDhw5kZmYCUL9+fVxcXGjUqBGFCxd+LtchhBBCiCenUw/mOn+J3Lp1C1tbW5KSkrCxsXnRwxFCiDfOrrhEgmftfmS7hd2qEVDS8bH6TE5OpkiRIoSHh9OqVav/OkQhhBBvCIkNnj+ZWRRCCJGvKu4OuNiaoctnvw5wsTWjirvDI/vKysriypUrjBo1Cjs7O955552nOlYhhBBCPF0SLAohhMiXoYGO4UE+ALkCxpzXw4N8MDTIL5z8R3x8PE5OTvz888/MmTMHIyMp9SuEEEK8zOT/1EIIIR4qsKwLMzv6EbYyRi/ZjbOtGcODfAgs6/JY/bi5ufESP/kghBBCiAdIsCiEEOKRAsu60MDHmb1nr3PldiqFrLOXnj7OjKIQQgghXk0SLAohhHgshga6x05iI4QQQohXnzyzKIQQQgghhBAiFwkWhRBCCCGEEELkIsGiEEIIIYQQQohcJFgUQgghhBBCCJGLBItCCCGEEEIIIXKRYFEIIYQQQgghRC4SLAohhHioqKgoypUrh7GxMS1atHjRwxFCCCHEcyLBohBCCE2dOnXo27ev3rb+/ftTsWJFzp49S0REBIcPHyY4OBhXV1fMzc3x9vZm2rRpL2bAQgghhHhmjF70AIQQQrzc4uLi+PjjjylatCgAS5cupVChQsyfPx9XV1d27tzJhx9+iKGhIb169XrBoxVCCCHE06JTSqkXPYj83Lp1C1tbW5KSkrCxsXnRwxFCiNdaaGgoc+fOfWib8PBwQkNDc23v2bMnsbGxbN68+RmNTgghxJtOYoPnT5ahCiGEAGDatGkEBATQrVs3EhIS+Ouvv/jrr7+wsbFh6tSpJCQk0K5duzyPTUpKwsHB4TmPWAghhBDPkixDFUIIAYCtrS0mJiZYWFjg7OysbdfpdNja2uptu9/OnTv55ZdfWL169fMaqh
BCCCGeAwkWhRDiDZeZpdh79jpXbqdyK+UeT/J0wtGjR2nevDnDhw+nYcOGz3CUQgghhHjeJFgUQog32NqjCYStjCEhKRWASwm3SNj/F42PJhBY1uWhx8bExPD222/z4Ycf8uWXXz6P4QohhBDiOZJnFoUQ4g219mgC3ecf1AJFAJ2hMXdS0+k+/yBrjybke+yxY8eoW7cuISEhfP31189juEIIIYR4ziRYFEKIN1BmliJsZQwPLjg1si1EWsIJ7iVd5stFu7iXkZnr2KNHj1K3bl0aNmxI//79uXTpEpcuXeLq1avPZ/BCCCGEeC4kWBRCiDfQ3rPX9WYUc9hUaQU6Ay7+2IMDX7dmZdSfudosWbKEq1evMn/+fFxcXLSvypUrP4+hCyGEEOI5kTqLQgjxBvo9+m/6LIp+ZLtp71WkecUiz35AQgghxCNIbPD8PfOZxb///puOHTvi6OiIubk55cqVY//+/c/6tEIIIR6ikLXZU20nhBBCiNfPM82GeuPGDWrUqEHdunVZs2YNBQsW5NSpU9jb2z/L0wohhHiEKu4OuNiacSkpNddziwA6wNnWjCruDs97aEIIIYR4STzTYHHcuHG4uroSHh6ubXN3d3+WpxRCCPEYDA10DA/yofv8g+hAL2DU/f9/hwf5YGigy+NoIYQQQrwJnuky1BUrVuDv78+7775LoUKF8PX1ZdasWfm2T0tL49atW3pfQgghno3Asi7M7OiHs63+UlNnWzNmdvR7ZJ1FIYQQQrzenmmCGzOz7A8g/fv3591332Xfvn306dOH7777jpCQkFztR4wYQVhYWK7t8hCrEEI8O5lZir1nr3PldiqFrLOXnsqMohBCiJeNJLh5/p5psGhiYoK/vz87d+7UtvXu3Zt9+/axa9euXO3T0tJIS0vTXt+6dQtXV1f5hhBCCCGEEOINJ8Hi8/dMl6G6uLjg4+Ojt83b25v4+Pg825uammJjY6P3JYQQQgghhBDi+XumwWKNGjU4ceKE3raTJ09SvHjxZ3laIYQQL1idOnXo27fvix6GEEIIIf6DZxos9uvXj927dzN69GhOnz7Nzz//zA8//EDPnj2f5WmFEEK85JRSZGRkvOhhCCGEEOIhnmmwWLlyZZYtW8bChQspW7Yso0aNYurUqXTo0OFZnlYIIcQLFBoaytatW5k2bRo6nQ6dTkdERAQ6nY41a9ZQqVIlTE1N2bFjB1lZWYwZMwZ3d3fMzc2pUKECS5Ys0evv6NGjNG7cGCsrK5ycnHj//fe5du2atn/JkiWUK1cOc3NzHB0dqV+/Pnfu3Hnely2EEEK8dp5pgpv/Sh5iFUKIV09SUhKNGzembNmyjBw5EoBjx45Rv359ypcvz8SJEylRogT29vbMnDmT+fPnM3XqVDw9Pdm2bRsff/wx69ato3bt2ty8eRMvLy+6du1Kp06dSElJYciQIWRkZLB582YSEhIoVqwY48ePp2XLlty+fZvt27fTqVMnrKysXvCdEEII8TRJbPD8Gb3oAQghhHj1PVh+w9jEBAsLC5ydnQE4fvw4ACNHjqRBgwZAdgbs0aNHs3HjRgICAgAoUaIEO3bs4Pvvv6d27dp88803+Pr6Mnr0aO1cc+bMwdXVlZMnT5KcnExGRgatWrXSnocvV67c87x0IYQQ4rUlwaIQQoj/ZO3RBMJWxpCQlKptux5/A3vX3EtB/f39tX+fPn2au3fvasFjjvT0dHx9fQE4fPgwW7ZsyXOWMC4ujoYNG/L2229Trlw5GjVqRMOGDWnTpg329vZP6/KEEEKIN5YEi0IIIf61tUcT6D7/IA8+z5CekcXm2CusPZpAYFkXbbulpaX27+TkZABWr15NkSJF9I43NTXV2gQFBTFu3Lhc53ZxccHQ0JANGzawc+dO1q9fz4wZM/jiiy/Ys2cP7u7uT+kqhRBCiDeTBItCCCH+lcwsRdjKmFyBIoDO0BhUFmErY2jg45zn8T4+PpiamhIfH0/t2rXzbO
Pn58dvv/2Gm5sbRkZ5/y9Lp9NRo0YNatSowbBhwyhevDjLli2jf//+//bShBBCCIEEi0IIIf6lvWev6y09vZ+RbSHSEk5wIf486w+cwjQrK1cba2trBg4cSL9+/cjKyqJmzZokJSURFRWFjY0NISEh9OzZk1mzZhEcHMzgwYNxcHDg9OnTLFq0iB9//JH9+/ezadMmGjZsSKFChdizZw9Xr17F29v7WV++EEII8dp7pqUzhBBCvL6u3M47UASwqdIKdAZc/LEHTaqUIj4+Ps92o0aNYujQoYwZMwZvb28CAwNZvXq1toS0cOHCREVFkZmZScOGDSlXrhx9+/bFzs4OAwMDbGxs2LZtG02aNMHLy4svv/ySSZMm0bhx42dyzUIIIcSbREpnCCGE+Fd2xSUSPGv3I9st7FaNgJKOz2FEQgghXmcSGzx/MrMohBDiX6ni7oCLrRm6fPbrABdbM6q4OzzPYQkhhBDiKZFgUQghxL9iaKBjeJAPQK6AMef18CAfDA3yCyeFEEII8TKTYFEI8cbQ6XQsX74cgHPnzqHT6YiOjn6hY3ocL/NYA8u6MLOjH862ZnrbnW3NmNnRT69shhBCCCFeLZINVQjxWgkNDeXmzZtaUHi/hISE51asXafTsWzZMlq0aPGf+3J1dSUhIYECBQr894E9A4FlXWjg48zes9e5cjuVQtbZS09lRlEIIYR4tUmwKIR4Yzg7513v72lKT0/HxMTkqff3PMb+Xxga6CSJjRBCCPGakWWoQog3xv3LUHMcP36c6tWrY2ZmRtmyZdm6dave/qNHj9K4cWOsrKxwcnLi/fff59q1a9r+OnXq0KtXL/r27UuBAgVo1KgRbm5uALRs2RKdTqe9jouLo3nz5jg5OWFlZUXlypXZuHGj3vnc3NwYNWoUnTp1wsbGhg8//DDXMtTMzEy6dOmCu7s75ubmlCpVimnTpun1ExoaSosWLZg4cSIuLi44OjrSs2dP7t27999vpBBCCCHeCBIsCiHeaIMGDWLAgAEcOnSIgIAAgoKCSExMBODmzZvUq1cPX19f9u/fz9q1a7l8+TJt27bV62Pu3LmYmJgQFRXFd999x759+wAIDw8nISFBe52cnEyTJk3YtGkThw4dIjAwkKCgoFw1CCdOnEiFChU4dOgQQ4cOzTXmrKwsihYtyq+//kpMTAzDhg3j888/Z/HixXrttmzZQlxcHFu2bGHu3LlEREQQERHxtG6dEEIIIV5zsgxVCPFKy8xSes/KPWnl2F69etG6dWsAZs6cydq1a5k9ezaDBw/mm2++wdfXl9GjR2vt58yZg6urKydPnsTLywsAT09Pxo8fn6tvOzs7veWjFSpUoEKFCtrrUaNGsWzZMlasWEGvXr207fXq1WPAgAHa63Pnzun1a2xsTFhYmPba3d2dXbt2sXjxYr1A1t7enm+++QZDQ0NKly5N06ZN2bRpE926dXuymySEEEKIN5IEi0KIV9baowmErYwhISlV23bnaAKeto+fWCUgIED7t5GREf7+/sTGxgJw+PBhtmzZgpWVVa7j4uLitGCxUqVKj3Wu5ORkRowYwerVq0lISCAjI4OUlJRcM4v+/v6P7Ovbb79lzpw5xMfHk5KSQnp6OhUrVtRrU6ZMGQwNDbXXLi4uHDly5LHGKoQQQgghwaIQ4pW09mgC3ecf5MGJxJT0TKL/usPaown/uWxDcnIyQUFBjBs3Ltc+F5d/+ra0tHys/gYOHMiGDRuYOHEiHh4emJub06ZNG9LT0/XaPaq/RYsWMXDgQCZNmkRAQADW1tZMmDCBPXv26LUzNjbWe63T6cjKynqssQohhBBCSLAohHjlZGYpwlbG5AoU7xe2MoYGPs6PLN+we/du3nrrLQAyMjI4cOCAtiTUz8+P3377DTc3N4yMnuzXpbGxMZmZmXrboqKiCA0NpWXLlkB2MPrgEtPHERUVRfXq1enRo4e2LS4u7on7EUIIIYR4GElwI4R45ew9e1
1v6emDstLucP5kDPNXbyU6Opro6GguXLiQZ9tvv/2WZcuWcfz4cXr27MmNGzfo3LkzAD179uT69esEBwezb98+4uLiWLduHR988EGuQPBBbm5ubNq0iUuXLnHjxg0g+9nGpUuXEh0dzeHDh2nfvv2/munz9PRk//79rFu3jpMnTzJ06FAtiY4QQgghxNMiwaIQ4pVz5Xb+gSJAWvwREiJ6E/pOXXx9ffH19dVLCHO/sWPHMnbsWCpUqMCOHTtYsWIFBQoUAKBw4cJERUWRmZlJw4YNKVeuHH379sXOzg4Dg4f/+pw0aRIbNmzA1dUVX19fACZPnoy9vT3Vq1cnKCiIRo0a4efn98TX/9FHH9GqVSvatWtH1apVSUxM1JtlFEIIIYR4GnRKPWnuwOfn1q1b2NrakpSUhI2NzYsejhDiJbErLpHgWbsf2W5ht2pSKF4IIYR4TUhs8PzJzKIQ4pVTxd0BF1sz8nsaUQe42JpRxd3heQ5LCCGEEOK1IsGiEOKVY2igY3iQD0CugDHn9fAgn0cmtxFCCCGEEPmTYFEI8UoKLOvCzI5+ONua6W13tjVjZke//1w2QwghhBDiTSelM4QQr6zAsi408HFm79nrXLmdSiHr7KWnMqP4chgxYgTLly8nOjr6sY+5dOkS77//Pjt37sTY2JibN28+s/EJIYQQ4uEkWBRCvNIMDXSSxOYlNXDgQD755JMnOmbKlCkkJCQQHR2Nra3tMxqZEEIIIR6HBItCCCGeCSsrK6ysrJ7omLi4OCpVqoSnp+czGpUQQgghHpc8syiEEG+wtWvXUrNmTezs7HB0dKRZs2bExcUBkJ6eTq9evXBxccHMzIzixYszZswY7dj4+HiaN2+OlZUVNjY2tG3blsuXL2v7R4wYQcWKFbXX+/bto0GDBhQoUABbW1tq167NwYMHtf1ubm789ttv/PTTT+h0OkJDQ5/59QshhBAifxIsCiHEG+zOnTv079+f/fv3s2nTJgwMDGjZsiVZWVlMnz6dFStWsHjxYk6cOMGCBQtwc3MDICsri+bNm3P9+nW2bt3Khg0bOHPmDO3atcv3XLdv3yYkJIQdO3awe/duPD09adKkCbdv3wayg8nAwEDatm1LQkIC06ZNex63QAghhBD5kGWoQgjxBmvdurXe6zlz5lCwYEFiYmKIj4/H09OTmjVrotPpKF68uNZu06ZNHDlyhLNnz+Lq6grATz/9RJkyZdi3bx+VK1fOda569erpvf7hhx+ws7Nj69atNGvWjIIFC2Jqaoq5uTnOzs7P4Gpff+fOncPd3Z1Dhw7pzeoKIYQQ/4bMLAohxBskM0uxKy6R36P/ZldcIsdPnCQ4OJgSJUpgY2OjzRzGx8cTGhpKdHQ0pUqVonfv3qxfv17rJzY2FldXVy1QBPDx8cHOzo7Y2Ng8z3358mW6deuGp6cntra22NjYkJycTHx8/DO95lfVrl27MDQ0pGnTpk+1Xzc3N3Q6HYsWLcq1r0yZMuh0OiIiIp7qOYUQQryaZGZRCCFeoDp16lCxYkWmTp36zM+19mgCYStjSEhK1bZdnt0dH68SzJo1i8KFC5OVlUXZsmVJT0/Hz8+Ps2fPsmbNGjZu3Ejbtm0pXLgwiYmJXLlyhUKFCj3R+UNCQkhMTGTatGkUL14cU1NTAgICSE9Pf9qX+lqYPXs2n3zyCbNnz+bixYsULlz4qfXt6upKeHg47733nrZt9+7dXLp0CUtLy6d2HiGEEK82mVkUQoinLDQ0FJ1Ox8cff5xrX8+ePfWStyxdupRRo0Y98zGtPZpA9/kH9QLFzJRbpF67wEW3xtxz8sHb25sbN27oHWdjY0O7du2YNWsWEyZMIDY2lokTJ7Jo0SISExMpWrSoFujGxMRw8+ZNfHx88hxDVFQUvXv3pkmTJpQpUwZTU1OuXbv2zK75VZacnMwvv/xC9+7dadq0qd5M340bN+jQoQMFCxbE3NwcT09PwsPD8+wnMzOTzp07U7
p0ab0Z3A4dOrB161YuXLigbZszZw4dOnTAyEj/78g3b96ka9euFCxYEBsbG+rVq8fhw4e1/TmJjObNm4ebmxu2tra899572rOoQgghXl0SLAohxDPg6urKokWLSElJ0balpqby888/U6xYMW2bg4MD1tbWz3QsmVmKsJUxqAe2G5hZYWBuw+3oNXwWsYENGzfRv39/bf/kyZNZuHAhx48f5+TJk/z6668AtG/fnrZt21KuXDmuXbvGhQsX2Lt3L506daJ27dr4+/vnOQ5PT0/mzZtHbGwse/bsoUOHDpibmz+ry36lLV68mNKlS1OqVCk6duzInDlzUCr7HRw6dCgxMTGsWbOG2NhYZs6cSYECBXL1kZaWxrvvvkt0dDTbt2/X+75zcnKiUaNGzJ07F4C7d+/yyy+/0Llz51z9vPvuu1y5coU1a9Zw4MAB/Pz8ePvtt7l+/brWJi4ujuXLl7Nq1SpWrVrF1q1bGTt27NO+LUIIIZ4zCRaFEOIZ8PPzw9XVlaVLl2rbli5dSrFixfD19dW21alTh759+2qv3dzcGD16NJ07d8ba2ppixYrxww8/6PV94cIF2rZti52dHQ4ODjRv3pxz587ptZkzZ442e+fk7MyxJVO0fefHNeP2oT+4uvQrstJTSDl7iOipXWndpo0209SzZ0/27t3L+PHj8ff3p3z58mzYsAEAIyMjDAwMMDU1JS0tjcmTJ1O1alUOHDjAL7/8ku89mT17Njdu3MDPz4/333+f3r17P/FS1jfF7Nmz6dixIwCBgYEkJSWxdetWIPt5Ul9fX/z9/XFzc6N+/foEBQXpHZ+cnEzTpk25evUqW7ZsoWDBgrnO0blzZyIiIlBKsWTJEkqWLJkrKc6OHTvYu3cvv/76K/7+/nh6ejJx4kTs7OxYsmSJ1i4rK4uIiAjKli1LrVq1eP/999m0adNTvitCCCGeNwkWhRDiGencubPe8sA5c+bwwQcfPPK4SZMm4e/vz6FDh+jRowfdu3fnxIkTANy7d49GjRphbW3N9u3biYqKwsrKisDAQO3Zv5kzZ9KzZ08+/PBDjhw5wqdT5mBkp/+8280dP2PhGUDhrjNxCZ1KsX6/0qRdKCtXruTMmTOMHz+elStX8tlnn5GcnMy1a9e0a0lISCAhIYE//viDokWLMnLkSG2bk5OTdo4RI0YQHR2tvfb19WXfvn2kpKRw8uRJ2rRpw7lz5/SC5eXLl79xyVUeTDoUE3ucvXv3EhwcDGQH5+3atWP27NkAdO/enUWLFlGxYkUGDx7Mzp07c/UZHBzMnTt3WL9+Pba2tnmet2nTpiQnJ7Nt2zbmzJmT56zi4cOHSU5OxtHRESsrK+3r7NmzWj1OyP4jx/0z5C4uLly5cuU/3RchhBAvniS4EUKI/ygzS7H37HWu3E6lkLUZ/79akI4dO/LZZ59x/vx5IPuZvUWLFhEZGfnQ/po0aUKPHj0AGDJkCFOmTGHLli2UKlWKX375haysLH788Ud0Oh0A4eHh2NnZERkZScOGDfnqq68YMGAAffr0AaCGoSM2R/TPYelTG6vyDfS29Rn8Jf4lHQFwd3dn165dLF68mLZt22JlZYWdnR2AXlkLQ0NDrK2tpdTFv5RX0qH0nT+RkZGhl9BGKYWpqSnffPMNjRs35vz58/zxxx9s2LCBt99+m549ezJx4kStfZMmTZg/fz67du3KVbIkh5GREe+//z7Dhw9nz549LFu2LFeb5ORkXFxc8vyezfl+ADA2Ntbbp9PpyMrKetzbIIQQ4iUlwaIQTyA0NJSbN2+yfPnyf91HREQEffv25ebNm09tXOLFyevD/p2jCXja6ihYsKCWnEQpRdOmTfN8tuxB5cuX1/6t0+lwdnbWZmkOHz7M6dOncz3nmJqaSlxcHFeuXOHixYu8/fbb2r4q7g642JpxKSlVe27RxNnjn3MAzrZm7F+zkF7h4cTHx5OSkkJ6errU6nuGcpIO3f8sqcrK5PLB9djX7c
LoTzpQ0/Of5aMtWrRg4cKFfPzxxxQsWJCQkBBCQkKoVasWgwYN0gsWu3fvTtmyZXnnnXdYvXo1tWvXznMMnTt3ZuLEibRr1w57e/tc+/38/Lh06RJGRkZaWRUhhBBvDgkWhcjDsyxs3a5dO5o0afJU+xQvRl4f9gFS0jOJ/usOa48m0LlzZ3r16gXAt99++1j9PmyWJjk5mUqVKrFgwYJcxxUsWBADg9xPFxga6Bge5EP3+QfR/f82A2Oz7L7//3Vto5MMHjSISZMmERAQgLW1NRMmTGDPnj2PNWbxZPJLOpRyei9ZqclYV2jI3ONZdGteBkOD7HepdevWWhmNSpUqUaZMGdLS0li1ahXe3t65zvHJJ5+QmZlJs2bNWLNmDTVr1szVxtvbm2vXrmFhYZHnOOvXr09AQAAtWrRg/PjxeHl5cfHiRVavXk3Lli3zTWYkhBDi9SDPLArxnJmbm0tSj9dAfh/27xe2MoYGDRuRnp6uPWv4X/n5+XHq1CkKFSqEh4eH3petrS3W1ta4ubnlSi4SWNaFmR39cLY109vubGvGzI5+3ImPoXr16vTo0QNfX188PDz0nknLj4mJCZmZmf/5ut40e89e15uNzpH853rMi1dEZ2pJQlIqe8/+k3G0devW7N+/HyMjIz777DPKly/PW2+9haGhIYsWLcrzPH379iUsLIwmTZrk+WwjgKOjY75ZaXU6HX/88QdvvfUWH3zwAV5eXrz33nucP39e7/lUIYQQrycJFsUba+3atdSsWRM7OzscHR1p1qyZ9uHY3d0dyE7IodPpqFOnjt6xEydOxMXFBUdHR3r27Mm9e/e0fWlpaQwcOJAiRYpgaWlJ1apV9Z73iYiI0HvW5/Dhw9StWxdra2tsbGyoVKkS+/fv12u7atUqSpUqhYWFBW3atOHu3bvMnTsXNzc37O3t6d27t3xgf87y+7B/v4SkVA7EJxEbG0tMTAyGhob/+bwdOnSgQIECNG/enO3bt3P27FkiIyPp3bs3f/31F5CdWGbSpElMnz6dU6dOcfDgQWbMmEFgWRd2DMl+fq1LLXcWdqvGjiH1CCzrgqenJ/v372fdunWcPHmSoUOHsm/fvkeOx83NjW3btvH3339LzcQncOV23t87hdoMp9C7I/JsV6VKFZRSDBs2jJiYGO7evUtiYiLLly/Xfme5ubmhlNJbEdG/f39u3bpF9erVAXIlFXrQzZs3tTqgANbW1kyfPp2///6b9PR04uPjmT9/Pq6urkDuREaQHaQ+mKFXCCHEq0eCRfHGunPnDv3792f//v1s2rQJAwMDWrZsSVZWFnv37gVg48aNJCQk6JU/2LJlC3FxcWzZsoW5c+cSERGhl72xV69e7Nq1i0WLFvHnn3/y7rvvEhgYyKlTp/IcR4cOHShatCj79u3jwIEDfPrpp3rLEO/evcv06dNZtGgRa9euJTIykpYtW/LHH3/wxx9/MG/ePL7//nu9NPbi2cvvw35e7WxsbLCxsXkq57WwsGDbtm0UK1aMVq1a4e3tTZcuXUhNTdXOERISwtSpU/nf//5HmTJlaNasmfb9l7Oksaq7IwElHbXXH330Ea1ataJdu3ZUrVqVxMRELcnOw4wcOZJz585RsmTJPMsziLwVsjZ7dKMnaCeEEEI8CzqVU+X3JXTr1i1sbW1JSkp6ah+0xJvrwYyVVdwdtA/KANeuXaNgwYIcOXIEKyurPJ9ZDA0NJTIykri4OG2WqG3bthgYGLBo0SLi4+MpUaIE8fHxepkM69evT5UqVRg9enSuBDc2NjbMmDGDkJCQXGOOiIjggw8+4PTp05QsWRKAjz/+mHnz5nH58mWsrKyA7Dpsbm5ufPfdd0/7tol87IpLJHjW7ke2W9itGgH/n2FUiByZWYqa4zbrJR26X07SoR1D6un9nhJCiDeZxAbPnyS4EW+EvDJW2t27hl3sMuKP/8m1a9e0BCLx8fH4+Pjk21eZMmX0lhO6uLhw5Eh2XYIjR4
6QmZmJl5eX3jFpaWk4OuYdMPTv35+uXbsyb9486tevz7vvvqsFhpA9k3T/aycnJ9zc3LRAMWfb865pFhoayty5c4HsFPwODg6UL1+e4OBgQkND80y08jrJK8Po/XI+7Fdxd3jeQxOvgAeTDt3/PZQTGg4P8pFAUQghxAv1en+aE4J/MlY++HzZsYgv2RMbT7fPx7Jnzx4t62NOYfP8PCpTpaGhIQcOHCA6Olr7io2NZdq0aXn2N2LECI4dO0bTpk3ZvHkzPj4+evXO8jrfy1LTLDAwkISEBM6dO8eaNWuoW7cuffr0oVmzZmRkZOR5zP3Pd77Kcj7swz8f7nPIh33xOB6VdCiwrMsLGpkQQgiRTYJF8VrLL2NlZsotMq7/hV31dqy46ohXqdLcuHFD229iYpLd7gmTxvj6+pKZmcmVK1dyZap8WNFyLy8v+vXrx/r162nVqhXh4eFPdN4XxdTUFGdnZ4oUKYKfnx+ff/45v//+O2vWrNGe49TpdMycOZN33nkHS0tLvv7661xJfgCWL1+uFZnP8dVXX1GoUCGsra3p2rUrn376qd6y4MjISKpUqYKlpSV2dnbUqFGD8+fPP+Or/od82Bf/VU7SoYXdqjHtvYp6SYeEEEKIF02WoYrXWn4ZKw3MrDAwt+H24XXEWznwv59vMW/6GG1/oUKFMDc3Z+3atRQtWhQzMzNsbW0feT4vLy86dOhAp06dmDRpEr6+vly9epVNmzZRvnx5mjZtqtc+JSWFQYMG0aZNG9zd3fnrr7/Yt28frVu3/u8X/4LUq1ePChUqsHTpUrp27Qpkz56OHTuWqVOnYmRkxObNmx/Zz4IFC/j666/53//+R40aNVi0aBGTJk3Ssj5mZGTQokULunXrxsKFC0lPT2fv3r25As5nLbCsCw18nB/6PKwQD2NooJPnWoUQQryUJFgUr7X8MlbqdAYUeGcwNzZ+z8XZPZm8xYOIWTO1EhlGRkZMnz6dkSNHMmzYMGrVqqVX/uJhwsPD+eqrrxgwYAB///03BQoUoFq1ajRr1ixXW0NDQxITE+nUqROXL1+mQIECtGrVirCwsH97yc/MgwmCHpYaq3Tp0vz555/a6/bt2/PBBx889rly0v936dJFO27YsGGsX7+e5ORkIPsh96SkJJo1a6Y905lXYfLnQT7sCyGEEOJ1JNlQ3xDnzp3LM7vn604yVj4deSUIurNhOp62OnZuXpurfbt27Th69CjHjh1Dp9Mxf/58OnTooO1/MCMsZC9DbdmyJUoprl69iqenJ9OnT6dTp05am/79+7N582atptsHH3zAwoULadCgAfXr16dt27a4uMjyPSGEEOJ1JLHB8yfPLL6k6tSpk6tocl7bxD9GjBiRKxDOyViZ34JAHeAiGSsfKr8EQSnpmUT/lcTaowm5jomNjdWWiwJYWlrq7TcwMODBv1Pdn/imYMGCD11OmtM2PDycXbt2Ub16dX755Re8vLzYvfvRfxwQQgghhBCPJsGieGk9KitpDqVUvpk3JWNl3rKyshg/fjweHh6YmppSrFgxvv76ayC7/Ee9evUwNzfH0dGRkM5dyUxP0Y69tnoKV5Z+xb1r50k5c4CgKqXo0aOHFsBt3ryZI0eOsG/fPszMshO/jB8/Xju+Tp06/Prrr9pfBwsUKMDQoUM5dOiQ1sbNzQ0bGxv27dsH/JMkZ+7cuRw5coSvv/6azMxMunTpQqtWrRg5ciSJiYk4Ojry888/613rnDlzKFOmDKampri4uNCrVy8AOnfunGtp8L179yhUqBCzZ89+WrdaCCGEEOKVJcHiSyg0NJStW7cybdo0dDodOp2ONm3a5NrWsmVLtm/fTuPGjbGyssLJyQk/Pz/c3d21AGDUqFGMGTOGWrVqAdCiRQvKlCmDhYUFFSpU4Ntvv0Wn02kJWAwNDTEyMsLc3JwyZcrwxx9/EBcXh6+vLwYGBlhZWVG5cmU2btyol70yISEBLy8vDAwMKFCgAI6Ojh
gYGFCxYkWSkpIAuHnzJl5eXpiammJqaoqJiQmWlpZ8/PHHpKenazODM2fOxNbWFlNTU8zMzChdujQeHh5a8FKpUiV0Oh1r1qyhUqVKmJqaMn/+fMLCwjh8+LB2f3KycUrGytw+++wzxo4dy9ChQ4mJieHnn3/GycmJO3fu0KhRI+zt7dm3bx8jpv3I9ZMHuL7hO73jU8//SVZ6CibOXtjW68ac8AjCwsIYPXo0zZo1Q6fTMXnyZE6cOAGQq27lli1bMDIyonnz5nz++edMmDCB//3vf3pt3nrrLWbPnq3Vchw0aBB3796lVKlSdO7cmbi4OE6cOMHQoUPZsGEDLVu25Pz589y9e1frY+bMmfTs2ZMPP/yQI0eOsGLFCjw8PADo2rUra9euJSHhn5nRVatWcffuXdq1a/f0brYQQgghxKtKvcSSkpIUoJKSkl70UJ6rmzdvqoCAANWtWzeVkJCgEhIS1JkzZ5SRkZGqWLGi2rZtm1q/fr2qXbu2MjY2Vp999pmKjY1VISEhysjISHl7e6vTp0+r7du3qxYtWqjSpUuriIgIBShnZ2dlbGys5s2bp9q0aaOcnJwUoKpWraqqVaumAgIClJ+fn/Lz81MrV65UW7duVdHR0apTp07KyspKnTx5Un355ZfKzMxM/fDDDyrnW6h+/frK2dlZmZmZqcqVK6tKlSopU1NTVaBAAdW+fXutTdGiRZW5ublq0qSJCgkJUTY2NsrR0VF9/vnnavjw4crS0lIVL15cFSxYUH377bcqMjJS6XQ6ZW5urg4dOqT+/PNP1adPHwWo8uXLq/Xr16vTp0+rv/76Sw0YMECVKVNGu2d3797Vu68ZmVlq5+lravmhv9TO09dURmbWc39vX5T7r31D9FllamqqZs2alavdDz/8oOzt7VVycrJSSqnlh/5ShdoMV+gMVNFe81TxIauUZdm3laFNIWVRpp4iu5a40ul0ytTUVNWvX1/17NlT2djYqFu3bimllALUsmXLtHPUrl1beXt7q6VLlyoPDw9lbm6uSpYsqVxcXLTvp+LFi6spU6aokSNHqgIFCihAlSlTRvXu3VtVq1ZNKaXUpUuXVIsWLZSLi4syMTFRxYsXV5UrV1atWrXSzlW4cGH1xRdf5HtffHx81Lhx47TXQUFBKjQ09N/faCGEEEI8M29qbPAiSbD4kqpdu7bq3bu39gG/W7/PlL29verTp4/WZuDAgQpQJ06cULdu3VKmpqZq3Lhx2rbU1FRlYWGhdu7cqc6ePasA9eOPP6ouXbqo4OBgdezYMe3D/saNG1W5cuXUiBEj1OrVqxWgUlJStHOFh4crW1tb7XWZMmVUt27dFKBiY2MVoLp166YMDQ3VX3/9pU6dOqUA9eGHHyoDAwO1fPlyZWNjozp27KgcHBzUnTt3lFJKlSxZUrVv315ZWVmpYcOGKWNjY2VkZKQWLFiglFLqwIEDClCFChVS48ePV0optWXLFgWo5cuX692z4cOHqwoVKjybN+QVtubIRVVt9EZVfMgqVXzIKuX8/iQFqPA1u3O17devn6pTp472eufpa8q17y8KUE7tx2rBonkJf62/4kNWqXdDPlR169ZVSil169YtVa5cOVWgQAHVsWNHNX/+fO39Vir7e/uDDz7QO+/y5cuVkZGRysjIUEr9EyzmANT8+fNV/fr1VceOHbXt33zzjfLz81MFChRQlpaWytjYWFWuXFkppdTly5cVoDZv3pzvvZk8ebIqXbq0Uio7+DQyMlLbtm173FsrhBBCiOfoTY4NXhQpnfESub80wV83Ujix/y9+//9Mnlf/2MbdGzf45ptv+fHHHwFITc1OOFKhQgV0Oh1paWlayYW4uDju3bvH3bt3adCggZZM5JNPPiEjIwNfX1+9rJHly5end+/edO/eXVsyuHXrVho1akRycjKLFi3i9u3b2NnZkZGRQUpKCp6engCcOHECIyMjXFxcKFasGEWKFAHA3t4eNzc3srKy2LJli9
aPUopChQoB2XUGa9WqRXJyMklJSbi4uBAfH0+NGjW0a3v77bfZunUr//vf/7Czs9OK2/v7+z+7N+M1kZOc5v5UMjpjUwC+XH4U56LFHroMt4q7A042plx4cIdh9q8OHdnLeV1SzLiSlQWAtbU1Bw8eJDIykvXr1zNs2DBGjBjBvn37sLOze+yxp6enM3nyZBo1agTAihUr2LhxIxs2bABg0aJFDBw4kEmTJhEQEIC1tTUTJkxgz549AJibmz/yHJ06deLTTz9l165d7Ny5E3d3d23JthBCCCHEm06eWXxJrD2aQM1xmwmetZs+i6L568Zd7qT9k7QlKz0VA3MbzLxrM33xeqKjo6lRowYNGzZkz549/Prrr0D2M1enTp3irbfe0urRrV69mtWrVwPZH7BjYmJYsmSJXrZJY2NjunbtypkzZ7SkH82aNWPGjBkMHDiQgwcPYmpqyvbt24mOjqZcuXKPnYAGsoNCFxcXgoKC8Pf3Jzo6mujoaE6cOEFISIjW7sEP+IaGhmzYsIGAgADs7OyYMWOGVkrhwQybQl9mliJsZYxeoAhgbF8YnZEpqecPE7Yyhsysf1p4e3tz+PBh7ty5A2QnCApyug06A0wciuj1c3+CoAczlxoZGVG/fn3Gjx/Pn3/+yblz59i8ebO2Pyegy7F79248PT0xNDT8p3+djj/++IO33noLgH379vHbb79Rv359AKKioqhevTo9evTA19cXDw8P4uLitOOtra1xc3Nj06ZN+d4jR0dHWrRoQXh4OBEREU9UC1IIIYQQ4nUnweJLIK/SBDpDY1BZ2msT55Koe2kYGJsx63AK7iVKUrNmTc6fP4+Pjw9vv/025ubmxMXF4eHhgaWlJT4+PpiamhIfH4+bmxsAxYoVw8PDA1dX1zzH4urqyrvvvgtkJwCZNWsWUVFRNGrUiNTUVEqUKIGzszPnzp3j6tWrAJQqVYqMjAwSEhKIj4/n4sWLnD59mhs3bnD+/HkMDAyoV68ely5dwsDAgFOnTlGkSBE8PDzw8PDgxIkTWFlZYWtri4mJCSYmJkRFRWljysjI4MyZM3To0IFDhw5hZJT3hLiJiQmZmZn/6b14new9ez1XuQsAnZEJNlVbcyMynFNRq1kaeYDdu3cze/ZsOnTogJmZGSEhIRw9epQtW7Ywd9Jw6ge1oUhh/RnI/BIErVq1iunTpxMdHc358+f56aefyMrKolSpUlqb+Ph4+vfvz4kTJ1i4cCEzZsygT58+ev0YGxuzceNGEhMTAZg8eTKtWrXS9nt6erJ//37WrVvHyZMnGTp0qJY9NceIESOYNGkS06dP59SpUxw8eJAZM2botenatStz584lNjZW7w8XQgghhBBvOlmG+oLlN/tjZFuItIQTZCRdRmdshrVvU27tXcad2K2c3u9LxO+KMmXKMHnyZN577z2GDBlCt27d6NevH3PnzmX27Nlcv36d+vXr069fPz777DMgu/7d9u3bsbGxoXnz5nrn7Nu3L40bN9bKUOzevRtvb2/u3bvH4cOHMTU1pVu3bly+fJm0tDRiYmIAKF26NPXr12fVqlUYGxtrhdVNTU1ZunQpbdu2pW3btnzzzTds3ryZlJQU2rVrR7t27bSlhb169UKn02FgYED37t0ZNGgQDg4O3Lx5k/Hjx3Pr1i0aNWrE0qVLteyqD3Jzc+Ps2bNER0dTtGhRrK2tMTU1fbpv2Cvkyu3cgWIO2xrvoTMw5Ob2BbRf9w2FC7vw8ccfY2Fhwbp16+jTpw+VK1fGwsKC1q1bM3nyZMwtLNl79jpfxDiQkWLCliH18iw5Ymdnx9KlSxkxYgSpqal4enqycOFCypQpo7Xp1KkTKSkpVKlSBUNDQ/r06cOHH374RNf30UcfcejQIdq1a4dOpyM4OJgePXqwZs0arU1ISAipqalMmTKFgQMHUqBAAdq0aaPXT/369XFxcaFMmTIULlz4icYghBBCCPE60ymlHoxTXho5ddiSkpKwsb
F50cN5JnbFJRI8K3cR8XvX/+ba6sncu3IOlZFGkY9ncy/xb66uHI9KzV5eWqJECWrVqsWtW7fYsmULaWlpWFlZkZqaSkpKCoULF+ajjz7C0tKS6dOnc+bMGezt7alSpQqff/455cuXx97eHoAbN24wdOhQ1qxZQ3x8PPfu3aNly5bMmjWL27dv07lzZ6KiosjKykKn02FtbY2vry+bNm1CKUVCQgJvvfUWcXFx2Nvbk5aWxp07d9DpdNja2lKpUiXmz59P3bp1tecpc/j7+7Njxw7GjBnDjBkzcHFxoXbt2ixevJikpCSysrIwNDREp9NRvHhxAgMDmT59Ojdu3NB7Bi4tLY0OHTqwadMmbt68SXh4OKGhoc/0/XuZ5fe99aCF3aoRUNLxOYwoW506dahYsSJTp059bud8mOTkZIoUKUJ4eLjezKUQQgghXi5vQmzwspGZxRcsv9kfY4ciuLw/SW+bka0TxfosAp78A/6DS/xy3P+3ggeX5+VwdHTUe94sLy4uLnTo0IHly5ezZs0abZnrvHnzKF++PNu3b8fKyoqCBQty9uxZfvvtN3x9fTl06BDdunVj0aJFjBgxgoEDB1KhQgVMTEy4evUqgwYNYsmSJRw+fFjvl8K0adNyjcHU1JQlS5Y88l68Kaq4O+Bia8alpNRcM9fwT3KaKu4Oz3toL4WsrCyuXbvGpEmTsLOz45133nnRQxJCCCGEeKlIsPiCFbI2e3Sj+7ysH/A3bNxE1P4/uXbrDt/8tITMzEyKFi1K27ZtMTY2ply5cgBER0dTtmxZbQbH3d2dmJgYvv/+e0JCQrCysmL+/PnUrl0ba2trpk6dypYtW+SvR/+CoYGO4UE+dJ9/EB3oZ0T9//8OD/LJcynpmyA+Ph53d3eKFi1KREREvs/CCiGEEEK8qWQZ6guWmaWoOW5zvrM/98v5SJ9XUpEXae3RBPpNmsvJxWPJunsLAwsbDHRgmHWPoKZNaNiwIW3atMHExAQrKysMDAz0sp5mZGRga2vL5cuXtW2ff/45Y8aMYciQIYwdO/ZFXNZrY+3RBMJWxuglu3GxNWN4kM9L9X0khBBCCPEwb0Js8LKRP6W/YA+b/XmQ80v4AV+r4+dUDtdPFvyzQynS/o7FxDqBGTNm8MUXX7By5UoAfvrpJ6pWrarXz/0lE7KysoiKisLQ0JDTp08/l+t4nQWWdaGBj7NWw7OQdfbM9Js6oyiEEEIIIR6PBIsvgcCyLszs6Jdr9sfZxpTgKsVwK2D5Un7Azy+TKwA6HWZFfThj68f+6eMp4e5GVFQUhQsX1spg5GfChAkcP36crVu30qhRI8LDw6X+3X9kaKB7rklshBBCCCHEq0+CxRcgIiKCvn37cvPmTW3bqzj7k18dv7SLJ0g9fxgzN18u3LJlwnc/cfXqVby9vQkLC6N3797Y2toSGBhIWloa+/fv58aNG/Tv359Dhw7x+eefU7lyZWrUqMHkyZPp06cPtWvXpkSJEi/gKoUQQgghhHgzSbD4EnkVZn9GjBjB8uXLiY6OzjeTq4GJBakXjnJr/+9kpd3lf0VdmTRpEo0bNwbAwsKCCRMmMGjQICwtLSlXrhx9+/YlNTWVjh074uHhgbOzMwAffvghq1ev5v3332fbtm16y1WFEEIIIYQQz44Ei+Jfyy+Tq3EBVwq1HobOMPvb68EyH+3bt6d9+/Z5Hnvs2DFCQ0P1Zl1///33pzdoIYQQQgghxGMxeNEDeBXVqVOHXr160atXL2xtbSlQoABDhw7VahbeuHGDTp06YW9vj4WFBY0bN+bUqVP59nf16lX8/f1p2bIlaWlpZGVlMWbMGNzd3TE3N6dChQp69QMjIyPR6XSsW7cOX19fzM3NqVevHleuXGHNmjV4e3tjY2ND+/btuXv3rnZcWloavXv3plChQpiZmVGzZk327duXq99NmzZhbW2NsbEx1atX58SJE7i5udG+fXvCwsI4fPgwOp2O6h4FMDy9FR1wfl
wzzo9rRsLcfsRPbk3Srl8gK5O7G7+hfX1/zM3NKVWqVK76iJmZmfTv3x87OzscHR0ZPHgwDybofdT9uHHjBh06dKBgwYKYm5vj6elJeHj4v3pvhRBCCCGEENkkWPyX5s6di5GREXv37mXatGlMnjyZH3/8EYDQ0FD279/PihUr2LVrF0opmjRpwr1793L1c+HCBWrVqkXZsmVZsmQJpqamjBkzhp9++onvvvuOY8eO0a9fPzp27MjWrVv1jh0xYgTffPMNO3fu5MKFC7Rt25apU6fy888/s3r1atavX8+MGTO09oMHD+a3335j7ty5HDx4EA8PDxo1asT169f1+v3iiy/4+eef2blzJ0ZGRnTu3BmAihUrMmDAAMqUKUNCQgIJCQlM/vRjvWNNC5fCpfO3WJdvAErRoLI3v/76KzExMQwbNozPP/+cxYsXa+0nTZpEREQEc+bMYceOHVy/fp1ly5bp9fmo+zF06FBiYmJYs2YNsbGxzJw5kwIFCjzpWyqEEEIIIYS4n3qJJSUlKUAlJSW90HFkZGapnaevqeWH/lI7T19Tb9Wurby9vVVWVpbWZsiQIcrb21udPHlSASoqKkrbd+3aNWVubq4WL16slFIqPDxc2draquPHjytXV1fVu3dvra/U1FRlYWGhdu7cqTeGLl26qODgYKWUUlu2bFGA2rhxo7Z/zJgxClBxcXHato8++kg1atRIKaVUcnKyMjY2VgsWLND2p6enq8KFC6vx48fn2+/q1asVoIoVK6amTJmihg8fripUqKA3tjVHLiqyq36ogi2/UMWHrFLVRm9Ua45czHUve/bsqVq3bq29dnFx0c6vlFL37t1TRYsWVc2bN3/s+xEUFKQ++OCDXOcSQgghhBCvj5clNniTyDOLj5BXQfPr8TeoVqE8Ot0/mUoDAgKYNGkSMTExGBkZ6dURNDExwczMjA4dOtC7d2/q1KnDrVu38PX15cMPP2T48OGEhISwcuVKUlJSSEtLo379+lr/GRkZpKWlUapUKby9vTl37hwATk5O2jmcnJwwMTGhadOmnD17Fjc3N0qUKMGVK1cAaNGiBffu3aNGjRoA9O3bl2nTpvH2228TGxtLeno6gYGBAJQvX546depQsWJFQkJCgOzlovc7deoUXbp0Ye/evXpZSrvUcuedd6ppmVy//fZb5syZQ3x8PCkpKaSnp1OxYkUAkpKSSEhI0LtXRkZG+Pv7a0tRT58+zd27d2nQoIHe+dPT0/H19QWge/futG7dmoMHD9KwYUNatGhB9erVH+v9FUIIIYQQQuRNlqE+RE7B+QfLQ6RnZLErLpG1RxMeq5/+/fuTnJxM+/bt2bBhAydPnkQphaurK6tWraJdu3bastWcpaz29vbs27eP6OhowsLCMDIyomDBgsybN0977i8sLEw7x+7du7l37x5ff/01sbGxjB49mi1btnDt2jUAKlWqpDemrVu3UqBAAW3/vn37yMjIAMDY2Fhrd39AfL9WrVphYmLCnj17+O6777TtVd0dCSjpiKGBjkWLFjFw4EC6dOnC+vXriY6O5oMPPiA9Pf2x7htAcnIyAKtXryY6Olr7iomJ0Z5bbNy4MefPn6dfv35cvHiRt99+m4EDBz72OYQQ/05ERAR2dnYvehhCCCGEeEYkWMzHQwvOA2kXTxK2MobMrOwWu3fvxtPTEx8fHzIyMpi9dD2/R//NxsPniIiIQKfT0axZM8qWLUuXLl0AaNSoEV5eXmzYsIExY8ZQq1YtmjdvjqmpKVeuXOHo0aN4eHjg5ORERkYGERER+Pv74+XlBaD3DOOyZcswNzenVatWuLu706pVKwICAkhMTASgZcuWAKxZs4YbN24QExNDr169OHnyJD4+PkRGRlK6dOlH3hcTExOSkpI4fvw4P/30ExUqVOCtt97Ks21UVBTVq1enR48e+Pr64uHhQVxcnLbf1tYWFxcX9uzZo23LyMjgwIED2msfHx9MTU2Jj4/Hw8ND78
vV1VVrV7BgQUJCQpg/fz5Tp07lhx9+eOS1CCGEEEIIIfIny1DzkV/B+RwZt69ybOkMfvW3RF07y4wZM5g0aRJxaVbYe1enV/ePcQjsSWbydTIyMnAq7Erz5s2B7DqDhoaGGBgY0K1bN9auXcuQIUOoWrUqzs7ODBw4kLFjx7Jw4UL8/Pw4d+4cxsbG7Nixg5IlS2pjuHr1KgB37tzR/m1lZaXtT01N1WYGq1SpgpmZGZ9//jmXLl2iVKlSHDhwgNTUVLp06UK7du2oWLEix44de+h9cXNz4+LFixQqVAgTExPS0tIwNTXNs62npyc//fQT69atw93dnXnz5rFv3z7c3d21Nn369GHs2LF4enpSunRpJk+erFc2w9ramoEDB9KvXz+ysrKoWbMmSUlJREVFYWNjQ0hICMOGDaNSpUqUKVOGtLQ0Vq1ahbe390OvQwghhBBCCPFwMrOYj/wKzuewLFMPlZFOl1YN6dmzJ3369KFY9SC6zz+IZcPemDiX5MqSkVxbNTn7gNo92HTiWq5+coJGHx8frfzFqFGjcHZ2Zvv27Xh7ezN58mSUUnpB1v1ylmpaWFjoLdXs0aMHnp6eQPZy0oYNG1K0aFHGjx9PTEwM169fx8rKir///pudO3dSoUKFR96X1q1b4+3tzaVLlyhYsCALFy7Mt+1HH31Eq1ataNeuHVWrViUxMZEePXrotRkwYADvv/8+ISEhBAQEYG1trc2C5hg1ahRDhw5lzJgxeHt7ExgYyOrVq7X7YWJiwmeffUb58uV56623MDQ0ZNGiRY+8FiHedG9CGSB/f38sLCy0MkBCCCGEeAIvNr/Ow73IjEc7T19TxYesyvPL1LWssq70jio+ZJXaefqaUio7Y2q10RtztXXtu1hhYKQKtvhMVRu9UWVkZqmbN28qS0tL1adPn4dmT/3111+VUv9kT73fsmXL1P1vX+HChdXIkSMfek1Tp05VZcqUUX5+fmrNmjVKKaWaN2+uOnXqpIyMjFRycrLWtnbt2qpPnz7a6+LFi6spU6YopZRat26dMjIyUhcv/pPtdO3atQpQy5Yte+x7LIR4sWrXrq2srKxUnz591PHjx9X8+fOVhYWF+uGHH5RSSr3zzjvK29tbbdu2TUVHR6tGjRopDw8PlZ6erpTS/90UHx+vSpUqpUJCQlRGRoZSSqmvvvpKlS5dWq1du1bFxcWp8PBwZWpqqiIjI5VS/2RgrlatmtqxY4c6ePCg8vDwULVr11YNGzZUBw8eVNu2bVOOjo5q7Nix2rh79+6tChcurP744w917NgxFRISouzt7VViYqJev1WrVlWRkZHq2LFjqlatWqp69erP69YKIYR4BiQb6vMny1DzUcXdARdbMy4lpeb73KKLrRlV3B2A/JetGphaYFW2Hje2zOGsmTWL1pmxbPYUDAwM0Ol0eHp60rx5c7p168b333+PtbU1n376KUWKFNGWrT6OsLAwevfuja2tLYGBgaSlpbF//35u3LhB//79gexZhH79+mFiYkLNmjW1bQMHDqRy5cpYWlo+1rnq16+Pl5cXISEhTJgwgVu3bvHFF1889liFEC9GZpZi79nrXLmdSiFrMxTg6urKlClT0Ol0lCpViiNHjjBlyhTq1KnDihUrtGePARYsWICrqyvLly/n3Xff1fo9ceIEDRo0oGXLlkydOhWdTkdaWhqjR49m48aNBAQEAFCiRAl27NjB999/T+3atbXjv/rqKy1Tc5cuXfjss8+Ii4vTMi23adOGLVu2MGTIEO7cucPMmTOJiIigcePGAMyaNYsNGzYwe/ZsBg0apPX79ddfa+f59NNPadq0KampqZiZmT27myyEEEK8RmQZaj4MDXQMD/IBIO98oDA8yAdDg+y9D1u2al+vKyZFSnPltzA+6dSKGjVq4O3trX1gCQ8Pp1KlSjRr1oyAgACUUvzxxx96WUkfpWvXrvz444+Eh4dTrlw5ateuTUREhN7S1XLlymFnZ0fFih
W1Zxvr1KlDZmYmderUeexzGRgYsGzZMlJSUqhSpQpdu3bl66+/fuzjhRDP39qjCdQct5ngWbvpsyia4Fm7ORR/g6KlcpcBOnXqVJ5lgBwdHSlVqhSxsbHatpSUFGrVqkWrVq2YNm2a1tf9ZW+srKy0r59++kkv0RVkl+vJ4eTkhIWFhV5JHicnJ60MUFxcnF4ZIMjO4FylShW9cT3Yr4uLC4DWjxBCCCEeTWYWHyKwrAszO/rlqrPo230qw4N8CCzrom0rZJ3/X6oNTC0oGJT91+6F3apR3tmMsLAwPvzwQyC7TMZPP/2U7/GhoaGEhobqbWvRooX2XFGO9u3b0759+/zHYWDA9evX9bZVrFgxVz+Q/czP/XJqO+bw8vJi+/btetvy6kcI8eLllAF68Cf0/jJA9/8+exKmpqbUr1+fVatWMWjQIIoUKQLol73J2Xb/Mfd7sFzPg38o0+l0ZGVlPfHY8ioD9G/6EUIIId5UMrP4CIFlXdgxpB4Lu1Vj2nsVWditGjuG1Mv1wSpn2Wpes5Dpl+O4E7MV+4zrGN04R4cOHQCeaJmpeLg6derQt2/fl6afxzVixAgqVqz40DbPe0x5edJ6ejkJRu7PbCtejP9aBuj+0jaJiYmcOHECHx8fbZuBgQHz5s2jUqVK1K1bl4sXLwKPX/bmSZUsWRITExOioqK0bffu3WPfvn164xJCCCHEfyczi4/B0EBHQEnHR7YZHuRD9/kH0UGuD2a39i7l+MZvaTTHlEqVKrF9+3YKFCjwzMYsHi4yMpK6dety48YNvSBo6dKlT7T893l4Gcf0KNWrVychIQFbW9sXPZQ33r8tA/Qkz1MbGhqyYMECgoODqVevHpGRkVoZoIeVvfk3LC0t6d69O4MGDcLBwYFixYoxfvx47t69q9WwFUIIIcTTIcHiU5TfstXiXmWYM2D7v17m9aZLT0/HxMTkuZzLwcHhuZznSbyMY3oUExMTnJ2dX/QwBE9WBsjU2Ig+ffpoS+TDw8Pp06cPzZo1Iz09nbfeeivf56mNjIxYuHAh7dq10wLGUaNGUbBgQcaMGcOZM2ews7PDz8+Pzz///D9d09ixY8nKyuL999/n9u3b+Pv7s27dOuzt7f9Tv0IIIYR4wItNxvpwr2p63IzMLLXz9DW1/NBfaufpayojM+tFD+mVUrt2bdWzZ0/Vp08f5ejoqOrUqaOOHDmiAgMDlaWlpSpUqJDq2LGjunr1qt4x95f6+Omnn1SlSpWUlZWVcnJyUsHBwery5ctKKaXOnj2ryJ781b5CQkLy7Of69evq/fffV3Z2dsrc3FwFBgaqkydPavtzSgesXbtWlS5dWllaWqpGjRrplRXZsmWLqly5srKwsFC2traqevXq6ty5c0oppYYPH64qVKigfvrpJ1W8eHFlY2Oj2rVrp27dupXvtRUvXlyNHDlSvffee8rCwkIVLlxYffPNN499fydNmqTKli2rLCwsVNGiRVX37t3V7du39dqEh4crV1dXZW5urlq0aKEmTpyolUg4ceKEAlRsbKzeMZMnT1YlSpTQrhlQN27c0Pbv2LFD1a5dW5mbmys7OzvVsGFDdf36daWUUpmZmWr06NHKzc1NmZmZqfLly2ulY8R/86RlgIQQQoiX1asaG7zK5JnFZyBn2WrzikUIKOmoZUwVj2/u3Lnac0ljx46lXr16+Pr6sn//ftauXcvly5dp27Ztvsffu3ePUaNGcfjwYZYvX865c+e0JEGurq789ttvQHbK/4SEBKZNm5ZnP6Ghoezfv58VK1awa9culFI0adKEe/fuaW3u3r3LxIkTmTdvHtu2bSM+Pp6BAwcCkJGRQYsWLahduzZ//vknu3bt4sMPP9TLPhkXF8fy5ctZtWoVq1atYuvWrYwdO/ah92fChAlUqFCBQ4cO8emnn9KnTx82bNjwWPfWwMCA6dOnc+zYMebOncvmzZsZPHiwtn/Pnj106dKFXr16ER0dTd26dfnqq6+0/V
5eXvj7+7NgwQK9fhcsWJBvgqXo6GjefvttfHx82LVrFzt27CAoKIjMzEwAxowZw08//cR3333HsWPH6NevHx07dmTr1q2PdU0ifw97njrH/WWAhBBCCCE0LzpafRj568Gb4/7ZWN8q1ZWvr6+2b9SoUaphw4Z67S9cuKAAdeLECaVU7tm3B+3bt08B2gxaXjNfD/Zz8uRJBaioqCht/7Vr15S5ublavHixUip7Bg5Qp0+f1tp8++23ysnJSSmlVGJiogK0IuQPGj58uLKwsNCbSRw0aJCqWrVqnmNSKntmMTAwUK+fdu3aqcaNG+d7/Q/z66+/KkdHR+11cHCwatKkSa7+c2YWlVJqypQpqmTJktrrB2cbH7y/wcHBqkaNGnmePzU1VVlYWKidO3fqbe/SpYsKDg7+V9ck9K05clG5DVml3PKZWVxz5OKjOxFCCCFeMIkNnj+ZWRQv3IP132ISbvG3oTNrjyYAcPjwYbZs2aJXq6106dIAueq15Thw4ABBQUEUK1YMa2trrTB3fHz8Y48rNjb2serMWVhYULJkSe21i4uLVsvNwcGB0NBQGjVqRFBQENOmTSMhIUHvPG5ublhbW+d5fH5yipzf//rBGnP52bhxI2+//TZFihTB2tqa999/n8TERO7evatd9/3XnNf53nvvPc6dO8fu3buB7FlFPz8/7X15UM7MYl6epB6f+Hdynqd2ttUv8ePbfSqLI76T56mFEEIIkSdJcCNeqPzqv6UoY7rPP8jMjn4kJycTFBTEuHHjch2fU2j7fnfu3KFRo0Y0atSIBQsWULBgQeLj42nUqBHp6elP/Rryqgmn7qs5GR4eTu/evVm7di2//PILX375JRs2bKBatWr5Hv+sasGdO3eOZs2a0b17d77++mscHBzYsWMHXbp0IT09HQsLi8fqx9nZmXr16vHzzz9TrVo1fv75Z7p3755ve3Nz83z3PUk9PvHvBZZ1oYGPM3vPXufK7VQKWWcvPZVl8kIIIYTIjwSL4oV5VP03gLCVMdTx9WXZ0qW4ublhZPTob9njx4+TmJjI2LFjtXpu+/fv12uTk10155m5vHh7e2t15qpXrw7kXWfucfj6+uLr68tnn31GQECAFmT9Wzkzeve/9vb2fuRxBw4cICsri0mTJmFgkL2wYPHixXptvL299Wrr5XU+gA4dOjB48GCCg4M5c+YM7733Xr7nLV++PJs2bSIsLCzXvvvr8eXMAItn43HKAAkhhBBC5JBlqOKFeVT9NwUkJKUS0Kw9169fJzg4mH379hEXF8e6dev44IMP8gz2ihUrhomJCTNmzODMmTOsWLGCUaNG6bUpXrw4Op2OVatWcfXqVW12637315nbsWMHhw8fpmPHjnnWmcvP2bNn+eyzz9i1axfnz59n/fr1nDp16rECu4eJiopi/PjxnDx5km+//ZZff/2VPn36PPI4Dw8P7t27p92befPm8d133+m1yZkFnThxIqdOneKbb75h7dq1ufpq1aoVt2/fpnv37tStW5fChQvne97PPvuMffv20aNHD/7880+OHz/OzJkzqVGjBkOHDtXq8c2dO5e4uDgOHjzIjBkzmDt37pPfHCGEEEII8VRIsChemEfVf8uhLOyJiooiMzOThg0bUq5cOfr27YudnZ02O3a/ggULEhERwa+//oqPjw9jx45l4sSJem2KFClCWFgYn376KU5OTvTq1SvPc4eHh1OpUiWaNWtGQEAASql868zlxcLCguPHj9O6dWu8vLz48MMP6dmzJx999NFjHZ+fAQMGsH//fnx9ffnqq6+YPHkyjRo1euRxFSpUYPLkyYwbN46yZcuyYMECxowZo9emWrVqzJo1i2nTplGhQgXWr1/Pl19+masva2trgoKCOHz4MB06dHjoeb28vFi/fj2HDx+mSpUqBAQE8Pvvv2tZYUeNGsXQoUMZM2YM3t7eBAYGsnr1atzd3Z/grgghhBBCiKdJp+5/uOolc+vWLWxtbU
lKSsLGxuZFD0c8ZbviEgmelXt544MWdqsmS+fu4+bmRt++fenbt++LHsp/VqdOHSpWrMjUqVNf9FCEEEII8ZKT2OD5k5nFV0xoaCg6nQ6dToeJiQkeHh6MHDmSjIwMIPsZvClTplCuXDnMzMywt7encePGREVF6fUTERGBnZ3dQ8/TokULAO18+X2NGDGCc+fOodPpiI6O1uvnt99+o06dOtja2mJlZUX58uUZOXIk169ff2T9Nx1S/+11cufOHTp16oSVlRUuLi5MmjRJb/+8efPw9/fH2toaZ2dn2rdvr2WFVUrh4eGRa4Y4OjoanU7H6dOniYiIyPf7UwghhBBCPDkJFl9BgYGBJCQkcOrUKQYMGMCIESOYMGECSinee+89Ro4cSZ8+fYiNjSUyMhJXV1fq1KnD8uXL/9X5EhIStK+pU6diY2Ojty2nAP2DvvjiC9q1a0flypVZs2YNR48eZdKkSRw+fJh58+ZhaKBjeFB2opgHA8ac18ODfCRb4xNYsGCBXvmJ+7/KlCnzQsc2aNAgtm7dyu+//8769euJjIzk4MGD2v579+4xatQoDh8+zPLlyzl37hyhoaFA9h8sOnfuTHh4uF6f4eHhvPXWW3h4eNCuXTu978uFCxdiZGREjRo1nudlCiGEEEK8NiQb6ivI1NQUZ2dnALp3786yZctYsWIFJUqUYMmSJaxYsYKgoCCt/Q8//EBiYiJdu3alQYMGWFpaPtH5cs4FYGtri06n09sGcO3aNb3Xe/fuZfTo0UydOlUv8YqbmxsNGjTg5s2bwD/138JWxuglu3G2NWN4kI/Uf8vDuXPn8t33zjvv5KqRmONxn7N8WjKzlFamwcogg9mzZzN//nyt3uLcuXMpWrSo1r5z587av0uUKMH06dOpXLkyycnJWFlZERoayrBhw9i7dy9VqlTh3r17/Pzzz9pso7m5uVaiIy4ujp49ezJ69GgaNGjwHK9aCCGEEOL1IcHia8Dc3JzExER+/vlnvLy89ALFHAMGDGDp0qVs2LBBW176LOXMcPXo0SPP/fcvgZX6b0+PtbU11tbWAERGRlK3bl1u3Ljx0CXHz8Laowl6fwBIv3KG9PR0UuzctDYODg4UKVKEadOmMWLECOLi4hgxYgSHDx/mxo0bWq3J+Ph4fHx8KFy4ME2bNmXOnDlUqVKFlStXkpaWxrvvvotOp2PZsmW0aNGCpKQkmjVrRtOmTRk0aNBzvW4hhBBCiNeJLEN9BWRmKXbFJfJ79N9cvZ1GTkoipRQbN25k3bp11KtXj5MnT+ZbkiFn+8mTJ5/LmE+dOkWJEiUeezYrp/5b84pFCCjp+NoFirt27cLQ0JCmTZs+lf7uf6YUshPFPOuEN2PGjMHQ0JAJEyY8tN3aowl0n38wz7IoXyw7ytqjCbm237lzh0aNGmFjY8OCBQvYt28fy5YtAyA9PV1r17VrVxYtWkRKSgrh4eG0a9cOCwsLbX9mZibt2rXDxsaGH3744d9eqhBCCCGEQGYWX3oPztBcO3mVOzFbMLewJCszg6ysLNq3b8+IESNYtWoVL0ty25dlHC+L2bNn88knnzB79mwuXrz40JqEL6s5c+YwePBg5syZk++MXWaWImxlDA+++0Z2LmBgRNrFk4StLEYDH2duJd3kr7/+ArL/uJCYmMjYsWNxdXUFYP/+/bn6b9KkCZaWlsycOZO1a9eybds2vf39+vXjyJEj7N+/HzMzs/9+0UIIIYQQbzCZWXyJ5TdDY+Zaniy7otRp2oqUlBTmzp2LpaUlXl5exMbG5tlXznYvLy8g+xnDpKQkLXtpZGQkOp1Oe5YwLxEREfkuK32Ql5cXZ86c4d69e4/V/nWWnJzML7/8Qvfu3WnatCkRERHavpz7vm7dOnx9fTE3N6devXpcuXKFNWvW4O3tjY2NDe3bt+fu3bt59h8aGsrWrVuZNm2algH0/ucaDxw4gL+/PxYWFlSvXp0TJ07oHf/777/j5+eHmZkZJU
qUICwsTMuum2Pr1q2kpKQwcuRIbt26xc6dO/X2Hz58mLp162JjY82eEUEkRPQhLeEUABlJV7i2YjwAV38fy76x7Rny1UQtsy9AYmIiOp2OEiVK4Ofnx8yZMxk1apTW/8yZMylZsiTm5uakpqYyZMgQPD09CQgI0Nps2rSJ//3vf3z33XdcvHiRd955BxsbG+zt7WnevPlDn/UUQgghhBC5SbD4kspvhgZAZ2KGzsiEPy+nozMw1La/9957nDp1ipUrV+Y6ZtKkSTg6OmrJPhwcHLC2tqZs2bLPZPzt27cnOTmZ//3vf3nuf1hQ+rpZvHgxpUuXplSpUnTs2JE5c+bkmnkdMWIE33zzDTt37uTChQu0bduWqVOn8vPPP7N69WrWr1/PjBkz8ux/2rRpBAQE0K1bNy0TaM7sHGRnpZ00aRL79+/HyMhIL5HM9u3b6dSpE3369CEmJobvv/+eiIgIvv76a71zzJ49m+DgYIyNjQkODmb27Nl6+zt06EDRokUZP281LiFTsan2LjrD7O/N6xtmojIzcOowDnPPamTeucH3k0ZTs2ZN7Y8XEyZM4Msvv8TJyYno6GiGDBmiJa7ZvHkzffr0YcCAARw9epSPP/6YjIwMatWqpTeGY8eOkZmZyTvvvIO/vz8rV67k9u3btG/fHisrKwIDA/WWtAohhBBCiIeTYPEltffs9Tyf+brfnbQM9p69rr1+7733aNmyJSEhIcyePZtz587x559/8tFHH7FixQp+/PFHLROqgYEBSimOHj1KdHQ0p0+fBsg16/RvVa1alcGDBzNgwAAGDx7Mrl27OH/+PJs2beLdd99l7ty5T+U8r4LZs2fTsWNHILvsSVJSElu3btVr89VXX1GjRg18fX3p0qULW7duZebMmfj6+lKrVi3atGnDli1b8uzf1tYWExMTLCwscHZ2xtnZGUPDf/6I8PXXX1O7dm18fHz49NNP2blzJ6mp2d9bYWFhfPrpp4SEhFCiRAkaNGjAqFGj+P7777Xjb926xZIlS7Rr6NixI4sXLyY5OVlrEx8fT/369fErXxZjhyJYlq6JSaESAGTcuoppEW/MCpeiUKsvKT5wGesPnmbQoEFMnTpVG+PIkSP566+/WLVqFbdv36ZBgwYopfjtt98IDQ2lR48eeHl5ERgYiE6nIy4uTu8+9O7dG6UU8+bNo1SpUmRlZaGU4ttvvyU8PJz4+HgiIyP/xTsohBBCCPFmkmDxJXXl9sMDRQBUJl99MQBbW1sKFCjAsGHD+OWXX/j888/p2rUrnp6e1KpVi/PnzxMZGUloaKi2BPLatWskJyfj6+uLr68v3bp1A7IDzhzx8fEUK1YMCwsLWrZsSWJiot7p4+LiaN68OU5OTloNv927d2v7x40bh4ODA4sXL+att97Czc2NwMBAUlJSCAkJ0drdvHmTjz76CCcnJ8zMzChbtiyrVq36t7fuhbs/IdGiDbvZu3cvwcHBABgZGdGuXbtcM3Ply5fX/u3k5ISFhQUlSpTQ25ZToP5J3d+3i0t2KZKcvg4fPszIkSP16jHmzFDmLHtduHAhJUuWpEKFCgBUrFiR4sWL88svv2j99u/fn65du/LlR+3Iil5Gxo1/kthYVwoiadcvXJo/iJvbF2Bz9yJV3B0ee4yxsbHUqFGDtLQ0/vrrL0aMGIGfnx9nzpzJ83oPHz7M6dOnsba21q7JwcGB1NTUXAGmEEIIIYTInwSLL6lC1nkn5yjQtB+FWn0JQPLRzViZm7F3716mTZvG5MmTiYiIYODAgQD8+uuvJCUlsXbt2lyFydu0aQPAoUOHUEpps1aHDh0Csus3Hj58mF69ehEdHU3dunX56quvMDEx0ZaQJicn06RJEzZt2kR0dDRffvkl/fr1Iz4+XjuPhYUF169fZ9SoUZw4cYLJkyezdu1a9u3bB0BWVhaNGzcmKiqK+fPnExMTw9ixY/Vmxl4la48mUHPcZoJn7abPomg++nICGRkZuBQujJ
GREUZGRsycOZPffvuNpKQk7bj7s8bqdLpcWWR1Op1WSuJJPdg3oPWVnJxMWFgY0dHR2teRI0c4deqUliBm9uzZHDt2TBu/kZERMTExzJkzR+t3xIgRHDt2jGZNm1Lg1in+nt2duyezn2u0rtCIIh/9iGWZuty7do7YmT3537ffPPYYcyxcuJDixYtz8+bNPMvD5EhOTqZSpUp61xQdHc3Jkydp3779E98/IYQQQog3lWRDfUlVcXfAxdaMS0mpeT63CGBqW5AFP/4PI0MDSpUqxZEjR5gyZYo2S/hfTJs2jcDAQAYPHgxkJ6zZuXMna9eu1dpUqFBBm20CGDVqFMuWLWPFihX06tVL216jRg0+/fRTrZ+oqCimTJlCgwYN2LhxI3v37iU2NlZ7fu3+GbVXSU5Copz3S2VlknxsM/Z1u2Du7svQZj7U9CwIQIsWLVi4cCGlS5d+Kuc2MTEhMzPziY/z8/PjxIkTeHh45Lk/J7NoZGQkDg7/zAZev36dOnXqcPz4ce0avLy88PLyol+/ftRp0oI/Y7eAV3UAjGwK4lWnFcODvmTrgunMmjWLTz755LHG6O3tTVRUFD/88AOhoaEAtG3bFh8fn3yv6ZdffqFQoULY2Ng87q0QQgghhBAPkJnFl5ShgY7hQdkfhvOrOGhetDQbYy9rrwMCAjh16tS/ChoeFBsbS9WqVfW23Z95ErJncAYOHIi3tzd2dnZYWVkRGxurN7OY13EBAQFadtbo6GiKFi2qBYqvqrwSEqWc3ktWajJWFRpiUtCNucez8PYpQ9myZWndunWupaj/hZubG3v27OHcuXNcu3btsWchhw0bxk8//URYWBjHjh0jNjaWRYsW8eWX2bPXs2fPpkqVKrz11luULVtW+3rrrbeoXLkys2fPJiUlhV69ehEZGcn58+eJiorir5NH6RJUk4XdqlHuwjL6lLrLvLbuFEpPYMuWLfnWA83LoEGDiIiIYObMmZw6dYrJkyezdOlSbQb9QR06dKBAgQI0b96c7du3c/bsWSIjI+ndu7dWqkMIIYQQQjyaBIsvscCyLszs6IetRd6F7e9lZNF9/sE8i5zrdLpcGTefdhmLgQMHsmzZMkaPHs327duJjo6mXLlyT5Rx0tzc/KmO6UXJKyFR8p/rMS9eEQNTSxSQkJSqJSRq3bo1+/fv588//3wq5x84cCCGhob4+PhQsGDBXAF7fho1asSqVatYv349lStXplq1akyZMoXixYuTnp7O/Pnzad26dZ7Htm7dmp9++gkDAwMSExPp1KkTXl5etG3blsaNGzNq5EgCSjri5mDOt19/RtkyPgQGBuLl5ZVvlty8tGjRgmnTpjFx4kTKlCnD999/T3h4OHXq1MmzvYWFBdu2baNYsWK0atUKb29vunTpQmpqqsw0CiGEEEI8AZ16iaun37p1C1tbW5KSkt7YD3mZWYoaYzdz6ZZ+IHLp50/JunuLIl3/h7OtGTuG1OPLLz7n999/JyYmBicnJ4YPH67VRTx16hReXl6Eh4cTGhrKuXPncHd359ChQ1SsWJHIyEjq1q3LjRs3sLOzo3379iQlJbF69WrtnMHBwaxZs0Z7ZrFcuXK0bduWoUOHAtkzjUWLFiU0NFTLcunm5oaPjw9//PGHXj9JSUn88ccfbN26lXr16uktQ30V/R79N30WRT+y3bT3KtK8YpFnPyAhhBBCiNeMxAbPnzyz+JLbe/Z6rkAxR8btqyRumkV6xcZ8NT2OGTNmMGnSJADq1avHN998Q0BAAJmZmQwZMiRX0pSH6d27NzVq1GDixIk0b96cdevW6T2vCODp6cnSpUsJCgpCp9MxdOjQPJc/RkVFMX78eFq0aMGGDRv49ddftSC0du3avPXWW7Ru3ZrJkyfj4eHB8ePH0el0BAYGPvZ4X7T8EhL923ZCCCGEEEK8aLIM9SX3sBIalmXqoTLSSfipPxOHD6ZPnz58+OGHAEyaNAlXV1dq1apF+/btGThwIB
YWFo993mrVqjFr1iymTZtGhQoVWL9+vfYcW47Jkydjb29P9erVCQoKolGjRvj5+eXqa8CAAezfvx9fX1+++uorJk+eTKNGjbT9v/32G5UrVyY4OBgfHx8GDx78VJ67fJ5yEhLl93ypDnCxNctVMkIIIYQQQoiXlSxDfcntikskeNbuR7Zb2K0aASUdn8OInoybmxt9+/alb9++L3ooz1xONlRAL9FNTgA5s6MfgWVdnvu4hBBCCCFeBxIbPH8ys/iSkxmrV0dOQiJnW/2lps62ZhIoCiGEEEKIV448s/iSyymh0X3+QXTkPWM1PMgHQ4P8wknxPAWWdaGBjzN7z17nyu1UCllnB/Ly/gghhBBCiFeNLEN9Raw9mkDYyhi98gwutmYMD/KRGSshhBBCCPHak9jg+ZOZxVeEzFgJIYQQQgghnid5ZvEVYmigI6CkI80rFiGgpKMEikK8QLdv36ZDhw5YWlri4uLClClTqFOnjpbMSafTsXz5cr1j7OzsiIiIAODcuXPodDqWLl1K3bp1sbCwoEKFCuzatUtrf/78eYKCgrC3t8fS0pIyZcro1SwVQgghhHiWJFgUQvwnOUFPdHT0ix7Kc9W/f3+ioqJYsWIFGzZsYPv27Rw8ePCJ+/niiy8YOHAg0dHReHl5ERwcTEZGBgA9e/YkLS2Nbdu2ceTIEcaNG4eVldXTvhQhhBBCiDzJMlQhXkF16tShYsWKTJ069UUP5Y2SmaXYe/Y65y9dI2LuXBbMX8Dbb78NQHh4OIULF37iPgcOHEjTpk0BCAsLo0yZMpw+fZrSpUsTHx9P69atKVeuHAAlSpR4ehcjhBBCCPEIEiwK8RpSSpGZmYmRkfyIPy33J5lKv3KGjHv3GHvgHjY+CQSWdcHW1pZSpUo9cb/ly5fX/u3ikp2s6sqVK5QuXZrevXvTvXt31q9fT/369WndurVeeyGEEEKIZ0mWoQrxigkNDWXr1q1MmzYNnU6HTqcjIiICnU7HmjVrqFSpEqampuzYsYO4uDiaN2+Ok5MTVlZWVK5cmY0bN2p9ff7551StWjXXOSpUqMDIkSO11z/++CPe3t6YmZlRunRp/ve//z2Xa31ZrD2aQPf5B/WyEQNcvZ1G9/kHWXs0IdcxOp2OB5NN37t3L1c7Y2NjvWMAsrKyAOjatStnzpzh/fff58iRI/j7+zNjxoz/fD1CCCGEEI9DgkUhXjHTpk0jICCAbt26kZCQQEJCAq6urgB8+umnjB07ltjYWMqXL09ycjJNmjRh06ZNHDp0iMDAQIKCgoiPjwegQ4cO7N27l7i4OK3/Y8eO8eeff9K+fXsAFixYwLBhw/j666+JjY1l9OjRDB06lLlz5z7/i38BMrMUYStj9GqcGtk6g4ERqQmnAAhbGcP1Gzc5efKk1qZgwYIkJPwTRJ46dYq7d+8+8fldXV35+OOPWbp0KQMGDGDWrFn/+lqEEEIIIZ6ErFET4hVja2uLiYkJFhYWODs7A3D8+HEARo4cSYMGDbS2Dg4OVKhQQXs9atQoli1bxooVK+jVqxdlypShQoUK/PzzzwwdOhTIDg6rVq2Kh4cHAMOHD2fSpEm0atUKAHd3d2JiYvj+++8JCQl5Ltf8Iu09ez3XjKKBqQVWZetxc8scDM2sOW9hS+vgaRgYGGizg/Xq1eObb74hICCAzMxMhgwZojeL+Dj69u1L48aN8fLy4saNG2zZsgVvb++ndm1CCCGEEA8jM4tCvAIysxS74hL5PfpvdsUlovJp5+/vr/c6OTmZgQMH4u3tjZ2dHVZWVsTGxmozi5A9u/jzzz8D2c86Lly4kA4dOgBw584d4uLi6NKlC1ZWVtrXV199pTcb+Tq7cjs1z+329bpiUqQ0V34L48ovX1KirJ+2VBdg0qRJuLq6UqtWLdq3b8/AgQOxsLB4onNnZmbSs2dPvL29CQwMxMvL641bAiyEEEKIF0dmFoV4yd2fWCXH9fgb2LveydXW0tJS7/XAgQ
PZsGEDEydOxMPDA3Nzc9q0aUN6errWJjg4mCFDhnDw4EFSUlK4cOEC7dq1A7KDTYBZs2blerbR0NDwqV3jy6yQtVme2w1MLSgYNEh73aFDOVr9OJUPP/wQgMKFC7Nu3Tq9Y27evKn9283NLdczjXZ2dnrb5PlEIYQQQrxIMrMoXqjIyEh0Op3eh+hXTV7F15+W+xOr/DWzM7f2/Q7APWXI5phLeSZWuV9UVBShoaG0bNmScuXK4ezszLlz5/TaFC1alNq1a7NgwQIWLFhAgwYNKFSoEABOTk4ULlyYM2fO4OHhoffl7u7+TK75ZVPF3QEXWzN0D2xPvxzHnZitZNxIwCb5AtO+7A1A8+bNn/8gxWvhTa1ZKoQQ4uUlwaJ4rTyt4PNJAsCEhAQaN24MwIgRI6hYseJTOfeDiVVcQqZgVbERAEa2hUhLOMFnP23m8pWrWvbMB3l6erJ06VL8/f3p0KED7du3z7Nthw4dWLRoEb/++qu2BDVHWFgYY8aMYfr06Zw8eZIjR47QvHlzihQpku/YXyeGBjqGB/kA5AoYb+1dysXwTzg77zPu3r3D9u3bKVCgwPMfpBBCCCHEMyDBongjxcfH88knn1CiRAlMTU1xdXUlKCiITZs2PXFfzs7OmJqaAtnLPh+3j/wCy5zg88HEKoYWthgYZy+JtKnSCnQGHJ7SBWenQnrPIN5v8uTJ2NvbExMTw7Zt22jUqBF+fn7MmTOHqVOnau3atGlDYmIid+/epUWLFnp9dO3alR9//JHw8HDKlStH7dq1OXz4MCYmJo91na+DwLIuzOzoh7PtP0tSTZxK4tfne/44eIbbSTfYsGED5cqVe4GjFEIIIYR4uiRYFM9cVlYWY8aMwd3dHXNzcypUqMCSJUvybb9jxw5q1aqFubk5rq6u9O7dmzt3/nk+Ly0tjSFDhuDq6oqpqSkeHh7Mnj2bc+fOUbduXQDs7e3R6XSEhoYCUKdOHXr16kXfvn1xcHDA09OTzZs3ExoaSqlSpbh8+TJbt26lbdu2ZGRkaOeqU6cOvXv3ZvDgwTg4OODs7MyIESP0xqvT6Vi2bBkZGRlYWVmRkpJCcHAwDg4OWFpa4u/vz549ex7rXq1cuZKgoCBsbW1p5O/JlaVfafvuX4Zq7FCEAk36YuJUEmMTU8aPH8+GDRuwt7fXm9mbOXMmf/31V/YxxsZcunSJDRs24ODgAPwTsC5ZsgQXFxdSUlKwsrLi5s2bdO3alYIFC2JjY8OPP/5IREQEaWlpTJ48mfPnz2tL5tzd3QkPD9eCzJYtW6LT6XBzc9PG8fvvv+Pn54eZmRklSpQgLCxM7z6/CgLLurBjSD0WdqvGtPcqsrBbNXYMqUdgWZcXPTTxL2RlZTF+/Hg8PDwwNTWlWLFifP311wAMGTIELy8vLCwsKFGiBEOHDtWrkZnzczNv3jzc3NywtbXlvffe4/bt21qbtWvXUrNmTezs7HB0dKRZs2a5kkLt3bsXX19fzMzM8Pf359ChQ3r7MzMz6dKli/a7s1SpUkybNu0Z3hUhhBDiAeollpSUpACVlJT0ooci/oOvvvpKlS5dWq1du1bFxcWp8PBwZWpqqiIjI9WWLVsUoG7cuKGUUur06dPK0tJSTZkyRZ08eVJFRUUpX19fFRoaqvXXtm1b5erqqpYuXari4uLUxo0b1aJFi1RGRob67bffFKBOnDihEhIS1M2bN5VSStWuXVtZWVmpQYMGqVq1aqlChQqpkydPKgsLC9WjRw8VGxurli1bphwdHdXw4cMVoGbNmqUcHR0VoBwcHNTMmTPV3LlzlU6nUxMmTFCA+uOPPxSgDA0N1ZYtW9Rnn32mTExMVK1atdT27dvV/PnzVcmSJZWpqamytbVV1atXV+fOnVPh4eEK0Pvq27evMjQ0VICaPn26mrsyUtm91UnZVG2tjOwLK0AZmFkp24B2yrX/b8rIoagyc/NVzd8LUSVLllQlSpRQgDI3N1ft2rVTt27dUq
NGjVJRUVGqatWqKigoSDk5OSl3d/dc527QoIGysLBQEydOVEopVb9+fRUUFKQmTJigzMzM1CeffKIcHR1VYmKiunv3rhowYIAqU6aMSkhIUAkJCeru3bvqypUrClDh4eEqISFBXblyRSml1LZt25SNjY2KiIhQcXFxav369crNzU2NGDHi+X4jCnGfwYMHK3t7exUREaFOnz6ttm/frmbNmqWUUtrPzdmzZ9WKFSuUk5OTGjdunHbs8OHDlZWVlWrVqpU6cuSI2rZtm3J2dlaff/651mbJkiXqt99+U6dOnVKHDh1SQUFBqly5ciozM1MppdTt27dVwYIFVfv27dXRo0fVypUrtZ/hQ4cOKaWUSk9PV8OGDVP79u1TZ86cUfPnz1cWFhbql19+eX43SgghXiISGzx/EiyKpy4jM0vtPH1NLT/0l4o89reysLBQO3fu1GvTpUsXFRwcnCtY7NKli/rwww/12m7fvl0ZGBiolJQUdeLECQWoDRs25HnuB/vLUbt2beXr66sSExOVTqdTo0ePVp9//rkqVaqUysrK0tp9++23ysrKSgGqaNGiytvbW/n7+6vevXsrKysrlZiYqCpXrqyCg4MVoMqXL68ANXPmTJWYmKiaNWumDAwMVGJiorp3756ytbVVAwcOVKdPn1YxMTEqIiJCnT9/Ps+Aq2rVqqpDhw4KUMuWLVMZmVmq2uiNyq5WR+XUYYIytHJUluUbKgNLO2VZrr7CwFD5fbZYDR02TFlZWamaNWsqQH399de5PrjWrl1b9enTR02YMEFVqFBBFS1aVI0cOVINGDBAGRsbqytXrqhu3bqpJk2aqO3btysbGxuVmpqq3nnnHdWpUyellFIlS5ZU33//vVIq+8NyhQoVct3/nLHf7+2331ajR4/W2zZv3jzl4uKS53soxLOS87tp4Y7jysTEVH3//Q+PddyECRNUpUqVtNfDhw9XFhYW6tatW9q2QYMGqapVq+bbx9WrVxWgjhw5opRS6vvvv1eOjo4qJSVFazNz5ky9YDEvPXv2VK1bt36scQshxOtGYoPnT0pniKfqwTIP6VfPc/fuXeq9XR9Dg3/Sg6Snp+Pr65vr+MOHD/Pnn3+yYMECbZtSiqysLM6ePcuRI0cwNDSkdu3aTzy2SpUqcfr0aZRSlC5dmnnz5hEQEKAVUQeoUaOGVi4iNDSU7du3U6ZMGUaPHs306dPZu3cvLi4u3LhxA4CRI0fSokULnJ2dcXBw4NKlS5ibm+Pg4MD169dJSkqiWbNmlCxZEkCvoLqVlRVGRkY4OzsD8Oeff/LRRx9p156TWKV70nvZBxgYYlKgOMYORbl9YAVG1gX4un1Ndi85RlZWFosWLaJo0aL4+Pjw/vvvs2nTJsqXL8/06dPZt28fO3fuxMDAABsbGywsLLC2tiYzM5PixYtTsGBBunbtSvXq1bV74ODgwN27dzEzM+O3334jJSXlX9VWPHz4MFFRUdoSP8heXpeamsrdu3efuPagEP/G/b+b0i6eID09jZmnzCl2NCHXUuJffvmF6dOnExcXR3JyMhkZGdjY2Oi1cXNzw9raWnvt4uLClStXtNenTp1i2LBh7Nmzh2vXrmmJpeLj4ylbtiyxsbGUL19eq8sJEBAQkGvc3377LXPmzCE+Pp6UlBTS09P/cxItIYQQ4nFJsCiempwyD/dXjlP3soNGuxZD+brjW7zlVUjbZ2pqmiv4SE5O5qOPPqJ37965+i9WrBinT59+4nFlZilupdzj0l3FnxduPvZx5cuXZ/v27RgbG2NpaYmNjQ1XrlxBp9NptfD8/f31jjEy+udHysHBgdDQUBo1akSDBg2oX78+bdu2xcUl72fczM3Nc20LLOvCewUu8M2Mb8i8fZUbkeGg02FgaETBggUJLOvC7iV5f3CNj4+nQ4cOhIWFkZqaStmyZfHw8GDSpEl6AVpObcYqVapQpkwZtm/fjouLC506dWLBggVs3LhRC6jt7Owe+/7lSE
5OJiwsjFatWuXad/8HZSGelQd/N+mMsxNSXb2dRvf5B5nZ0U8LGHft2qX93DRq1AhbW1sWLVrEpEmT9Po0NjbWe63T6fQyDQcFBVG8eHFmzZpF4cKFycrKomzZsno1Th9l0aJFDBw4kEmTJhEQEIC1tTUTJkx47GeghRBCiP9KEtyIp+LBMg85jB1dwdCYjFtXmXU4BfcSJbU6fa6urrn68fPzIyYmJldNPw8PD0xMTChXrhxZWVls3bo1z3HkZOjMzMwEsj8k1hy3mZiEW2w9cZWwbddBp2PF1n14e3uza9cuvSLoUVFRWtD1qA+D8E+glcPJyYmUlBSuX78OQHh4OLt27aJ69er88ssveHl5sXv37jzHXr58+VyZVHft2sWET3sxuNt7OBYoRJvQj+n2yQDMjHRcu3yRy5cva2Pdt2+f3lhTU1MpXrw4X3zxBdbW1tjb23P+/Pk8z52ja9euHD16lEuXLvHbb7/x4Ycf4unpqb0HOWUhTExMtHt8P2Nj41zb/fz8OHHiRJ7vqYGB/AoSz1Zev5uM7QujMzIl5fxhAMJWxpCZld1i586d2s+Nv78/np6ej/y5eVBiYiInTpzgyy+/5O2338bb21tbjZDD29ubP//8k9TUfzIeP/i7ISoqiurVq9OjRw98fX3x8PD4V7P74uX1qCRqDyYbq1evHocPZ3/fJiUlYWhoyP79+4HspE0ODg5Uq1ZNO37+/Pna/2vT09Pp1asXLi4umJmZUbx4ccaMGfP8LlYI8UqST2riqXiwzEMOA1MLbKq04vrmHzm1YzVLIw9w8OBBZsyYwdy5c3O1HzJkCDt37qRXr15ER0dz6tQpfv/9d3r16gVkz6CFhITQuXNnli9fztmzZ4mMjGTx4sUAFC9eHJ1Ox6pVq1i07SgfzYnSLz9hbo25mx/zZv9AiYAmXLhwgU8++YTjx4/z+++/M2zYMPr37/+v70O5cuUwNjamRYsWREVFcebMGc6cOUOdOnXYuXMnZcuW5eeffwZyB1zDhw9n4cKFAFy4cIEjR47w1VdfUbx4cYZ++SVWFmbUKFeSrNtXMTIyomTJkoSEhHD58mXu3LnDl19+CaDNAhoZGREfH8+iRYtISUkhOjqaZcuW5XnuHB07duTq1asUL16ckydP4ubmxrlz59i5cydffPGF9qHEzc2Ns2fPEh0dzbVr10hLS9O2b9q0iUuXLmkfjocNG8ZPP/1EWFgYx44dIzY2lkWLFmnjFeJZyut3k87IBJuqrbkZGc7to5uIP3eWOUvXM3v2bDw9PbWfm7i4OKZPn6793Dwue3t7HB0d+eGHHzh9+jSbN2/O9Xulffv26HQ6unXrRkxMDH/88QcTJ07Ua+Pp6cn+/ftZt24dJ0+eZOjQoXp/FBKvh7lz52JpacmePXsYP348I0eOZMOGDQC8++67XLlyhTVr1nDgwAH8/Px4++23uX79Ora2tlSsWJHIyEgAjhw5gk6n49ChQ9rjFFu3btUe25g+fTorVqxg8eLFnDhxggULFuhlrRZCiLxIsCieiiu3cweKOexqdcS2ejuSdv9K+0bVCQwMZPXq1bi7u+dqW758ebZu3crJkyepVasWvr6+DBs2jMKFC2ttZs6cSZs2bejRowelS5emW7duWmmNIkWKEBYWxqeffkpwnfIkbvgu1znsG3YHlcUnXTowePBgtm3bRvny5enUqRNKqf8UxBgaGlKiRAkKFSpEYGAgpUuX5ssvv+TKlSusX7+eU6dOac8tPhhwBQQE8OuvvwLQv39/6tWrx40bN7QPrhkZGWzbto1ly5ah0+lYvnw5ycnJzJo1iwsXLvDFF18A/yzttLCwoF+/fvTq1YsDBw6QkJDA0KFDtXNv27aNW7du6ZWwsLe3p3Xr1ly4cIFixYoxePBgvLy8eO+99zh//jxOTk4AtG7dmsDAQOrWrUvBggW1IHfSpEls2LABV1dX7ZnURo0asWrVKtavX0/lypWpVq0aU6ZMoX
jx4v/6PgvxuPL73WRb4z1sKrfk5vYFXPyxO1980oUrV67wzjvvaD83FStWZOfOndrPzeMyMDBg0aJFHDhwgLJly9KvXz8mTJig18bKyoqVK1dy5MgRfH19+eKLLxg3bpxem48++ohWrVrRrl07qlatSmJiIj169HiyGyBeOplZil1xifwe/Te3Uu5Rrnx5hg8fjqenJ506dcLf359NmzaxY8cO9u7dy6+//qrNck+cOBE7Ozut/FSdOnW0YDEyMpIGDRrg7e3Njh07ANiwYQMLFiwgOjqa+Ph4PD09qVmzJsWLF6dmzZoEBwcTGRmJTqfj5s2bL+iOCCFeZjp1/xq8l8ytW7ewtbUlKSkpV3IB8XLZFZdI8Ky8l1feb2G3agSUdHzh48lIvs6tXb9geflPrl+9TMGCBalUqRL9+vWjTp06Wu3E+wvU29nZMXXqVEJDQ4mMjKRu3brcuHFD7zm+ESNGsHz5cqKjo7l8+TIff/wxe/bsITExERcXF0JCQhg+fDgGBgakpaXRoUMHNm3axM2bNwkPDyc0NDTXuQcPHsycOXNIS0ujadOmVKtWjREjRmj/Y88557fffkvNmjU5ffo0K1euZOrUqZw7dw7I/kBRsWJFpk6dCmQvd/voo484ceIEaWlpektxN2/ezNtvv83ixYt59913n8bbIcQL87L9bhJvtgeTwF36+VPsCpcg4sfvtOdmmzdvjqOjI5UqVaJ37965nmdPSUlh4MCBjBs3jhUrVtCpUycSExN59913adiwIcePH8fMzIzevXtTpEgRAA4dOkRWVhYNGjTA0dGRwMBAmjVrRsOGDfP9/5kQLyOJDZ4/CRbFU5GZpag5bjP/x96dx/WU/X8Af33a952KaZEWpc0uGUKUaGTLEsqSsUQZ+1gqayJkN6HIEsYeslbIUihKKZoSM5ElKZSW8/ujX/fb9anEVBjv5+PxeXznc++55557S9/7vuec93maVyg0bxEABAA0FKVwZXZ3XlbU+nIs4W94hiV8slzgUEv0s2xa7+2pD0eOHIGcnBwMDAzw8OFDeHp6QllZmXuj/KVCQ0Mxbdo0/PPPP9wcUEK+V9/a3yby46oqCdzTvXMg0VgPqrbjuURLTk5OUFJSgrGxMdavX8/1HFampKQENTU1vH79Gqqqqrh+/Tp69+6NK1eu4P79+/Dz88PkyZMxY8YM5OTkID4+HpaWlnjz5g1Onz6N8+fP4+DBg7C1tYWHhwcFi+S7QbFBw6NhqKROVCzzAJQ/fFVW8d3b0aTBHsYay9cuy2Zty9WFli1bQk5OrspP5aVCais/Px+TJ09GixYt4Obmhnbt2uHYsWNf3L53794hPT0dfn5++PXXXylQJP8J39rfJvJjqirR0ruHsSh6ksyN7Ji19TgEAgHu3bsHoDw52D///IMZM2ZAX18fd+7cQb9+/dCyZUu0bdsWAQEBUFJSgrm5OTZs2IDc3FwcPHgQ+/btw40bN+Dt7Y0OHTrw2qGgoAB5eXlER0fj/fv3OHToEJKSknhlQkJCoKSkhDNnzsDY2BhycnKwt7dHdnY2r9y2bdtgbGwMKSkptGjRAps2beL2DRo0iMs1AABeXl4QCAS4f/8+gPJkO7Kysjh//jwA4M8//4SZmRmkpaWhqqoKW1tbbnoJIeTromCR1Bl7U01sHtEaGor8AExDUYqXmr4htG+mAk1FKaGHwwoCAJqKUmjfTKXB2nTq1CkkJCRU+fnll18+u75Ro0YhLS0NhYWFePLkCUJCQqCq+uXD6Pz9/dGiRQtoaGhg7ty5X1wPId+ab+lvE/kxVZVoSUqrJcDKUPYuDwzAk5RbUFJWxcuXLwEAtra2kJSUxK1bt7Bx40Y4OzujU6dOGD16NFxdXbFgwQKEhITAxsYGe/bsgZSUFFatWoX27dujRYsWePToES9YXL16NTZs2ID+/fujc+fOcHR0hKKiIpYsWSLU3nfv3mHVqlUIDQ3FpUuXkJWVhRkzZnD79+
zZg4ULF2Lp0qVISUnBsmXLsGDBAi5xXdeuXXk9otHR0VBTU+O2xcXFobi4GJ06dUJ2djaGDRuGMWPGICUlBVFRURgwYAC+4YFvhPxQaJ1FUqfsTTXR00QDsRmvkJNfiMby5QFZQ7+15xa0330bAoD3Nvdr9SZ86wldfHx8eCnbCfkv+Vb+NpEfU1WJlkQkZSEQl0Jp/gsAQGFWIga4jMO+TStRXFyMf/75B4WFhejWrRtmzpwJxhjOnDmDLl26YN68eXj//j1WrlyJpUuXYu3atZCSkkKXLl0wffp0PH78GPfv3+ctoyEvL4/58+ejuLgYhw4dQrt27RAZGYn9+/cLJVcqLi7Gli1b0Lx5cwCAh4cHFi1axO339vZGQEAAt35us2bNkJycjK1bt8LV1RU2Njbw9PTE8+fl2buTk5OxYMECREVFYcKECYiKikK7du0gIyOD+/fvo6SkBAMGDOD+f9LMzKxufwCEkC9GwSKpc6Iigm8iUURFb0LlZAJAeW+Ct6MJ9SYQ8oP5Vv42kR9PdVMe5CzsUJL7DxhjKHqSjD6Oa5B0OQIjR45EdHQ0mjRpgl27diEpKQn9+vWDt7c3d6y1tTXWrl0LR0dHMMagq6uLtm3bAgDWrl3LS3IGAO7u7jh16hSUlZWxY8cObntWVpZQu2RkZLhAEQA0NTWRk5MDAHj79i3S09MxduxYuLu7c2VKSkqgqKgIADA1NYWKigqio6MhISGBVq1aoW/fvti4cSOA8p5GGxsbAICFhQV69OgBMzMz2NnZoVevXhg0aBCUlZU/5xYTQupJgwWLfn5+mDt3Ljw9PbmMjITUN+pNIIQQ8rW1b6YCJRlxvH5XzNsupW2OF4nnUZyTARFRMTjbdsCN/18OIzc3l1sjsbZkZWXrpL3i4uK87wKBgBsWWrGGY1BQkNCcSFFRUa58ly5dEBUVBUlJSdjY2MDc3BxFRUVISkrC1atXuWGtoqKiOHfuHK5evYqzZ89i/fr1mDdvHm7cuFHlEluEkIbVIHMW4+LisHXrVpibmzfE6QjhqehN6GfZFFbNVSlQJIQQ8k2Q1GoJ9uE93tw8Cjnd8qGXFWsnRkVFcb1vxsbGiImJ4R0bExMDQ0NDLkCrDWNjY8TGxvK2Xb/+6aVlKlNXV0eTJk3w119/QV9fn/epHNxVzFusuA4RERF06dIFK1euRFFREaytrbmyAoEA1tbW8PX1RXx8PCQkJHDkyJHPahchpH7Ue7BYUFAAFxcXBAUF0ZACQshXlZmZCYFAgISEhHo9j5ubG2+NTkLIjy0245VQryIAiErJQbyRLt7ei4JIk5aIzXiFLl264Pbt20hLS+N6FqdPn44LFy5g8eLFSEtLw86dO7FhwwZe0pnamDBhAh48eICZM2ciNTUVe/fuRUhIyGdfj6+vL5YvX45169YhLS0NiYmJCA4OxurVq7kyNjY2SE5Oxr1799C5c2du2549e9C2bVuuF/TGjRtYtmwZbt68iaysLBw+fBjPnz+HsbHxZ7eLEFL36j1YnDx5Mvr06QNbW9tPli0qKsKbN294H0IIqS03NzcIBALuU7H49N27dwEAWlpayM7OhqmpKQAgKioKAoEAr1+/rtN2BAYGftEDGCHkv6mqBDcVpLRMAVYGKW0z5OQXQkVFBSYmJtDQ0ICRkRGA8mU0Dhw4gLCwMJiammLhwoVYtGgR3NzcPqsd2traOHToEI4ePQoLCwts2bIFy5Yt++zrGTduHLZt24bg4GCYmZmha9euCAkJ4fUsmpmZQUlJCZaWlpCTkwNQHiyWlpZyPaZA+XIely5dgoODAwwNDTF//nwEBASgd+/en90uQkjdE7B6zE0cFhaGpUuXIi4uDlJSUrCxsYGlpWW1cxZ9fHzg6+srtJ0W3iSE1IabmxuePXuG4OBgAMDTp08xf/583L17t8okDlFRUbQYNSGk3l1Lf4lhQZ8e7rnPvSMlYSKkBm/evIGioiLFBg2o3noWHz9+DE
9PT27tn9qYO3cu8vLyuM/jx4/rq3mEkP8oSUlJaGhoQENDA5aWlpgzZw4eP36M58+f84ahZmZmolu3bgAAZWVlCAQC7i19WVkZ/P39oa+vD0lJSWhra2Pp0qXcORITE9G9e3duAenx48dzSR8A4WGoNjY2mDp1KmbNmgUVFRVoaGjQMiWE/EC+xbV/CSGkNuotWLx16xZycnLQunVriImJQUxMDNHR0Vi3bh3ExMRQWloqdIykpCQUFBR4H0II+VIFBQXYvXs39PX1oarKf1uvpaWFQ4cOAQBSU1ORnZ2NwMBAAOUvrvz8/LBgwQIkJydj7969UFdXB1CeNt7Ozg7KysqIi4vDwYMHcf78eXh4eNTYlp07d0JWVhY3btyAv78/Fi1ahHPnztXDVRNCvjUVa/8CEAoYv9bav4QQUhv1Fiz26NEDiYmJSEhI4D5t27aFi4sLEhISPit7F/n++fj4wNLS8ms341/5+Bq+JImJrq4uLR1Tx0rLGK6lv8SxhL/xPL8I4eHhkJOTg5ycHOTl5XH8+HHs378fIiL8P3eioqJQUSl/i9+4cWNoaGhAUVER+fn5CAwMhL+/P1xdXdG8eXN07twZ48aNAwDs3bsXhYWF2LVrF0xNTdG9e3ds2LABoaGhePbsWbXtNDc3h7e3NwwMDDBq1Ci0bdsWFy5cqL8bQwj5plSs/auhyB9tpaEohc0jWtPav4SQb1K9rbMoLy/PJZGoICsrC1VVVaHt5Nvl5uaGnTt3AgDExMSgoqICc3NzDBs2DG5ubkIP4PVNIBDgyJEjXxSkPXr0CAAgLS2N5s2bw9PTkwsAvkRgYCA+d8pvXFxcna2DRYCIpGz4nkhGdl558ogXac8h18wCqwPXo4thY+Tm5mLTpk3o3bu3ULr46qSkpKCoqAg9evSodr+FhQXv52htbY2ysjKkpqZyPZAf+3jpoMqLXH+vPp7zGRISAi8vrxoTBvn4+ODo0aP1npGWkG8Rrf1LCPneNOyTPvku2dvbIzs7G5mZmTh9+jS6desGT09P9O3bFyUlJV+7ebW2aNEiZGdnIykpCSNGjIC7uztOnz79xfUpKip+dlKURo0aQUZG5ovPSf4nIikbE3ff5gLFCh8EElh8KRcPC2XRrl07bNu2DW/fvkVQUFCt6pWWlq6P5la5yHVZWVm9nKs2nj59Ck9PT+jr60NKSgrq6uqwtrbG5s2b8e7du1rV0alTJ2RnZ0NRUbGeW1u/li9fjnbt2kFeXh6NGzeGk5MTUlNTv3azyH8Urf1LCPmeNGiwGBUVRUPwvkMVCUOaNm2K1q1b4/fff8exY8dw+vRpbnmA169fY9y4cWjUqBEUFBTQvXt33LlzR6iurVu3QktLCzIyMnB2dkZeXh63Ly4uDj179oSamhoUFRXRtWtX3L59m9uvq6sLAOjfvz8EAgH3HQCOHTuG1q1bQ0pKCnp6evD19RUKZOXl5aGhoQE9PT3Mnj0bKioqvDljtb2GCh8PQ83Pz4eLiwtkZWWhqamJNWvWwMbGBl5eXrxrqPxvICsrC/369YOcnBwUFBTg7OzMG8pYMfQ1NDQUurq6UFRUxNChQ5Gfn19tu34EpWUMvieSUVO/rvMv9vD09IRAIICIiAjev38vVEZCQqK8vv+fQy0QCJCSkgJpaelqh4gaGxvjzp07ePv2LbctJiYGIiIiXJr7b91ff/2FVq1a4ezZs1i2bBni4+Nx7do1zJo1C+Hh4Th//nyt6pGQkICGhgYEgm/3YffDhw+fLBMdHY3Jkyfj+vXrOHfuHIqLi9GrVy/ez5gQQgj5EVHPIvki3bt3h4WFBQ4fPgwAGDx4MHJycnD69GncunULrVu3Ro8ePfDq1SvumIcPH+LAgQM4ceIEIiIiEB8fj0mTJnH78/Pz4erqiitXruD69eswMDCAg4MDFxjFxcUBAIKDg5Gdnc19v3z5MkaNGgVPT08kJydj69atCAkJ4WWvrKysrAyHDh1Cbm4uFy
zU9hpq8ttvvyEmJgbHjx/HuXPncPnyZV6wW1U7+vXrh1evXiE6Ohrnzp3DX3/9hSFDhvDKpaen4+jRowgPD0d4eDiio6Ph5+dXqzbVlYr1CydMmCC0b/LkybxMovUtKioKYqIiuP67LR6t6IvH64bj2UFvfHieCQBgJcUoKciFfPeJsLBxxJQpU1BQUABHR0ehunR0dCAQCBAeHo7nz58DKA+AZs+ejVmzZmHXrl1IT0/H9evXsX37dkRFRWH8+PGQkJCAq6srkpKSEBkZiSlTpmDkyJHVDkH91kyaNAliYmK4efMmnJ2dYWxsDD09PfTr1w8nT56Eo6MjL3NshdevX0MgECAqKgpA7dap9PPzg7q6OuTl5TF27FgUFv6vJzgpKQkiIiLcvX/16hVEREQwdOhQrsySJUu4Bb1LS0sxduxYNGvWDNLS0jAyMuKSElWoeImzdOlSNGnShAvgN23aBAMDA64XddCgQdwxERERcHNzQ8uWLWFhYYGQkBBkZWXh1q1bX3R/CSGEkP8M9g3Ly8tjAFheXt7XbsoPy9XVlfXr16/KfUOGDGHGxsbs8uXLTEFBgRUWFvL2N2/enG3dupUxxpi3tzcTFRVlT5484fafPn2aiYiIsOzs7CrrLy0tZfLy8uzEiRPcNgDsyJEjvHI9evRgy5Yt420LDQ1lmpqa3HcdHR0mISHBZGVlmZiYGAPAVFRU2IMHDxhjrNbXYGFhUeW9efPmDRMXF2cHDx7k9r9+/ZrJyMgwT09PXjvWrFnDGGPs7NmzTFRUlGVlZXH77927xwCw2NhY7pwyMjLszZs3XJmZM2eyDh06VHnP6ourqyvT0tJiioqK7N27d9z29+/fMyUlJaatrc1cXV0bpC2RkZEMAGvivpX9NDmUabiuZVI65kxUvhGTMenGAHAfaVk51q5dO/bnn38yxhjLyMhgAFh8fDxX36JFi5iGhgYTCATc71dpaSlbsmQJ09HRYeLi4kxbW5stW7aMO/eVK1dYt27dmJSUFFNRUWHu7u4sPz+fd78q/7vp2rUr7/eAMcb69evXYPesshcvXjCBQMCWL19eY7mq7lVubi4DwCIjIxlj//tZ5ObmMsYYCw4OZoqKilz5/fv3M0lJSbZt2zZ2//59Nm/ePCYvL8/9OyorK2Nqamrcv5ujR48yNTU1pqGhwdVha2vL5s2bxxhj7MOHD2zhwoUsLi6O/fXXX2z37t1MRkaG7d+/nyvv6urK5OTk2MiRI1lSUhJLSkpicXFxTFRUlO3du5dlZmay27dvs8DAwGqv/cGDBwwAS0xM/NTtJIQQ0oAoNmh41LNIhHycXbK6HC6MMQgEAty5cwcFBQVQVVXlslDKyckhIyMD6enpXHltbW00bdqU+25lZcUlBQGAZ8+ewd3dHQYGBlBUVISCggIKCgqqXEy9sjt37mDRokW8c7u7uyM7O5s392rmzJlISEjAxYsX0aFDB6xZswb6+vpcHbW5hur89ddfKC4uRvv27bltioqKNQ5LTElJgZaWFrS0tLhtJiYmUFJSQkpKCrdNV1cX8vLy3PevlRildevW0NLS4nqTAeDw4cPQ1tZGq1atuG1lZWVYvnw51/tjYWGBP//8k9tf0Rt14cIFtG3bFjIyMujUqRNvjtidO3fQrVs3yMvLQ0FBAW3atMHNmzd57RGVUYSonDIkNfQh37YfSvOfQ7HDAOjMDofO7HBIapnCcbALYmNjMXDgQGRnZ2Py5MmQkpJC//79sXfvXu7eZmdnc/MHX7x4gYEDB2Lp0qUQFxfHn3/+iUePHmHYsGHcuoydO3dGZGQkhgwZgpcvX6JXr16wsrLi1l188uQJ9uzZw7vmj4fgHz16lBvG3ZAePnwIxpjQ76aamhr3ez979uw6OdfatWsxduxYjB07FkZGRliyZAlMTEy4/QKBAF26dOH1VI4ePRpFRUW4f/8+iouLcfXqVXTt2hVA+bxPX19ftG3bFs2aNYOLiwtGjx6NAw
cO8M4rKyuLbdu2oWXLlmjZsiWysrIgKyuLvn37QkdHB61atcLUqVOrbHNZWRm8vLxgbW39XSZjEwgEOHr06Fc7nhBCyH9LvWVDJd+nqrJLSpS+R0RStlBa75SUFDRr1gwFBQXQ1NTkHvgq+5wEMK6urnj58iUCAwOho6MDSUlJWFlZfXLOUUFBAXx9fTFgwAChfVJS/0tRrqamBn19fejr6+PgwYMwMzND27ZtYWJiUmfXUB++pcQoY8aMQXBwMFxcXAAAO3bswOjRo3n3bfny5di9eze2bNkCAwMDXLp0CSNGjECjRo24h34AmDdvHgICAtCoUSNMmDABY8aMQUxMDADAxcUFrVq1wubNmyEqKoqEhASh+6CuIIUXH4DSord4m3IJACAQLf+TJgAgISYCDQVJrvyoUaPw4sULREVFQVxcHL/99luVQbevry/8/f2xcuVKrF+/Hi4uLnj06BG3LuPAgQORmpoKBQUFSEtLIzs7G8OGDYO/vz/69++P/Px8XL58+bMz5dan0jLGZV98/iSvyjKxsbEoKyuDi4sLioqK6uS8KSkpQkOXraysEBkZyX3v2rUr/vjjDwDlcweXLVuGtLQ0REVF4dWrVyguLoa1tTVXfuPGjdixYweysrLw/v17fPjwQWhZHjMzM94Q8549e0JHRwd6enqwt7eHvb09+vfvX2WyqcmTJyMpKQlXrlypi1tACCGEfNcoWCSciuySHz/iFpaUYeLu27x1oC5evIjExERMmzYNP/30E54+fQoxMTFe0pmPZWVl4Z9//kGTJk0AANevX+clBYmJicGmTZvg4OAAAHj8+DFevHjBq0NcXJxLRlKhdevWSE1N5XoJa0NLSwtDhgzB3LlzueQ4tbmG6ujp6UFcXBxxcXHQ1tYGAOTl5SEtLQ1dunSp8hhjY2M8fvwYjx8/5noXk5OT8fr1a17vy6dULG+yfPlyzJkzh9t+9OhR9O/f/4uClsrBRWN5Ka53ecSIEZg7dy63DElMTAymTJmCadOmYdiwYSgqKsKyZctw/vx5WFlZcffmypUr2Lp1Ky9YXLp0Kfd9zpw56NOnDwoLCyElJYWsrCzMnDkTLVq0AAAYGBgItTFp1XAUlZSBFZe/2JDW7wBxVS1ugWtdVVku8cr9+/dx/vx5xMXFoW3btgCAbdu2VVmvm5sbhg0bBgBYtmwZ1q1bh9jYWNjb2/PWZax4iZCeno6SkhIMGDAAOjo6AMqDlW/Fxy+ASt+/AQQCnIiOQ//+/blyenp6AP6XDbZiWZzKvz/FxcV13r6KJFAPHjxAcnIyOnfujPv37yMqKgq5ublc7zMAhIWFYcaMGQgICICVlRXk5eWxcuVK3Lhxg1fnx8vTyMvL4/bt24iKisLZs2excOFC+Pj4IC4ujvcyyMPDA+Hh4bh06RJ++umnWrX/w4cPvMD0v664uFjoxU1d+dHuJSGEfA9oGCoBUHN2yYqEIfN2X0LczVtYtmwZ+vXrh759+2LUqFGwtbWFlZUVnJyccPbsWWRmZuLq1auYN28eb+iglJQUXF1dcefOHVy+fBlTp06Fs7MzNDQ0AJQHBKGhoUhJScGNGzfg4uIitIyBrq4uLly4gKdPnyI3NxcAsHDhQuzatQu+vr64d+8eUlJSEBYWhvnz59d4zZ6enjhx4gRu3rxZ62uojry8PFxdXTFz5kxERkbi3r17GDt2LERERKrNFGlrawszMzO4uLjg9u3biI2NxahRo9C1a1cuoKktKSkprFixgrsn/0ZEUjY6r7iIoVsuwzMsAcOCruNUUjZy3hSiUaNG6NOnD0JCQhAcHIw+ffrwlk14+PAh3r17h549e/KG81Ykiqms8rqDmprlLyEqevp+++03jBs3Dra2tvDz86tyKPDVmCv449BZNB80C2IqTaFiNxnA/xa4VpH930NnamoqxMTE0Lp1a26bvr4+lJWVheqt3C5ZWVkoKCjUOOzXwsICPXr0gJmZGQYPHoygoKA6+T
nUhaqWFxGVVoCUjiV2bf8DR2OrH2LdqFEjAEB2dja37XPXRjQ2NhYK5K5fv877bmZmBmVlZSxZsgSWlpaQk5ODjY0NoqOjERUVBRsbG65sTEwMOnXqhEmTJqFVq1bQ19ev1TBxoHydWFtbW/j7++Pu3bvIzMzExYsXAZQHxB4eHjhy5AguXryIZs2aVVuPjY0NPDw84OXlBTU1NdjZ2dU6EdDJkydhbm4OKSkpdOzYEUlJSdwxL1++xLBhw9C0aVPIyMjAzMwM+/bt453742zKAGBpaQkfH58q2/rhwwd4eHhAU1MTUlJS0NHRwfLly7n9Dx48QJcuXSAlJQUTExNedmgAXJKj/fv3o2vXrpCSksKePXtq1dbaZodevHgxRo0aBQUFBYwfPx4AMHv2bBgaGkJGRgZ6enpYsGCB0IuKEydOoF27dpCSkoKamhrvxUdRURFmzJiBpk2bQlZWFh06dKhy1AghhJBPo2CRAABiM14JrVdXoTDjFp5sHInbK13Qy84ekZGRWLduHY4dOwZRUVEIBAKcOnUKXbp0wejRo2FoaIihQ4fi0aNHvOyQ+vr6GDBgABwcHNCrVy+Ym5tj06ZN3P7t27cjNzcXrVu3xsiRIzF16lQ0btyY15aAgACcO3cOWlpa3Dw5Ozs7hIeH4+zZs2jXrh06duyINWvWcL081TExMUGvXr2wcOHCWl9DTVavXg0rKyv07dsXtra2sLa2hrGxMW8obGUCgQDHjh2DsrIyunTpAltbW+jp6WH//v21Ol9ltra20NDQ4D0IVlabhzsbGxv8Mmw0nN0mIHbxADw7sBAlec/waEVfFOS9RsKTPEQkZcPZ2Rk+Pj7YsmUL+vTpw83j27dvHzfH6+TJk7hx4wYGDBgASUlJbuhsRQZbgD+8tiKgrhhe6+Pjg3v37qFPnz64ePEiTExMcOTIEV57mzVrhnGOPyN1vx9+dR8Hxasbsc+9I67M7i40ZPpzfO6wX1FRUZw7dw6nT5+GiYkJ1q9fDyMjI2RkZHxxG+pCTS+AVHpNAmOlGNa3O/btC0NKSgpSU1Oxe/du3L9/H6KiopCWlkbHjh3h5+eHlJQUREdHf/IFzMc8PT2xY8cOBAcHIy0tjZvjXFnFvMU9e/ZwgaG5uTmKiopw7tw5hIWFcWUNDAxw8+ZNnDlzBmlpaViwYAEuXbr0yXsdHh6OdevWISEhAY8ePcKuXbtQVlbGjWqYPHkydu/ejb1790JeXh5Pnz7F06dPq1xuBQB27twJCQkJxMTEYMuWLbW+HzNnzkRAQADi4uLQqFEjODo6ckFQYWEh2rRpg5MnTyIpKQnjx4/HyJEjERsbW+v6P7Zu3TocP34cBw4cQGpqKvbs2cONnCgrK8OAAQMgISGBGzduYMuWLdXOVZ0zZw48PT2RkpICOzu7WrW1ttmhV61aBQsLC8THx2PBggUAyl++hYSEIDk5GYGBgQgKCsKaNWu4Y06ePIn+/fvDwcEB8fHxuHDhAm++uIeHB65du4awsDDcvXsXgwcPhr29PR48ePDF95IQQn5YXzO7zqdQxqOGczT+CdOZHf7Jz9H4J5+ujDDGGCsoKGCKiops27Zt9Xqeisybhw8fZlJSUuzx48eMMcaOHDnCKv6JP3nyhK1cuZLFx8ez9PR0tm7dOiYqKspu3LjB1dOla1cmIiHNFNoPYE3GbWFNxm1hTSdsZwCYVPP2TNqgI+u47Dx7lvOcAWBqamqsqKiIHTp0iAFgTk5O7MGDB0xCQoLt2rWLTZ06lTVp0oSdOnWK3bt3j7m6ujJlZWV27NgxXgZNxhiLj49nAFhGRkaV1zh06FDm6OjIGBPOwMkYY2/fvmXKysrs8OHD3LbKGUhTUlIYAHbz5k1uf0XGy4rstIxVnW1XUVGRBQcHM8YYi4mJYQDYixcvqv15lJSUsKZNm7KAgIBqyzSEqw9f1PhvuenkXUy+dV/WRKs846ucnBxr3749W7lyJXv79i1jjLHk5G
RmZWXFpKWlmaWlJTt79myV2VCHDRvGy0LbuHFjZmtry7Zv386WLFnC1NTUmJycHHN2dmbTp0/nZRVmjLE1a9YwAOz06dPctn79+jERERFmamrKbSssLGRubm5MUVGRKSkpsYkTJzJTU1OmoKDAlakqg/Ply5dZ165dmbKyMpOWlmbm5ua8DKqV2175ExwczEpKy9jVhy/Y0fgn7OrDF6xL166sVatW3LGfkzU2LCyMK/Py5UsmLS3Na8fH+vTpw6ZPn859r5xNuYKFhQXz9vbmXUvF7/CUKVNY9+7dWVlZmVDdZ86cYWJiYuzvv//mtp0+fZp3fMW1rV27tto2VtXWz8kO7eTk9Mm6V65cydq0acN9t7KyYi4uLlWWffToERMVFeVdF2PlWbPnzp37yXMRQr5tFBs0PJqzSAAAjeWr7v360nI/ovj4eNy/fx/t27dHXl4eFi1aBADo169fnZ+Ll7AkvwhiDOjfvz8sLS3h7e2N7du388o3bdoUM2bM4L5PmTIFZ86cwYEDB7g38vnviyGm3ATK3cZw5UrynvHqyc4rRMKTNwCAkJAQSEhIcPP4pKWloa+vj5kzZ8LLywt5eXlYuXIl1NXVceHCBfz88884d+4cTp06VeO1vX//HjNnzsSgQYPQrFkzPHnyBHFxcRg4cGC1x8jIyMDd3R3e3t5wcnISGvrbokUL2NraYvz48di8eTPExcUxffp0SEtLf9aC8pXXZXRwcIC0tDTu3buHCxcuoFevXmjcuDFu3LiB58+fw9jYuNb11oec/KpHClQQk1OBSs8JCBxqiX6WTassY2xsjKtXr/K2sUpzGG1sbMAYg5ubG+zt7REcHIzS0lI8e/YMERER8PT0xM8//4zs7GyIiVX/fzdeXl684YlA+ZxbHx8fXmZOSUlJBAcHIzg4mNvm5uaG5s2bc9+ryjDbuXPnGochsmrm9VYMya486uJVVi6sW39ZltSKebwAoKKiAiMjIy7zcWlpKZYtW4YDBw7g77//xocPH1BUVFRlEp7acnNzQ8+ePWFkZAR7e3v07dsXvXr1AvC/bMwVc8g/bl9lHw+L/1RbPyc7dFVD7vfv349169YhPT0dBQUFKCkpgYKCArc/ISEB7u7uVbY1MTERpaWlMDQ05G0vKiqCqqpqlccQQgipHg1DJQCA9s1UoKkoheoemwUANBWl0L6ZSkM267tTMaTK1tYWb9++xeXLl6Gmplan56h4gB0WdB2eYQmITnuOyw+eIyIpGytWrMDOnTt5S28A5Q93ixcvhpmZGVRUVCAnJ4czZ87wliX5UFoGCfXmH59OyPOC8gfnj5OIVFi8eDHGjRuH0tJSzJo1C/b29jh58iQMDAzQvn17LjlOdURFRfHy5UuMGjUKhoaGcHZ2Ru/eveHr61vjcR4eHkhJScHBgwer3L9r1y6oq6ujS5cu6N+/P9zd3SEvL1/tMOGqNG3aFL6+vpgzZw7U1dXh4eEBBQUFXLp0CQ4ODjA0NMT8+fMREBCA3r1717re+tDQL4AkJSWhoaGBpk2bonXr1vj9999x7NgxnD59mgvgPl6WoTZz0wBg69at0NLSgoyMDJydnZGXV3VGV6A8KKgYwi4lJYXOnTvzhj/XVlXzPQHgQ0kZYjILEJFUPpezrhIBrVy5EoGBgZg9ezYiIyORkJAAOzs7XjZoERERocC2pnO1bt0aGRkZWLx4Md6/fw9nZ2cMGjTos9v28b/12rT1S+u+du0aXFxc4ODggPDwcMTHx2PevHm8uj+ey15ZQUEBREVFcevWLSQkJHCflJQUBAYGfnb7CCHkR0c9iwQAICoigLejCSbuvg0BwJvnVBFAejuaQFSk9r0wP5pWrVrh1q1b9XqO2mSstbOzw9y5c+Hm5sbtr3i4W7t2LczMzCArKwsvLy/eA5iEqAgE+Chw+P9eN+WfXbhAUllStMq2Vcw/FQgEcHFxgb+/Px4+fMibO7pmzRpoamoKPfBaWlrytn08n7
Kyit6sj2lpafEenD/uSdLU1OT1aj558gQ5OTm8LLpV1fv69Wve9wULFnBzqypERERU296vpeIF0NO8wirnLQpQngyoPl8Ade/eHRYWFjh8+DDGjRsntL9iblqTJk2QmJjIBfCzZs3iyjx8+BAHDhzAiRMn8ObNG4wdOxaTJk3irWNZ2axZs3Do0CHs3LkTOjo68Pf3h52dHR4+fMj1gn9KTfM9K/ieSEZPEw1eIqCKedTVJQK6fv06ly05NzcXaWlpXA90TEwM+vXrhxEjRgAon1OYlpbGy4zcqFEjXsKhN2/efHK+poKCAoYMGYIhQ4Zg0KBBsLe3x6tXr7hszNnZ2VyCqY+TD1XnU239kuzQFa5evQodHR3MmzeP2/bxCyZzc3NcuHABo0ePFjq+VatWKC0tRU5ODn7++edaXQ8hhJDqUc8i4dibamLziNbQUOQHDBXZJf9N0hDy79X2AXbpsuU4ceIErl27xm2v/HBnYWEBPT09pKWl8Y6VlxaHrKQYr3dZRLo802lpwSuud1kkl//gVpHqvvKSJs2bN+cSgFQoLi5GXFzcZy0LUpcuXryI48ePIyMjA1evXsXQoUOhq6v7yYfX71XFCyAAQiMGvvQFUEVWz5evcnEt/SWOJfyNa+kvUdPqLC1atEBmZmaV++bPn49OnTpBV1cXL1++RF5eHg4cOAAAXBKlwsJC7Nq1C5aWlujSpQvWr1+PsLAwBAYGCq2B+vbtW2zevBkrV65E7969YWJigqCgIEhLSwsNza7Jxwm/nmwegzdxx3hlsvMKEZvx6rMSAS1atAgXLlxAUlIS3NzcoKamBicnJwDlyXvOnTuHq1evIiUlBb/++iuePeMPA+/evTtCQ0Nx+fJlJCYmwtXVFaKiVb+8AcqTbu3btw/3799HWloaDh48CA0NDSgpKcHW1haGhoa8DNWVA7SafKqtX5IdunLdWVlZCAsLQ3p6OtatWyeU3Mrb2xv79u2Dt7c3UlJSkJiYiBUrVgAADA0N4eLiglGjRuHw4cPIyMhAbGwsli9fjpMnT9bq+gghhPwPBYuEx95UE1dmd8c+944IHGpZJ9klSd2oKWMtUN4bnJ1XiPdyTeHi4oJ169Zx+2rzICoA0EFPhftvABARl4REEyPkXf8TH148Rn+NPHgv5PeqVZ7H9/z5cxQUFEBWVhYTJ07EzJkzERERgeTkZLi7u+Pdu3cYO3ZsXdyOz1ZcXIzff/8dLVu2RP/+/dGoUSNERUXV25px34K6fgHUqVMn7IlMQJ8ttzAs6DrGzlsF65Y63NIqVWGMVRsg7N+/H9bW1tDQ0MCvv/6KwsJC3tBoANDW1kbTpv+bU2llZYWysjIYGxsLvfBIT09HcXExrK2tuW3i4uJo37690NDsmnxqvufH5Xbs2IGSkhK0adMGXl5eWLJkSZXl/fz84OnpiTZt2uDp06c4ceIE97Jl/vz5aN26fGSAjY0NNDQ0uECywty5c9G1a1f07dsXffr0gZOTE2++5sfk5eXh7++Ptm3bol27dsjMzMSpU6cgIiICERERHDlyBO/fv0f79u0xbtw4LF26tFbXXZu2fm526Aq//PILpk2bBg8PD1haWuLq1atCPfk2NjY4ePAgjh8/DktLS3Tv3p2XiTU4OBijRo3C9OnTYWRkBCcnJ14vJyGEkNqjYahEiKiIAFbNKRHAt+ZzHmAXLVrEW4Jj/vz5+Ouvv2BnZwcZGRmMHz8eTk5OQnO/dFVlMWFEa94i7mq9vfDm3Hq83DMN+261gL+/P5ckA+DP4xs9ejRGjRqFkJAQ+Pn5oaysDCNHjkR+fj7atm2LM2fOVLm2YUOws7ODnZ3dVzn312RvqomeJhpcQqTG8uVDT79kSPnFtJeYF/FEqHf7/YdSJDx5i4ikbKEANCUlpcp1Cyvmpvn6+sLOzg4XLlyAj49Pree9SUpKCi2tU1dqmsepMdxPqNynEgFV6Ny5M29txcpUVFR48zmroqCgwFtKBA
BcXV2rPa+7u3u1iWCA8l64y5cvV3u8rq5ulddRm7bKy8vzhgq/ffsWvr6+3FqKAKrtcfb394e/vz9v28cJkAYMGIABAwZUeby4uDh8fX0/Oc+ZEELIp1HPIvkuubm5QSAQwM/Pj7f96NGjn5Xd0sbGBgKBQOhTUlJS103+16p7gFXrMw2NB8znldPV1UVRURH3oFfxcJefn49nz55h8eLF2LlzJ++BLyoqCmvXrhXqXf5z7mC8zkhC4fv3iI+PR8+ePcEY4y2WvmDBAmRnZ6OsrIxLZiIlJYV169bh+fPnKCwsxJUrV9CuXbs6vy/k0ypeAHk6WePGiVBeoFh5UXeBQIBt27ahf//+kJGRgYGBAY4fPw6gfBj0b4F7kbmiL8oKC1CYdRcvT60FK3qLt0kX8P7BdYzznI3SMsYtiq6mpobExEQkJycLzSGtSDaydOlSLF26FGJiYlWuZ5mVlYV//vkH6enp0NPTw8iRIyEQCJCQkMAbhurj44MRI0ZAVFQUbdu2haKiIoYOHYpXr15xw59rs1B8Tk4Olnq6IStgAP7eMhYF9yKF2lT6Jgdvji9FTwsdKCgowNnZmddT7+PjA0tLS+zYsQPa2tpcsqPS0lL4+/tDQ0MDjRs3rnVP3vcoPj4e+/btQ3p6Om7fvg0XFxcA9ZMdmhBCSP2hYJF8t6SkpLBixQrk5ub+q3rc3d2RnZ3N+9SU5v9raciMtRXBRT/LprBqrkqJjX4gvr6+cHZ2xt27d+Hg4AAXFxe8evUKsRmv8Ort/3r9JJsaQ7mHOwQSMpA26gxJLXOw5l1w8to9DB06FAcPHsS7d+/QrVs3TJgwAfb29tyxN27cwMGDB8EYw9KlS2FmZlZtJlQpKSkMGDAAHTt2ROfOnZGamoohQ4ZAUVFRqGxGRgZ0dXUhEAiwYMECXLhwATY2Ntzw59osFO/m5oYnTx4jIOQQGjnNRUH8KZS9q9QDz8qQc2gJmkiXIjo6GufOncNff/2FIUOG8OpJT0/H6dOnERERwQ2jHDJkCJ48eYLo6GisWLEC8+fPx40bN77o5/Q9aIjs0IQQQuoXBYvku2VrawsNDQ0sX7682jKHDh1Cy5YtISkpCV1dXQQEBAiVkZGRgYaGBu8DfDqtf0Xvweek9f836iNhCfnvKy1jXDKaopIylNWUjQblwdKwYcOgr6+PZcuWoaCgALGxsULDoAWi4hCRlAUEAoiIS6Lo8V38E+SO/l0tcfToUejo6GDjxo04f/48Zs2ahc6dO3PHBgYGwt7eHtOnT8fSpUuxZs0aqKmpQVJSUqg9mpqaSExMxIcPH3Dw4EGYm5tzmXc/VlZWhmvXrmHo0KHci6SMjAycOXMGYmJi2LlzJ1atWoUePXrA1NSUWxeyQlpaGk6fPo2goCBMc+mLHTOc0WLwTLCSIq6M9PMUlLx8hNNH/0SbNm3QoUMH7Nq1C9HR0bwlOsrKyrBjxw6YmJjg999/h52dHdLT07F27VoYGRlh9OjRMDIyQmSkcM/lf0FFduiCggK8evUK586dg5mZ2dduFiGEkM9EwSL5bomKimLZsmVYv349njx5IrT/1q1bcHZ2xtChQ5GYmAgfHx8sWLCgykW7q1KR1j85ORmBgYEICgrCmjVreGUqp/WPiIhAfHw8Jk2aVBeXVyXKWEs+x8drcj7PL8K6Cw+4NQKrYm5uzv23rKwsFBQUkJOTU+M8PrU+06AzOxw6s8OxYksoAOD27duYMmUKFBQUICcnh+joaDg7O8PJyQkpKSno0KED/P398eLFC+Tn52P69OmQlJTkLVXy4cMH/PPPP1i6dCny8vLw/v17HDx4kDfvNSQkhBtOrauri0aNGnHDn1euXIlGjRqhXbt2tVooPiUlBWJiYmjTpg2A8n9vtwLcIK+giP6tm2Kfe0f8aiEJbS0taGlpcceZmJhASUmJl0RHV1cX8vLy3Hd1dXWYmJhw6zJWbMvJyan2vhJCCCFf27c31o6QapSWMS
5Jx/P8IogxoH///rC0tIS3t7dQavzVq1ejR48e3BAwQ0NDJCcnY+XKlbw1CDdt2oRt27Zx33/99VcEBATw0t/r6upixowZCAsL460BV5HWvyJb4/r169GnTx8EBARwPZR1rS4TlpD/rqrW5BQIBHjzvphbk9PeVFNo6OfH2WEFAgHKysrQvpkKVGQlwM+hW6kcyl9aNJXN5xZF/3hZBzk5uc+6hkaNGqFJkybYt28fxowZAwUFhRrLV9f2f0NURABREQHMmirCqrkq4mo5J7qqttRH+wghhJD6RMEi+S5EJGXzMnS+SHsOidL3iEjKxooVK9C9e3fMmDGDd0xKSopQMgVra2usXbsWpaWl3IOsi4sLb32xiqQZ+/fvx7p165Ceno6CggKUlJQIPaxWl9Y/NTW13oJFgDLWkppVtyaniIwiSgteAShfk7PDTzKfXNS9gqiIAG6ddDH7f+9VIBAVA1gZbxi0nkT+JxdFNzY2FpqrV9WC8NLS0ggPD4eDgwPs7Oxw9uxZXm/d56jNQvEtWrRASUkJbt26xSVjSk1N5fV2Vixm//jxY653MTk5Ga9fv/5qa4gSQggh9YWGoZJvXkUPycdrDBaWlGHi7tt4p2IAOzs7zJ0794vqV1RUhL6+PvdRU1Pj0vo7ODggPDwc8fHxmDdvXq3T+hPyNVW3JqeUjjne3ovE+8dJePTwPpych9e4qPvH2jcrf0GhrlA+JFVUQR3sw3tIP0+GXx8ddNFTrNWi6FOnTkVERARWrVqFBw8eYMOGDYiIiKjynLKysjh58iTExMTQu3dvFBQUfO7tAFC7heKNjIxgb2+PX3/9FTdu3MCtW7cwbtw4SEtLc/XY2trCzMwMLi4uuH37NmJjYzFq1Ch07doVbdu2/aK2EUIIId8qChbJN626HpLKfE8kY+my5Thx4gSuXbvGbTc2NkZMTAyvbExMDAwNDT/5gHz16lXo6Ohg3rx5aNu2LQwMDPDo0SOhchVp/Stcv34dIiIivHlQhDS06tbkVOzoDEktU+T8uQg5B31h2blnjYu6VyfCqwv2uXfE1hnD0H+4G3KOrcDQLmbc2nifWhS9Y8eOCAoKQmBgICwsLHD27FnesO+PycnJ4fTp02CMoU+fPnj79u1ntxmo3ULxwcHBaNKkCbp27YoBAwZg/PjxvPUcBQIBjh07BmVlZXTp0gW2trbQ09PjrWtKCCGE/FcIWFUr7n4j3rx5A0VFReTl5X1yrgr5b7qW/hLDgoSHp704uQZlRW+59QX3uXfEZt9pOHjwIAoLC8EYw+3bt9GuXTv4+PhgyJAhuHbtGiZOnIhNmzZxcxZtbGxgaWmJtWvX8uo/fvw4Bg4ciNDQULRr1w4nT56Er68vSktLuSFpPj4+WLVqFaysrLBq1Sq8efMG48aNQ+vWrbFv3776vC2E1Ki6fzcf2+fe8Ycezvz27Vs0bdoUAQEBGDt27NduDiGEkE+g2KDhUc8i+aZV10NSVblFixbxkkW0bt0aBw4cQFhYGExNTbFw4UIsWrSIl9ymOr/88gumTZsGDw8PWFpa4urVq1yinMr09fUxYMAAODg4oFevXjWm9SekoTTkmpzfE1oonhBCCPk81LNIvmnfcg+Jj48Pjh49ioSEhAY9LyG1UTHXFwA/I+r//++PuNRKfHw8xo0bh9TUVEhISKBNmzZYvXo1rf9HCCHfCYoNGh5lQyXftIoekqd5hVXOW6xI1/+j9ZAQ8ikVa3JWziIMlP978XY0+eECReB/C8UTQgghpHYoWCTfNFERAbwdTTBx920IUHUPibejCa0xSEgVaE1OQgghhPwbNAyVfBc+XmcRKJ9z9aP2kBBCCCGE/GgoNmh41LNIvgvUQ0IIIYQQQkjDomCRfDdERQQ/dJp/QgghhBBCGhItnUEIIYQQQgghRAgFi4QQQgghhBBChFCwSAghhBBCCCFECAWLhBBCCCGEEEKEULBICCGEEEIIIUQIBYuEEEIIIYQQQo
RQsEgIIYQQQgghRAgFi4QQQgghhBBChFCwSAghhBBCCCFECAWLhBBCCCGEEEKEULBICCGEEEIIIUQIBYuEEEIIIYQQQoRQsEgIIYQQQgghRAgFi4QQQgghhBBChFCwSAghhBBCCCFECAWLhBBCCCGEEEKEULBICCGEEEIIIUQIBYuEEEIIIYQQQoRQsEgIIYQQQgghRAgFi4QQQgghhBBChFCwSAghhBBCCCFECAWLhBBCCCGEEEKEULBICCGEEEIIIUQIBYuEEEIIIYQQQoRQsEgIIYQQQgghRAgFi4QQQgghhBBChFCwSAghhBBCCCFECAWLhBBCCCGEEEKEULBICCGEEEIIIUQIBYuEVCMzMxMCgQAJCQlfuylfpDbtj4qKgkAgwOvXrwEAISEhUFJS+lfnrYs6vgYbGxt4eXl9M/XUlo+PDywtLWss09BtIoQQQsh/AwWL5D/Fzc0NAoEAEyZMENo3efJkCAQCuLm5NXzD6kiLFi0gKSmJp0+f1kv9Q4YMQVpaWq3L6+rqYu3atf+qju/Vx4F2hcOHD2Px4sVfp1HV+BbbRAghhJBvHwWL5D9HS0sLYWFheP/+PbetsLAQe/fuhba29lds2b9z5coVvH//HoMGDcLOnTvr5RzS0tJo3LjxV6+jrn348KHBzqWiogJ5efkGO19tfIttIoQQQsi3j4JF8p/TunVraGlp4fDhw9y2w4cPQ1tbG61ateK2RUREoHPnzlBSUoKqqir69u2L9PT0austLS3FmDFj0KJFC2RlZQEAjh07htatW0NKSgp6enrw9fVFSUkJgKqHgb5+/RoCgQBRUVEA/tc7dfLkSZibm0NKSgodO3ZEUlKS0Pm3b9+O4cOHY+TIkdixY4fQ/tjYWLRq1QpSUlJo27Yt4uPjhcqcOnUKhoaGkJaWRrdu3ZCZmcnbX9UQ0hMnTqBdu3aQkpKCmpoa+vfvD6B8aOOjR48wbdo0CAQCCASCauvYvHkzmjdvDgkJCRgZGSE0NJS3XyAQYNu2bejfvz9kZGRgYGCA48ePC7W/tmxsbODh4QEvLy+oqanBzs4OSUlJ6N27N+Tk5KCuro6RI0fixYsX1dYRGhqKtm3bQl5eHhoaGhg+fDhycnIAlP9su3XrBgBQVlbm9Vh/POQzNzcXo0aNgrKyMmRkZNC7d288ePCA219xv86cOQNjY2PIycnB3t4e2dnZXJmoqCi0b98esrKyUFJSgrW1NR49eiTUXl1dXSgqKmLo0KHIz8/n3Y/KbdLV1cXixYsxbNgwyMrKomnTpti4cSO3nzEGHx8faGtrQ1JSEk2aNMHUqVNr/wMghBBCyH8CBYvkP2nMmDEIDg7mvu/YsQOjR4/mlXn79i1+++033Lx5ExcuXICIiAj69++PsrIyofqKioowePBgJCQk4PLly9DW1sbly5cxatQoeHp6Ijk5GVu3bkVISAiWLl362e2dOXMmAgICEBcXh0aNGsHR0RHFxcXc/vz8fBw8eBAjRoxAz549kZeXh8uXL3P7CwoK0LdvX5iYmODWrVvw8fHBjBkzeOd4/PgxBgwYAEdHRyQkJGDcuHGYM2dOje06efIk+vfvDwcHB8THx+PChQto3749gPIA/KeffsKiRYuQnZ3NC24qO3LkCDw9PTF9+nQkJSXh119/xejRoxEZGckr5+vrC2dnZ9y9excODg5wcXHBq1evPus+VrZz505ISEggJiYGfn5+6N69O1q1aoWbN28iIiICz549g7Ozc7XHFxcXY/Hixbhz5w6OHj2KzMxMLiDU0tLCoUOHAACpqanIzs5GYGBglfW4ubnh5s2bOH78OK5duwbGGBwcHHg/33fv3mHVqlUIDQ3FpUuXkJWVxf38SkpK4OTkhK5du+Lu3bu4du0axo8fzwXnAJCeno6jR48iPDwc4eHhiI6Ohp+fX433Z+XKlbCwsEB8fDzmzJkDT09PnDt3DgBw6NAhrFmzBlu3bsWDBw9w9OhRmJmZff
qmE0KqRPOGCSHfLfYNy8vLYwBYXl7e124K+YaVlJaxqw9fsKPxT5jDgKHsl1/6sZycHCYpKckyMzNZZmYmk5KSYs+fP2f9+vVjrq6uVdbz/PlzBoAlJiYyxhjLyMhgANjly5dZjx49WOfOndnr16+58j169GDLli3j1REaGso0NTV5x8fHx3P7c3NzGQAWGRnJGGMsMjKSAWBhYWFcmZcvXzJpaWm2f/9+btsff/zBLC0tue+enp6869i6dStTVVVl79+/57Zt3ryZd/65c+cyExMTXntnz57NALDc3FzGGGPBwcFMUVGR229lZcVcXFyqvF+MMaajo8PWrFnD2/ZxHZ06dWLu7u68MoMHD2YODg7cdwBs/vz53PeCggIGgJ0+fbrac9eka9eurFWrVtz3xYsXs169evHKPH78mAFgqamp3DGenp7V1hkXF8cAsPz8fMbY/352Ffeu8rkr6klLS2MAWExMDLf/xYsXTFpamh04cIAxVn6/ALCHDx9yZTZu3MjU1dUZY+W/DwBYVFRUle3y9vZmMjIy7M2bN9y2mTNnsg4dOlTZJsbKf2729va8eoYMGcJ69+7NGGMsICCAGRoasg8fPlR7Pwj52j71b/Zb8j21lZBvGcUGDY96Fsl3LSIpG51XXMSwoOvwDEtAdNpzXH7wHLeelaBPnz4ICQlBcHAw+vTpAzU1Nd6xDx48wLBhw6CnpwcFBQXo6uoCADfEtMKwYcPw9u1bnD17FoqKitz2O3fuYNGiRZCTk+M+7u7uyM7Oxrt37z7rOqysrLj/VlFRgZGREVJSUrhtO3bswIgRI7jvI0aMwMGDB7mhhikpKdww1qrqrCjToUOHas9blYSEBPTo0eOzruVjKSkpsLa25m2ztrbmXR8AmJubc/8tKysLBQUFbtjnp5SWMVxLf4ljCX/jWvpLMABt2rTh9t+5cweRkZG8n1WLFi0AoNqhx7du3YKjoyO0tbUhLy+Prl27AhD+/ahJSkoKxMTEePddVVVV6OcrIyOD5s2bc981NTW5a1dRUYGbmxvs7Ozg6OiIwMBAoV5cXV1d3pzEysdX5+OfvZWVFdemwYMH4/3799DT04O7uzuOHDnCDa8m5L+mIec0E0LI94aCRfLdikjKxsTdt5GdV8jbXlhShom7b8OihxNCQkKwc+dOjBkzRuh4R0dHvHr1CkFBQbhx4wZu3LgBQPjBwcHBgRv+V1lBQQF8fX2RkJDAfRITE/HgwQNISUlBRKT8nxdjjDum8tDD2kpOTsb169cxa9YsiImJQUxMDB07dsS7d+8QFhb22fV9Dmlp6XqtvzJxcXHed4FAUOWQ4I99/MJgWNB1xGfl4nmlX4uCggJu+G3lz4MHD9ClSxehOt++fQs7OzsoKChgz549iIuLw5EjRwDUz4NlVdde+fcmODgY165dQ6dOnbB//34YGhri+vXrNR5fm3tXHS0tLaSmpmLTpk2QlpbGpEmT0KVLly/6/SUEKB+GOWXKFHh5eUFZWRnq6uoICgrC27dvMXr0aMjLy0NfXx+nT5/mjomOjkb79u0hKSkJTU1NzJkzh3tp4ebmhujoaAQGBnJzpivmYNd0XEVbPp7TDAD37t1D3759oaCgAHl5efz8889IT0/HpUuXIC4uLpSF2svLCz///DP3PSYmBjY2NpCRkYGysjLs7OyQm5tb5f0oKirCjBkz0LRpU8jKyqJDhw7cXHYAePToERwdHaGsrAxZWVm0bNkSp06d+lc/A0II+RIULJLvUmkZg++JZLAaypzKVceHDx9QXFzMPQxUePnyJVJTUzF//nz06NEDxsbG1f6f+sSJE+Hn54dffvkF0dHR3PbWrVsjNTUV+vr6Qh8RERE0atQIAHi9QNWteVj5wT83NxdpaWkwNjYGUJ7YpkuXLrhz5w4v0Pntt9+wfft2AICxsTHu3r2LwsLCKuusKBMbG1vteatibm6OCxcuVLtfQkICpaWlNdZhbGyMmJgY3raYmB
iYmJjUeFxtVPfC4ENJGS6m5CAiqfzet27dGvfu3YOurq7Qz0pWVlao3vv37+Ply5fw8/PDzz//jBYtWgj11ElISABAjddvbGyMkpIS7kUE8L/fvc+9/latWmHu3Lm4evUqTE1NsXfv3s86/mMf/+yvX7/O/c4B5S8KHB0dsW7dOkRFReHatWtITEz8V+ckP7adO3dCTU0NsbGxmDJlCiZOnIjBgwejU6dOuH37Nnr16oWRI0fi3bt3+Pvvv+Hg4IB27drhzp072Lx5M7Zv344lS5YAAAIDA2FlZcWN5sjOzoaWltYnj6vcloo5zVu2bMHff/+NLl26QFJSEhcvXsStW7cwZswYlJSUoEuXLtDT0+Ml5iouLsaePXu4F5EVozBMTExw7do1XLlyBY6OjtX+ffDw8MC1a9cQFhaGu3fvYvDgwbC3t+eSX02ePBlFRUW4dOkSEhMTsWLFCsjJydXHj4UQQmok9rUbQMiXiM14JRQgVMYAPM0vxs6TMWivpwJRUVHefmVlZaiqquKPP/6ApqYmsrKyakz2MmXKFJSWlqJv3744ffo0OnfujIULF6Jv377Q1tbGoEGDICIigjt37iApKQlLliyBtLQ0OnbsCD8/PzRr1gw5OTmYP39+lfUvWrQIqqqqUFdXx7x586CmpgYnJycUFxcjNDQUixYtgqmpKe+YcePGYfXq1bh37x6GDx+OefPmwd3dHXPnzkVmZiZWrVrFKz9hwgQEBARg5syZGDduHG7duoWQkJAa77O3tzd69OiB5s2bY+jQoSgpKcGpU6cwe/ZsAOXDHy9duoShQ4dCUlJSaKgvUJ68x9nZGa1atYKtrS1OnDiBw4cP4/z58zWe+1Nq88LA90QyeppoYPLkyQgKCsKwYcMwa9YsqKio4OHDhwgLC8O2bduEfj+0tbUhISGB9evXY8KECUhKShJap1BHRwcCgQDh4eFwcHCAtLS00MOcgYEB+vXrB3d3d2zduhXy8vKYM2cOmjZtin79+tXqOjMyMvDHH3/gl19+QZMmTZCamooHDx5g1KhRtTq+OjExMfD394eTkxPOnTuHgwcP4uTJkwDKM7SWlpaiQ4cOkJGRwe7duyEtLQ0dHZ1/dU7yY7OwsOD+Bs6dOxd+fn5QU1ODu7s7AGDhwoXYvHkz7t69ixMnTkBLSwsbNmyAQCBAixYt8M8//2D27NlYuHAhFBUVISEhARkZGWhoaHDn2LRpU43HVYz4MDAwgL+/P3fc77//DkVFRYSFhXE99YaGhtz+sWPHIjg4GDNnzgRQniW6sLCQS5Ll7++Ptm3bYtOmTdwxLVu2rPI+ZGVlITg4GFlZWWjSpAkAYMaMGYiIiEBwcDCWLVuGrKwsDBw4kEsspaen9y/uPCGEfDnqWSTfpZz86gPFyt5CHAoKCkLbRUREEBYWhlu3bsHU1BTTpk3DypUra6zLy8sLvr6+cHBwwNWrV2FnZ4fw8HCcPXsW7dq1Q8eOHbFmzRreA/WOHTtQUlKCNm3awMvLS+jtdgU/Pz94enqiTZs2ePr0KU6cOAEJCQkcP34cL1++5JarqMzY2BjGxsbYvn075OTkcOLECSQmJqJVq1aYN28eVqxYwSuvra2NQ4cO4ejRo7CwsMCWLVuwbNmyGq/ZxsYGBw8exPHjx2FpaYnu3bvzeicXLVqEzMxMNG/enOtJ/ZiTkxMCAwOxatUqtGzZElu3bkVwcDBsbGxqPPenfOqFAQBk5xUiNuMVmjRpgpiYGJSWlqJXr14wMzODl5cXlJSUuIfHyho1aoSQkBAcPHgQJiYm8PPzEwq+mzZtCl9fX8yZMwfq6urw8PCosg3BwcFo06YN+vbtCysrKzDGcOrUKaGho9WRkZHB/fv3MXDgQBgaGmL8+PGYPHkyfv3111odX53p06fj5s2baNWqFZYsWYLVq1dzPfBKSkoICgqCtbU1zM3Ncf78eZw4cQKqqqr/6pzkx1J5LvGb98W8jLqioqJQVVXlbV
NXVwcA5OTkICUlBVZWVrysv9bW1igoKMCTJ0+qPWdtj6s8pxko7xn8+eefq/136ebmhocPH3I98iEhIXB2duZGJnzO/O7ExESUlpbC0NCQN486Ojqam0M9depULFmyBNbW1vD29sbdu3drVTchhNQ1Aas8MeYb8+bNGygqKiIvL6/KB37y47qW/hLDgmoeQgkA+9w7wqr5t/uAGxUVhW7duiE3N1dobUJSs2MJf8MzLOGT5QKHWqKfZdP6b9B3RFdXF15eXpTKn9SbiKRs+J5I5l7oPN07B8pahgjeugH2ppoAqv49FAgEOHLkCHbt2gVFRUXeEkh37tyBpaUlHj16BG1tbdjY2MDS0hJr167lygwYMOCLjhs4cCDk5OSwc+fOaq9p4MCBUFNTw6JFi/DTTz8hKiqKS95V8ULI19e3ymMrn3P//v1wcXHBvXv3hEY1yMnJcT2ljx8/xsmTJ3H27FmEh4cjICAAU6ZMqeGuE/LfR7FBw6OeRfJdat9MBZqKUhBUs18AQFNRCu2bqTRks/41XV1d3gPM11SxWHxNfHx8YGlp2SDt+VhjealPF/qMcoSQulHdXOK3RSWYuPs2N5e4JsbGxty6pBViYmIgLy+Pn376CUDVc6Zrc1xVzM3Ncfny5RqTOI0bNw779+/HH3/8gebNm/OyPH9qfndlrVq1QmlpKXJycoTmUFceUqulpYUJEybg8OHDmD59OoKCgmpVPyGE1CUKFsl3SVREAG/H8gQhHweMFd+9HU0gKlJdONnwHj9+jDFjxqBJkyaQkJCAjo4O1q9f/9n1PH36FFOmTIGenh4kJSWhpaUFR0fHWj+o1NaQIUOQlpZWp3XWpf/qCwNCvme1nUtcWlbzoKZJkybh8ePHmDJlCu7fv49jx47B29sbv/32Gzd0XFdXFzdu3EBmZiZevHiBsrKyWh1XFQ8PD7x58wZDhw7FzZs38eDBA4SGhiI1NZUrU5EhecmSJRg9ejTv+Llz5yIuLg6TJk3C3bt3cf/+fWzevBkvXrwQOpehoSFcXFwwatQoHD58GBkZGYiNjcXy5cu5ecNeXl44c+YMMjIycPv2bURGRvISUBFCSEOhYJF8t+xNNbF5RGtoKPJ7jjQUpbB5RGtuqNO34K+//kLbtm3x4MED7Nu3Dw8fPsSWLVuQmpoKAwODWi9zkJmZiTZt2uDixYtYuXIlEhMTERERgW7dumHy5Ml12mZpaWk0bty4TuusS9/jC4NvRWZmJg1BJfWiNsnHKuYS16Rp06Y4deoUYmNjYWFhgQkTJmDs2LG8JGEzZsyAqKgoTExM0KhRI2RlZdXquKqoqqri4sWLKCgoQNeuXdGmTRsEBQXx5jCKiIjAzc0NpaWlQgmmDA0NcfbsWdy5cwft27eHlZUVjh07BjGxqvMIBgcHY9SoUZg+fTqMjIzg5OSEuLg4aGtrAyjPsjx58mQYGxvD3t4ehoaGvOQ5hBDSUGjOIvnulZYxxGa8Qk5+IRrLl/ckfWsBQu/evZGUlIS0tDTe2oVPnz5F8+bNMWrUKGzevFloDs+2bdswY8YMHDp0CD169ODWfExNTRVa8uH169dQUlJCZmYmmjVrhvj4eG6I6OvXr6GsrIzIyEjY2NhwcyXDw8Mxd+5cpKWlwdLSEtu2beOyroaEhMDLywuvX7/mzuHn54c1a9bg3bt3cHZ2RqNGjRAREVHtkiAN4eO5UUB5j6K3o8k39cKAkB/Bf30u8dixY/H8+XMcP378azeFkB8SxQYNj5bOIN89URHBN53E5tWrVzhz5gyWLl0qtMi9hoYGXFxcsH//fqG3xv7+/vD398fZs2fRvn17vHr1ChEREVi6dGmVawN+SYKcmTNnIjAwEBoaGvj999/h6OiItLS0KjMCHjhwAD4+Pti4cSM6d+6M0NBQrFu37qundLc31URPE41v/oUBIT+C/+pc4ry8PCQmJmLv3r0UKBJCfigULBJSDyr3dj5PvwfGWLXzTYyNjZ
Gbm4vnz59z22bPno3Q0FBER0dza3U9fPgQjDG0aNGiztrp7e2Nnj17AihfpPqnn37CkSNHuLXDKlu7di3Gjh2LsWPHAgCWLFmC8+fPo7CwdsuY1Kdv/YUBIT+KirnET/MKq5y3KED5VIHvbS5xv379EBsbiwkTJnB/Mwkh5EdAwSIhdezjYZFF/5QnSLj16BWcanF8QEAA3r59i5s3b/J67epjxLiVlRX33yoqKjAyMkJKSkqVZVNSUjBhwgSh4yMjI+u8XYSQ71PFXOKJu29DAPACxu95LnFUVNTXbgIhhHwVlOCGkDpUVcp4MWVNAAJsOBxdZcr4lJQUKCsrc4va//zzzygtLcWBAwd45QwMDCAQCHD//v0a21CR8a9ycFlTOnhCSN1yc3ODk5MTb9u1a9cgKiqKPn361Pn56rPuL/E9JR8jhBBSMwoWCakj1aWMF5VWgJSuJfJvn4T34XheyvinT59iz549GDJkCASC8jft7du3x+nTp7Fs2TKsWrWKK6uiogI7Ozts3LgRb9++FTp/RSKaiqAzO/t/gWl1CWiuX7/O/Xdubi7S0tJqHC5748aNao8nhFRv+/btmDJlCi5duoR//vnnu6n7S9mbauLK7O7Y594RgUMtsc+9I67M7k6BIiGEfGcoWCSkjtSUMl6l5wSw0mLcCZqFrfvD8fjxY0RERKBnz55o2rQpli5dyivfqVMnnDp1Cr6+vli7di23fePGjSgtLUX79u1x6NAhPHjwACkpKVi3bh03pFRaWhodO3bEgAEDMHfuXERHR1ebNn7RokW4cOECkpKS4ObmBjU1NaEekQqenp7YsWMHgoODkZaWBm9vb9y7d+/zb1QD8PHx4TLBEvK1FRQUYP/+/Zg4cSL69OmDkJAQ3v6oqCgIBAKcOXMGrVq1grS0NLp3746cnBycPn0axsbGUFBQwPDhw/Hu3bvPqrshfPjwocrtFXOJ+1k2hVVz1e9u6CkhhBAKFgmpMzn51Sd6EVdpCk3XtRBT0sC8KWPRvHlzjB8/Ht26dcO1a9egoiKc7KFz5844efIk5s+fj/Xr1wMA9PT0cPv2bXTr1g3Tp0+HqakpevbsiQsXLmDz5s3csTt27ABQPv/Ry8sLS5YsqbJdfn5+8PT0RJs2bfD06VOcOHECEhIS3P53795xweOQIUOwYMECzJo1C23atMGjR48wceLEz75PhPxoDhw4gBYtWsDIyAgjRozAjh07qpyD7OPjgw0bNuDq1at4/PgxnJ2dsXbtWuzduxcnT57E2bNnub8Fn1v3x8rKyuDv7w99fX1ISkpCW1ube2k1e/ZsGBoaQkZGBnp6eliwYAFvKHvFy5ht27ahWbNmkJIqH276559/wszMDNLS0lBVVYWtrW2VoyAIIYR8PyjBDSF15FOp4MUUG0OtzzTsc+9YbebOzMxM3vcuXbqgoKCAt01TUxMbNmzAhg0bqj2XsbExNDQ0eGs2VvUA2blzZyQlJVVZh5ubG6KionjrLP7+++/4/fffeeVWrFhRbTs+x4cPH3iBKiH/Fdu3b8eIESMAAPb29sjLy0N0dDRsbGx45ZYsWQJra2sA5ev5zZ07F+np6Vyiq0GDBiEyMhKzZ8/+7Lo/NnfuXAQFBWHNmjXo3LkzsrOzufnQ8vLyCAkJQZMmTZCYmAh3d3fIy8tj1qxZ3PEPHz7EoUOHcPjwYYiKiiI7OxvDhg2Dv78/+vfvj/z8fFy+fLleEnMRQghpONSzSEgdqUgZX91AKwHKF4uvq5TxNjY28PDwgIeHBxQVFaGmpoYFCxbwHs7evXuHMWPGQF5eHtra2vjjjz94ddy7dw/du3fnegLGjx/PBac+Pj7YuXMnjh07BoFAAIFAwGUETExMrPY4ACgpKcHUqVOhpKQEVVVVzJ49G66urrwhrhXt9/LygpqaGuzs7AAAq1evhpmZGWRlZaGlpYVJkybx6g4JCYGSkhKOHj0KAwMDSElJwc7ODo8fPxa6R6Ghod
DV1YWioiKGDh2K/Px8AMCuXbugqqqKoqIiXnknJyeMHDnyC34a5EdXWsZwLf0ljiX8jef5Raj4Z5iamorY2FgMGzYMACAmJoYhQ4Zg+/btQnWYm5tz/62urs717FXelpOTw33/nLort+/8nUwEBgbC398frq6uaN68OTp37oxx48YBAObPn49OnTpBV1cXjo6OmDFjhlDCrQ8fPmDXrl1o1aoVzM3NkZ2djZKSEgwYMAC6urowMzPDpEmTICcn94V3lBBCyLeAgkVC6khFyngAQgFjfaWM37lzJ8TExBAbG4vAwECsXr0a27Zt4/YHBASgbdu2iI+Px6RJkzBx4kSkpqZy+wcNGgRlZWXExcXh4MGDOH/+PDw8PAAAM2bMgLOzM+zt7ZGdnY3s7Gx06tQJb9++hZ2dXbXHAeW9jXv27EFwcDBiYmLw5s0bHD16tMr2S0hIICYmBlu2bAFQns113bp1uHfvHnbu3ImLFy/yejSA8iB46dKl2LVrF2JiYvD69WsMHTqUVyY9PR1Hjx5FeHg4wsPDER0dDT8/PwDA4MGDUVpayltcOycnBydPnsSYMWO+8KdBflQRSdnovOIihgVdh2dYAqLTnuPyg+eISMrG9u3bUVJSgiZNmkBMTAxiYmLYvHkzDh06hLy8PF494uLi3H8LBALe94ptZWVl3Pfa1v1x+0YGHEZRURHEfjKr8nr2798Pa2traGhoQE5ODvPnz0dWVhavjI6ODpdMCwAsLCzQo0cPmJmZYfDgwQgKCkJubu7n30xCCCHfFAoWCalDDZ0yXktLC2vWrIGRkRFcXFwwZcoUrFmzhtvv4OCASZMmQV9fH7Nnz4aamhoiIyNhY2ODP/74A0VFRdi1axdMTU3RvXt3bNiwAaGhoXj27Bnk5OQgLS0NSUlJaGhoQENDAxISEti7dy8KCwurPQ4A1q9fj7lz56J///5o0aIFNmzYACUlJaH2GxgYwN/fH0ZGRjAyMgIAeHl5oVu3btDV1UX37t2xZMkSoV6N4uJibNiwAVZWVmjTpg127tyJq1evIjY2litTVlaGkJAQmJqa4ueff8bIkSNx4cIFAOVJgIYPH47g4GCu/O7du6Gtrf3J4XuEVFbVcjkAUFhShgm74rBtRwgCAgKQkJDAfe7cuYMmTZpg3759X3zekpIS7Nq165N1V9U+gbgkAGD+0SSh5XyuXbsGFxcXODg4IDw8HPHx8Zg3b55QEhtZWVned1FRUZw7dw6nT5+GiYkJ1q9fDyMjI2RkZHzxNRJCCPn6aM4iIXXM3lQTPU00EJvxCjn5hWgsXz70tC56FEvLGFfvm/fF6NChA7fkBgBYWVkhICAApaWlAPjD2gQCATQ0NLhhbCkpKbCwsOA99FlbW6OsrAypqalQV1evsg2fOk5KSgrPnj1D+/btuf2ioqJo06YNr1cEANq0aSNU//nz57F8+XLcv38fb968QUlJCQoLC/Hu3TvIyMgAKB9u165dO+6YFi1aQElJCSkpKdx5dXV1IS8vz5XR1NTkDeFzd3dHu3bt8Pfff6Np06YICQmBm5sb734SUpPqlsup8O5hLF6/fg230WOgoqzE2zdw4EBs374dEyZM+KJzh4eHIzc3F2PHjoWiomKVdbuP/7XK9okrN4FATBKFj+7A94QueppocH+frl69Ch0dHcybN48r/+jRo1q1SSAQwNraGtbW1li4cCF0dHRw5MgR/Pbbb190jYQQQr4+ChYJqQcVKePrUkRSNnxPJHM9BE+z3+BJaTYikrKr7bH81DC2r+3j3onMzEz07dsXEydOxNKlS6GiooIrV65g7Nix+PDhAxcs1sanrr1Vq1awsLDArl270KtXL9y7dw8nT578dxdEfig1LZcDAAV3z0JKxwKpr0phpczfN3DgQPj7++Pu3btfdO7t27fD1tZWKFCsXPfe05erbJ9ATAIKHQYiNyoYD0TFcDhKEVoyJbh37x4MDAyQlZWFsLAwtGvXDidPns
SRI0c+2Z4bN27gwoUL6NWrFxo3bowbN27g+fPn1a7bSggh5PtAw1AJ+Q5UN9TtdWYKJu6+zQ0lu379OgwMDCAqKvrJOo2NjXHnzh1eavuYmBiIiIhwQ0IlJCS4XsraHqeoqAh1dXXExcVx+0tLS3H79u1PtunWrVsoKytDQEAAOnbsCENDwyoXGS8pKcHNmze576mpqXj9+vVnP5iOGzcOISEhCA4Ohq2tLbS0tD7rePJjq265HLU+09B4wHw0HuSNxoN9qizXvn17MMZgbm4OGxsbMMZ4Q7Xd3Nx4mYiB8qRTCQkJAIATJ05U+3Kjom6Fps2rbbui9VAotOuP15f3YLhdJwwZMgQ5OTn45ZdfMG3aNHh4eMDS0hJXr17FggULar4RABQUFHDp0iU4ODjA0NAQ8+fPR0BAAHr37v3JYwkhhHy7KFgk5BtX01C3kvzneHUhCHN2nMGePXuxfv16eHp61qpeFxcXSElJwdXVFUlJSYiMjMSUKVMwcuRIbgiqrq4u7t69i9TUVLx48QLFxcW1Om7KlClYvnw5jh07htTUVHh6eiI3N/eTQzz19fVRXFyM9evX46+//kJoaCiX+KYycXFxTJkyBTdu3MCtW7fg5uaGjh078oa+1sbw4cPx5MkTBAUFUWIb8tk+tVzO55arazWdVyAQgWKnIfhp4g5cup+NR48eYe7cuQAAf39/vHjxAvn5+QgLC4OXlxcvcK0ctFYwNjZGREQEcnJyUFhYiNTUVF7SK0IIId8nChYJ+cbVNNRNtmV3lJV8wN2NkzFx8mR4enpi/PjxtapXRkYGZ86cwatXr9CuXTsMGjQIPXr04K3f6O7uDiMjI7Rt2xaNGjVCTExMrY6bPXs2hg0bhlGjRsHKygpycnKws7PjFu+ujoWFBVavXo0VK1bA1NQUe/bswfLly6ts++zZszF8+HBYW1tDTk4O+/fvr9V1V6aoqIiBAwdCTk6Ot6wHIbXR0MvlfK5vvX2EEEK+fQL2Da+Y++bNGygqKiIvLw8KCgpfuzmEfBXHEv6GZ1iC0Pane+dAorEeVGzLg8PAoZboZ9m0gVtXO2VlZTA2NoazszMWL178r+oKCQkR6un4N3r06IGWLVti3bp1dVIf+bFUDBEHwOv9rwjQ6iML8uf41ttHCCGfg2KDhkc9i4R84771oW5VefToEYKCgpCWlobExERMnDgRGRkZGD58+NduGic3NxdHjhxBVFQUJk+e/LWbQ75TDb1czuf61ttHCCHk20bZUAn5xlUMJXuaV1jlvEUByh/8vqWhZCIiIggJCcGMGTPAGIOpqSnOnz//TWVGbNWqFXJzc7FixQouoQ8hX6I+l8upC996+wghhHy7aBgqId8BGkpGCCGEkB8dxQYNj4ahEvIdoKFkhBBCCCGkodEwVEK+EzSUjBBCCCGENCQKFgn5joiKCGDVXPVrN4MQQgghhPwAaBgqIYQQQgghhBAhFCwSQgghhBBCCBFCwSIhhBBCCCGEECEULBJCCCGEEEIIEULBIiGEEEIIIYQQIRQsEkIIIYQQQggRQsEiIYQQQgghhBAhFCwSQgghhBBCCBFCwSIhhBBCCCGEECEULBJCCCGEEEIIEULBIiGEEEL+EwQCAY4ePfq1m0EIIf8ZFCwS8hWEhIRASUnpazeDEEIIIYSQalGwSL57AoGgxo+Pj0+dnWvIkCFo3749SktLuW3FxcVo06YNXFxcuG2RkZFwcHCAqqoqZGRkYGJigunTp+Pvv/+us7YQQgghhBBSnyhYJN+97Oxs7rN27VooKCjwts2YMaPOzrVp0yZkZWXBz8+P27Z48WJkZ2djw4YNAICtW7fC1tYWGhoaOHToEJKTk7Flyxbk5eUhICDgi8/94cOHf91+QhrKp4YDRkVFQSAQ4PXr1w3WptqcNzMzEwKBAAkJCQ3arrri4+MDS0tL7rubmxucnJy+Wnvqmo2NDaZOnYpZs2ZBRUUFGhoaQi8Es7Oz0bt3b0
hLS0NPTw9//vknb//s2bNhaGgIGRkZ6OnpYcGCBSguLm7AqyCEkO8I+4bl5eUxACwvL+9rN4V8J4KDg5mioiJjjLHXr18zERERFhcXxxhjrLS0lCkrK7MOHTpw5UNDQ9lPP/3Efb979y7r1q0bk5KSYioqKszd3Z3l5+fzznHs2DEmISHB7ty5w+Li4piYmBg7efIkY4yxx48fMwkJCebl5VVl+3Jzc3ntjIiIYC1atGCysrLMzs6O/fPPP1xZV1dX1q9fP7ZkyRKmqanJdHV1a9XGiuOWLl3KGjduzBQVFZmvry8rLi5mM2bMYMrKyqxp06Zsx44dvLZ9qt7IyEjWrl07JiMjwxQVFVmnTp1YZmYmt3/Tpk1MT0+PiYuLM0NDQ7Zr1y5e/QDYli1bWJ8+fZi0tDRr0aIFu3r1Knvw4AHr2rUrk5GRYVZWVuzhw4e8444ePcpatWrFJCUlWbNmzZiPjw8rLi5mjDFWVlbGvL29mZaWFpOQkGCamppsypQpVd57UrdycnLYhAkTuHuvrq7OevXqxa5cucIYK/95HzlypNrji4qKWHZ2NisrK2ugFpeLjIxkALh/ix8rKSlh2dnZ3O9YRkYGA8AaNWrE3rx5wytrYWHBvL29/9W5//77b2Zqasp+/vln9vr168+9HCH5+fnsxYsX3PfXr19Xe63fo65duzIFBQXm4+PD0tLS2M6dO5lAIGBnz55ljJX/3qmqqrKgoCCWmprK5s+fz0RFRVlycjJXx+LFi1lMTAzLyMhgx48fZ+rq6mzFihVf65IIIZ+BYoOGR8Ei+U+pHCwyxljr1q3ZypUrGWOMJSQkMBUVFSYhIcEFQePGjWMuLi6MMcYKCgqYpqYmGzBgAEtMTGQXLlxgzZo1Y66urkLnGTVqFLOwsGAmJiZs7Nix3PbVq1czALygr7p2iouLM1tbWxYXF8du3brFjI2N2fDhw7kyrq6uTE5Ojo0cOZIlJSWxpKSkWrXR1dWVycvLs8mTJ7P79++z7du3MwDMzs6OLV26lKWlpbHFixczcXFx9vjx41pde3FxMVNUVGQzZsxgDx8+ZMnJySwkJIQ9evSIMcbY4cOHmbi4ONu4cSNLTU1lAQEBTFRUlF28eJFrFwDWtGlTtn//fpaamsqcnJyYrq4u6969O4uIiGDJycmsY8eOzN7enjvm0qVLTEFBgYWEhLD09HR29uxZpqury3x8fBhjjB08eJApKCiwU6dOsUePHrEbN26wP/74o8Z7T+rGzz//zDp06MAuXrzIMjMz2Y0bN9iyZcvYsWPHGGOfDha/lk8Fix+rCBalpKTYwoULefv+bbD48OFD1qxZM9anTx/27t27WtfzoykpLWNXH75gR+OfsFbtOzHrzp15+9u1a8dmz57NGCv/vZswYQJvf4cOHdjEiROrrX/lypWsTZs2dd/wH1zXrl2Zp6fn124G+Y+h2KDhUbBIvluVHyCuPnzBSkrLhILF3377jfXp04cxxtjatWvZkCFDmIWFBTt9+jRjjDF9fX0uuPjjjz+YsrIyKygo4I4/efIkExERYU+fPuWd+9WrV0xaWpqpq6vzfj8nTpzIFBQUPtn24OBgBoDXi7Zx40amrq7OfXd1dWXq6uqsqKiI21abNrq6ujIdHR1WWlrKlTEyMmI///zz/+5dSQmTlZVl+/btq1W9L1++ZABYVFRUldfTqVMn5u7uzts2ePBg5uDgwH0HwObPn899v3btGgPAtm/fzm3bt28fk5KS4r736NGDLVu2jFdvaGgo09TUZIwxFhAQwAwNDdmHDx+qbBepH7m5uTX+PjBW/vMOCgpiTk5OTFpamunr63OBJGPCgVNtetuLi4vZlClTmKKiIlNRUWGzZs1io0aNYv369ePKlJaWsmXLljFdXV0mJSXFzM3N2cGDB6s979u3b5m9vT3r1KkTy83N5YLD+Ph4xtj/gsWZM2cyOTk59uzZM66uj4PFV69esZEjRzIlJSUmLS3N7O
3tWVpaWpXnvnPnDtPQ0GDDhw9nxcXFrKCggMnLy/PayhhjR44cYTIyMlyv5qdGAHh7ezMLCwvue8VIgwpdu3ZlU6ZMYTNnzmTKyspMXV39swLehnY68R/Wcdl5pjM7nOnMDmeSWqZMvcMv7HTi/34vfvnlFzZ69GjGWPnv3c6dO3l1eHl5MRsbG+57WFgY69SpE1NXV2eysrJMUlKSNWrUqGEuqB64uroyAEKfBw8efNV2UbBI6gPFBg2P5iyS71JEUjY6r7iIYUHX4RmWgGFB19F5xUUkPnnNK9e1a1dcuXIFpaWliI6Oho2NDWxsbBAVFYV//vkHDx8+hI2NDQAgJSUFFhYWkJWV5Y63trZGWVkZUlNTefXu27cPAoEAL168wP3797ntjDEIBIJaXYOMjAyaN2/OfdfU1EROTg6vjJmZGSQkJLjvtW1jy5YtISLyv3/e6urqMDMz476LiopCVVWVO9+n6lVRUYGbmxvs7Ozg6OiIwMBAZGdn89plbW3Na7u1tTVSUlJ428zNzXltqrjGytsKCwvx5s0bAMCdO3ewaNEiyMnJcR93d3dkZ2fj3bt3GDx4MN6/fw89PT24u7vjyJEjKCkpEb7ZpE5V/CyOHj2KoqKiasv5+vrC2dkZd+/ehYODA1xcXPDq1atqy7979w6rVq1CaGgoLl26hKysLN6c4xUrVmDPnj0IDg5GTEwM3rx5IzQvcvny5di1axe2bNmCe/fuYdq0aRgxYgSio6OFzvf69Wv07NkTZWVlOHfuXI0ZiocNGwZ9fX0sWrSo2jJubm64efMmjh8/jmvXroExBgcHB6H5cFevXkXXrl0xcOBA7N69G2JiYpCVlcXQoUMRHBzMKxscHIxBgwZBXl4eb9++hZ2dHZSVlREXF4eDBw/i/Pnz8PDwqLZNVdm5cydkZWVx48YN+Pv7Y9GiRTh37txn1dEQIpKyMXH3bWTnFfK2vysBJu6+jYik8r9BAoEAZWVltarz2rVrcHFxgYODA8LDwxEfH4958+Z993PC7e3teXP1s7Oz0axZM16Z7/0aCSFfBwWL5LtT3QPE07xC7Lz2CCVljNvWpUsX5Ofn4/bt27h06RIvWIyOjkaTJk1gYGDwWef/66+/MGvWLGzevBkjR46Em5sb98BsaGiIvLw8XiBVHXFxcd53gUAAxhhvW+Xg7XNUVXdV22r7gAWUP7Reu3YNnTp1wv79+2FoaIjr169/cbsqguqqtlW0q6CgAL6+vkhISOA+iYmJePDgAaSkpKClpYXU1FRs2rQJ0tLSmDRpErp06ULJKupJaRnDtfSXOJn0DHP91mPnzp1QUlKCtbU1fv/9d9y9e5dX3s3NjQuyli1bhoKCAsTGxlZbf3FxMbZs2YK2bduidevW8PDwwIULF7j969evx9y5c9G/f3+0aNECGzZs4AV4RUVFWLZsGXbs2AE7Ozvo6enBzc0NI0aMwNatW3nnevr0Kbp27QpNTU2cOHECMjIyNV67QCCAn58f/vjjD6Snpwvtf/DgAY4fP45t27bh559/hoWFBfbs2YO///5bKKDt378/HB0dsWHDBt7LpXHjxuHMmTPc34+cnBycOnUKY8aMAQDs3bsXhYWF2LVrF0xNTdG9e3ds2LABoaGhePbsWY3tr8zc3Bze3t4wMDDAqFGj0LZtW959/haUljH4nkgGq6GM74lklJYJl/j479L169dhbGwMoDxQ19HRwbx589C2bVsYGBjg0aNHddn0r0JSUhIaGhq8T48ePeDh4QEvLy+oqanBzs6uygROr1+/hkAgQFRUFLft+PHjMDAwgJSUFLp164adO3fyEkO9fPkSw4YNQ9OmTSEjIwMzMzPs27evYS+aENIgKFgk35WaHiAqthUWl3IPEEpKSjA3N8eGDRsgLi6OFi1aoEuXLoiPj0d4eDi6du3KHW9sbIw7d+7g7du33LaYmBiIiIjAyMgIQHkQ4+bmhh49emDUqFFYu3Yt8vPzsXDhQg
DAoEGDICEhAX9//yrb/28zP9amjfVZb6tWrTB37lxcvXoVpqam2Lt3L3d8TEwMr86YmBiYmJh8cZsAoHXr1khNTYW+vr7Qp6LnVFpaGo6Ojli3bh2ioqJw7do1JCYm/qvzEmEf9+b/8bgRDLz2YEFgMOzt7REVFYXWrVsjJCSEO6ZyT7KsrCwUFBSEes8rq6m3PS8vD8+ePUP79u25/aKiomjTpg33/eHDh3j37h169uzJ643etWuXUIDXs2dP6OvrY//+/bze+5rY2dmhc+fOWLBggdC+lJQUiImJoUOHDtw2VVVVGBkZCfWw9+vXD0eOHMHly5d529u3b4+WLVti586dAIDdu3dDR0cHXbp04c5R29EPNan8cwGqHtXwtcVmvBJ6IVgZA5CdV4jYDOGe6oMHD2LHjh1IS0uDt7c3YmNjud5XAwMDZGVlISwsDOnp6Vi3bh2OHDlSX5fx1e3cuRMSEhKIiYnBli1banVMRkYGBg0aBCcnJ9y5cwe//vor5s2bxytTWFiINm3a4OTJk0hKSsL48eMxcuTIGl8GEUK+TxQsku/Kpx4gAKCMgfcAYWNjgz179nCBoYqKCoyNjbF//35esOji4gIpKSm4uroiKSkJkZGRmDJlCkaOHMkNmQwMDMS9e/e4XgpFRUVs27YNq1evRmxsLLS0tLBmzRoEBgZi7NixiI6OxqNHjxATE4Nff/0Vixcv/lfXX5s21ke9GRkZmDt3Lq5du4ZHjx7h7NmzePDgAfe2fubMmQgJCcHmzZvx4MEDrF69GocPH/7Xy5YsXLgQu3btgq+vL+7du4eUlBSEhYVh/vz5AICQkBBs374dSUlJ+Ouvv7B7925IS0tDR0fnX52X8FXXm5/zrgxBf8mjXf9xuHr1Ktzc3ODt7c3t/9ze7Nr0ttekoKAAAHDy5Eleb3RycrLQ8gl9+vTBpUuXkJycXOv6AcDPzw/79+9HfHz8Zx1X2datWzF06FD07t0bly5d4u0bN24cF3AHBwdj9OjRtR7aXlv/dpRBQ8jJr/nvfE3lfH19ERYWBnNzc+zatQv79u3jXlz98ssvmDZtGjw8PGBpaYmrV69WGfx/6yp6+Y8l/I3n+UUIDw/nvSAZPHgwgPLg2N/fH0ZGRrV+obh161YYGRlh5cqVMDIywtChQ+Hm5sYr07RpU8yYMQOWlpbQ09PDlClTYG9vjwMHDtT1pRJCvjKxr90AQj7HlzxAdO3aFWvXruXmJgLlAeSdO3d422RkZHDmzBl4enqiXbt2kJGRwcCBA7F69WoAQFpaGubNm4dt27ZBQ0ODO87Ozg6jR4+Gm5sb4uPjMWnSJBgaGmLVqlXo378/3r9/D11dXfTt2xe//fbbv7r+T7WxvuqVkZHB/fv3sXPnTrx8+RKampqYPHkyfv31VwCAk5MTAgMDsWrVKnh6eqJZs2YIDg7m3d8vYWdnh/DwcCxatAgrVqzgeofHjRsHoLzn2M/PD7/99htKS0thZmaGEydOQFVV9V+dl/zPp3rzBSgfDtjTRAMmJiY1rq34bygqKkJdXR1xcXFcT1tpaSlu377NrStoYmICSUlJZGVl8V4EVcXPzw9ycnLo0aMHoqKiat0L3r59ewwYMABz5szhbTc2NkZJSQlu3LiBTp06ASgfqpeamipUt0AgwB9//AERERE4ODjg5MmTXHtHjBiBWbNmYd26dUhOToarqyvvHCEhIXj79i3Xu1gXIwu+RY3lparcrjHcT6hc5d+5ipcLkyZNqrZuf39/odEfXl5eX9bQryAiKRu+J5K5lzcv0p5DrpkFVgeuRxfDxgDKe/KHDRvG63mvrdTUVLRr1463rXKPPlD+b2/ZsmU4cOAA/v77b3z48AFFRUWfHM5NCPn+ULBIvivVPUBUkDOzhZyZLa+ck5OTUO/E2rVrsXbtWqHjzczMcPHixSrrNjQ0xLt376rc98cff/C+29rawtbWttp2urm5Cb2p/bidlYfz1baN1R1XeS
5KhczMzFrXq66u/smhWhMnTsTEiROr3f/xz0BXV1dom42NjdA2Ozs72NnZVVmnk5PTf2rB8W9RVb35pe/f4PlRP8iZ94REI11kvZaG38ZgrPf3R79+/eqtLVOmTMHy5cuhr6+PFi1aYP369cjNzeV63uTl5TFjxgxMmzYNZWVl6Ny5M/Ly8hATEwMFBQVe4AUAq1atQmlpKbp3746oqCi0aNGiVu1YunQpWrZsCTGx//1fqIGBAfr16wd3d3ds3boV8vLymDNnDpo2bVrlPREIBNiyZQtERUW5gNHGxgbKysoYMGAAZs6ciV69euGnn37ijnFxcYG3tzdcXV3h4+OD58+f18nIgm9R+2Yq0FSUwtO8wipfVAgAaChKoX0zlYZu2ldV0cv/8T35IJDA4ku52KzdDPammtz2j+e9Vwzfr/x39kvmeK9cuRKBgYFYu3YtzMzMICsrCy8vL0qiQ8h/EA1DJd+VigeI6gZlCQBo/oAPEITUl6p680XEpSHZxBD5cUfxdO8cZO+YjM2rl8Hd3R0bNmyot7bMnj0bw4YNw6hRo2BlZQU5OTnY2dlBSup/L4cWL16MBQsWYPny5TA2Noa9vT1OnjwplBmywpo1a+Ds7Izu3bsjLS2tVu0wNDTEmDFjUFjIvzfBwcFo06YN+vbtCysrKzDGcOrUKaFhnxUEAgE2btyI0aNHo0+fPoiMjAQAjB07Fh8+fOAS21SoGAHw6tUrtGvXDoMGDUKPHj3q9Z5/LaIiAng7lvfIfvz3vuK7t6MJREXqdojut+zfJP2p0KhRIwDgJWGrnOwGAIyMjHDz5k3etri4ON73mJgY9OvXDyNGjICFhQX09PRq/e+HEPJ9EbDPmRDSwN68eQNFRUXk5eVBQUHhazeHfCMq3qwC4P2fZsUjw+YRrXlvVgkhX+5a+ksMC/p01tt97h1h1bxhh/+WlZXB2NgYzs7O/3o+8LckNDQU06ZNwz///FPr5DsV5s6di8uXL+PKlSv11LqG9fGQS6D8haC3o8kP93e+un+LL06uQVnRWzQeUD6Xu+Lfoo2NDSwtLYVG0VhZWUFcXBxbt25FTk4OZs2ahdjYWERGRsLGxgYZGRkwMjLCtGnTMHbsWCQkJGD69Ol48uQJXr9+DUVFRfz222/4888/ERYWBmVlZaxevRoHDhxAt27duGHB1Z2fkH+DYoOGRz2L5Ltjb6qJzSNaQ0ORPyRVQ1GKAkVC6ti31Jv/6NEjBAUFIS0tDYmJiZg4cSIyMjIwfPjwej93Q3j37h3S09Ph5+eHX3/99bMCRcYY0tPTceHCBbRs2bIeW9mw7E01cWV2d+xz74jAoZbY594RV2Z3/yH/zv+bpD+V7dixAyUlJWjTpg28vLywZMkS3v5mzZrhzz//xOHDh2Fubo7Nmzdz2VAlJSUBAPPnz0fr1q1hZ2cHGxsbaGho0JQAQv6jqGeRfLdKyxhiM14hJ78QjeXLH1Z/pCFJhDSUb6U3//Hjxxg6dCiSkpLAGIOpqSn8/Py4hDffOx8fHyxduhRdunTBsWPHICcnV+tjX79+DXV1dbRr1w579uyhjMD/QV+zl3/p0qXYsmULHj9+XKf1EvK5KDZoeBQsEkII+SQaDkjI11VaxtB5xcVPJv25Mrv7v35xumnTJrRr1w6qqqqIiYnBlClT4OHhIdQLSUhDo9ig4VE2VEIIIZ9kb6qJniYa1JtPyFdSkfRn4u7bEKDqXv66Svrz4MEDLFmyBK9evYK2tjamT5+OuXPn/ut6CSHfH+pZJIQQQgj5TlAvP/mRUWzQ8KhnkRBCCCHkO0G9/ISQhkTBIiGEEELId0RURNDgS9UQQn5MtHQGIYQQQgghhBAhFCwSQgghhPyHhISEQElJifvu4+MDS0vLr9YeQsj3i4JFQgghhJCvKCoqCgKBAK9fv66X+mfMmIELFy7US92EkP82mrNICCGEEPId+PDhAyQkJD77ODk5OcjJydVDiwgh/3XUs0gIIY
QQ8gkRERHo3LkzlJSUoKqqir59+yI9PR1A1T2DCQkJEAgEyMzMBAA8evQIjo6OUFZWhqysLFq2bIlTp04hMzMT3bp1AwAoKytDIBDAzc0NAGBjYwMPDw94eXlBTU0NdnZ2AIDVq1fDzMwMsrKy0NLSwqRJk1BQUFBt2z8ehhoXF4eePXtCTU0NioqK6Nq1K27fvl13N4sQ8p9BwSIhhBBCyCe8ffsWv/32G27evIkLFy5AREQE/fv3R1lZWa2Onzx5MoqKinDp0iUkJiZixYoVkJOTg5aWFg4dOgQASE1NRXZ2NgIDA7njdu7cCQkJCcTExGDLli0AABEREaxbtw737t3Dzp07cfHiRcyaNavW15Kfnw9XV1dcuXIF169fh4GBARwcHJCfn/8Zd4QQ8iOgYaiEEEIIIR8pLWO8tQyd+g/grWW4Y8cONGrUCMnJybWqLysrCwMHDoSZmRkAQE9Pj9unoqICAGjcuDEvMQ0AGBgYwN/fn7fNy8uL+29dXV0sWbIEEyZMwKZNm2rVlu7du/O+//HHH1BSUkJ0dDT69u1bqzoIIT8GChYJIYQQQiqJSMqG74lkZOcVctuUil9AKeUIsu7fxYsXL7gexaysLMjIyHyyzqlTp2LixIk4e/YsbG1tMXDgQJibm3/yuDZt2ghtO3/+PJYvX4779+/jzZs3KCkpQWFhId69e1ertjx79gzz589HVFQUcnJyUFpainfv3iErK+uTxxJCfiw0DJUQQggh5P9FJGVj4u7bvEARAO6FzMeNlCy4/+6HGzdu4MaNGwDKk86IiJQ/TjHGuPLFxcW848eNG4e//voLI0eORGJiItq2bYv169d/sj2ysrK875mZmejbty/Mzc1x6NAh3Lp1Cxs3buTaUhuurq5ISEhAYGAgrl69ioSEBKiqqtb6eELIj6Neg8Xly5ejXbt2kJeXR+PGjeHk5ITU1NT6PCUhhBBCyBcpLWPwPZEM9vH2929Q8uoJlDoNwfHnqjA0aoHc3Fxuf6NGjQAA2dnZ3LaEhASh+rW0tDBhwgQcPnwY06dPR1BQEABwGU5LS0s/2cZbt26hrKwMAQEB6NixIwwNDfHPP/981nXGxMRg6tSpcHBwQMuWLSEpKYkXL158Vh2EkB9DvQaL0dHRmDx5Mq5fv45z586huLgYvXr1wtu3b+vztIQQQgghny0245VQjyIAiEjJQURaAfl3ziAr8y9s2nsUv/32G7dfX18fWlpa8PHxwYMHD3Dy5EkEBATw6vDy8sKZM2eQkZGB27dvIzIyEsbGxgAAHR0dCAQChIeH4/nz5zVmNtXX10dxcTHWr1+Pv/76C6GhoVzim9oyMDBAaGgoUlJScOPGDbi4uEBaWvqz6iCE/BjqNViMiIiAm5sbWrZsCQsLC4SEhCArKwu3bt2qsnxRURHevHnD+xBCCCGENIScfOFAEQAEAhGo/TILH54+xD/bJ2P1onlYuXIlt19cXBz79u3D/fv3YW5ujhUrVmDJkiW8OkpLSzF58mQYGxvD3t4ehoaGXEKapk2bwtfXF3PmzIG6ujo8PDyqbaOFhQVWr16NFStWwNTUFHv27MHy5cs/6zq3b9+O3NxctG7dGiNHjsTUqVPRuHHjz6qDEPJjELDKA+zr2cOHD2FgYIDExESYmpoK7ffx8YGvr6/Q9ry8PCgoKDREEwkhhBDyg7qW/hLDgq5/stw+946waq7aAC0ihFT25s0bKCoqUmzQgBoswU1ZWRm8vLxgbW1dZaAIAHPnzkVeXh73efz4cUM1jxBCyFcWEhIitGwAIQ2pfTMVaCpKQVDNfgEATUUptG+m0pDNIoSQr6bBgsXJkycjKSkJYWFh1ZaRlJSEgoIC70MIIYQQ0hBERQTwdjQBAKGAseK7t6MJb71FQgj5L2uQYNHDwwPh4eGIjIzETz/91BCnJIQQQgj5bPammtg8ojU0FKV42zUUpbB5RGvYm2p+pZYRQkjDq9dgkTEGDw8PHDlyBB
cvXkSzZs3q83SEkHpiY2MDLy+vr90M8hVFRESgc+fOUFJSgqqqKvr27Yv09HQA5eu+CQQChIWFoVOnTpCSkoKpqSmio6O546OioiAQCHDy5EmYm5tDSkoKHTt2RFJSUo3nPXbsGFq3bg0pKSno6enB19cXJSUl9XqthNibauLK7O7Y594RgUMtsc+9I67M7v5VAkX6+0sI+ZrqNVicPHkydu/ejb1790JeXh5Pnz7F06dP8f79+/o8LSHkC7m5uUEgEAh9/P39sXjxYq6crq4u1q5d+/UaShrc27dv8dtvv+HmzZu4cOECRERE0L9/f5SVlXFlZs6cienTpyM+Ph5WVlZwdHTEy5cvefXMnDkTAQEBiIuLQ6NGjeDo6Ci0eHmFy5cvY9SoUfD09ERycjK2bt2KkJAQLF26tF6vlRCgfEiqVXNV9LNsCqvmql9t6Onhw4d5f3+r4+Pj5CnjrQABAABJREFUU+Xf78ofAPjw4QP8/f1hYWEBGRkZqKmpwdraGsHBwbx/i0+fPoWnpyf09fUhJSUFdXV1WFtbY/PmzXj37l29XS8h5BvD6hGAKj/BwcG1Oj4vL48BYHl5efXZTELI/3N1dWX29vYsOzub9ykpKeGV09HRYWvWrPk6jSQNoqS0jF19+IIdjX/Crj58wUpKy3j7nz9/zgCwxMRElpGRwQAwPz8/bn9xcTH76aef2IoVKxhjjEVGRjIALCwsjCvz8uVLJi0tzfbv388YYyw4OJgpKipy+3v06MGWLVvGO29oaCjT1NSs68sl5LuXn5/P+7v9008/sUWLFvG2FRUVMRsbG6asrMw2bNjA4uPjWXp6OtuzZw9r1aoVi4+PZ4wxlp6ezjQ0NFiLFi3Y/v37WXJyMktPT2dHjx5lDg4O7NixY1/3YskPi2KDhlfvw1Cr+ri5udXnaQkh/4KkpCQ0NDR4nx49enDDoGxsbPDo0SNMmzaN97a6IpPlmTNnYGxsDDk5Odjb2yM7O5tX/7Zt22BsbAwpKSm0aNGCW2cMKH/j7eHhAU1NTUhJSUFHR4dbP4wxBh8fH2hra0NSUhJNmjTB1KlTG+am/GAikrLRecVFDAu6Ds+wBAwLuo42M3fBxsEJenp6UFBQgK6uLgAgKyuLO87Kyor7bzExMbRt2xYpKSm8uiuXUVFRgZGRkVCZCnfu3MGiRYsgJyfHfdzd3ZGdnU09G6TB2djYYMqUKfDy8oKysjLU1dURFBSEt2/fYvTo0ZCXl4e+vj5Onz4NoHxdxbFjx6JZs2aQlpaGkZERAgMDeXWWlJRg6tSp3PDu2bNnw9XVFU5OTrzzVh6GWlRUhNmzZ0NLSwuSkpLQ19fH9u3bIScnx/u7LSoqCnl5ed62tWvX4tKlS7hw4QImT54MS0tL6OnpYfjw4bhx4wYMDAwAAJMmTYKYmBhu3rwJZ2dnGBsbQ09PD/369cPJkyfh6OhY7/ebEPJtEPvaDSCEfF8OHz4MCwsLjB8/Hu7u7rx97969w6pVqxAaGgoRERGMGDECM2bMwJ49ewAAe/bswcKFC7Fhwwa0atUK8fHxcHd3h6ysLFxdXbFu3TocP34cBw4cgLa2Nh4/fswtoXPo0CGsWbMGYWFhaNmyJZ4+fYo7d+40+PX/10UkZWPi7tv4eAHeeyHzIabQCAvn+cHJ2gxlZWUwNTXFhw8f6q0tBQUF8PX1xYABA4T2SUlJVXEEIfVr586dmDVrFmJjY7F//35MnDgRR44cQf/+/fH7779jzZo1GDlyJLKysiAuLo6ffvoJBw8ehKqqKq5evYrx48dDU1MTzs7OAIAVK1Zgz549CA4OhrGxMQIDA3H06FF069at2jaMGjUK165dw7p162BhYYGMjAy8ePGiVu3fs2cPbG1t0apVK6F94uLiEBcXx8uXL3H27FksW7YMsrKyVdZT8ZKQEPLfR8EiIT+40jKG2IxXyMkvxPP8IpwJD4ecnBy3v3fv3rzyKioqvDfWlRUXF2PLli
1o3rw5gPJMyIsWLeL2e3t7IyAggHv4b9asGTcXzdXVFVlZWTAwMEDnzp0hEAigo6PDHZuVlQUNDQ3Y2tpCXFwc2traaN++fZ3fjx9ZaRmD74lkoUCx9P0blLx6AjV7Dxx/ropZRi1w7WqM0PHXr19Hly5dAJT3mNy6dQseHh5CZbS1tQEAubm5SEtLg7GxcZXtad26NVJTU6Gvr//vL46QL1D57+Ob98Uwt7DA/PnzAZSvDe3n5wc1NTXuxdnChQuxefNm3L17Fx07doSvry9XV7NmzXDt2jUcOHCACxbXr1+PuXPnon///gCADRs24NSpU9W2Jy0tDQcOHMC5c+dga2sLANDT06v19Tx48AA2NjY1lnn48CEYYzAyMuJtV1NTQ2FhIYDynBQrVqyo9XkJId8vChYJ+YFFJGXD90QysvPKHwBepD2HXDMLrA5cjy6GjQEAsrKyGDZsWK3qk5GR4QJFANDU1EROTg6A8gQp6enpGDt2LK9HsqSkBIqKigDKE+z07NkTRkZGsLe3R9++fdGrVy8AwODBg7F27Vro6enB3t4eDg4OcHR0hJgY/RmrK7EZr7jfhcpEpOQgIq2A/DtnkCWngk173yB03XKhchs3boSBgQGMjY2xZs0a5ObmYsyYMbwyixYtgqqqKtTV1TFv3jyoqanxhtxVtnDhQvTt2xfa2toYNGgQREREcOfOHSQlJeH/2Lvv8JzO/4Hj7yd7JxJkEGJGIiExG7FXzFKKRhDEpqjZ1qZKjaK02tpq1x4VqxLEChKjiYRIhIpNSJB5fn/45Xw9khBq+7yu67munHPuc9/3OTT1ee7x+e67717JMwuRm6d/P15NuIeVQ3ECzyTQyM0eXV1dbGxscHd3V++xtbUFUH/v/fzzzyxcuJD4+HgePnxIamoqHh4eACQmJnLt2jWtL710dXWpWLGi1sZRTwoPD0dXV5datWq91DMpytNfBeXd0aNHyczMxM/Pj5SUlJeuRwjxfnkjeRaFEO+erOmGTwcHqRoDJuy7w/lHppQsWRJ7+7xvFa+vr691rNFo1H+cJCUlATBv3jzCw8PVz5kzZzh8+DDweCQpNjaWCRMm8PDhQ9q2bcvnn38OgKOjI1FRUfzyyy8YGxvTp08fatasmetOmuLFXb+fPVAE0Gh0yP/pMFKvnufKgr78OH4EU6dOzVZu8uTJTJ48mfLly3PgwAE2b95M/vz5s5UZMGAAFStW5OrVq2zZsgUDA4Mc2/Xx8WHr1q3s3LmTypUr88knnzBjxgytEWchXofcfj8+SIfey04QeObxWmyNRqP1ey9remZmZiarVq1iyJAhBAQEsHPnTsLDw+nSpct/mrptbGz80vcClC5dmrNnzz6zTMmSJdFoNERFRWmdL168OCVLlvzPfRBCvF/kK3khPkK5TTd80rgtETRwtctxu3gDAwMyMjJeqE1bW1scHBy4cOECfn5+uZazsLCgXbt2tGvXjs8//5xGjRpx+/ZtrK2tMTY2pnnz5jRv3py+fftSpkwZTp8+TYUKFV6oLyJnBc1zXwdo7OSBcbe5AKzo/gleJWzULwLi4uIAcHFx4ciRI89so3r16rnmVuzcuXO2DdB8fHzw8fHJ4xMI8d+9yO/HZwkJCaFatWr06dNHPZeVmxTA0tISW1tbQkND1enbGRkZnDhxQh19fJq7++P1wsHBweo01BfRvn17vv32W8LCwrKtW0xLSyM1NRUbGxsaNGjAnDlz+PLLL3NdtyiE+DjIyKIQH6HcphtmUYCExEccjb2d43UnJyf27dvHv//+m+eNFQDGjRvHpEmT+Omnn4iOjub06dMsWrSIH3/8EYAff/yRlStXcvbsWaKjo/nzzz+xs7PDysqKxYsXs2DBAs6cOcOFCxdYtmwZxsbGMsr0ClUpZo29pRG5bV2hAewtjahSzPpNdkuIN+q//n7MUqpUKY4dO8aOHTuIjo5m1KhRhIaGapX58ssvmTRpEps2bSIqKooBAw
Zw586dXDeQcXJywt/fn65du7Jx40ZiY2MJCgpizZo1eXq2gQMH4u3tTb169fj55585efIkFy5cYM2aNXzyySecO3cOgF9++YX09HQqVarE6tWriYyMJCoqimXLlnH27Fl0dXXz1J4Q4v0nwaIQH6Hcphvmtdz48eOJi4ujRIkSFChQIM/tduvWjfnz57No0SLc3d2pVasWixcvplixYgCYm5szZcoUKlWqROXKlYmLi+Ovv/5CR0cHKysr5s2bh7e3N+XKlWP37t1s2bIFGxubPLcvnk1XR8OY5q4A2QLGrOMxzV3fWnJyId6E5/1+zEi6w7VVI6nj5qiVOuZpPXv2pFWrVrRr146qVaty69YtrVFGgOHDh+Pr60unTp3w8vLCzMwMHx+fZ+72O3fuXD7//HP69OlDmTJl6N69O8nJyXl6NkNDQ3bt2sWwYcP47bff+OSTT6hcuTI//fQT/fv3x83NDYASJUoQFhZG/fr1+eabbyhfvjyVKlVi9uzZDBkyhAkTJuSpPSHE+0+j/JfVzq/ZvXv3sLS0JDExEQsLi7fdHSE+GIdibuE77/Bzy638/+mG4uPy9MYe8HhEcUxzVxq55X0NqxAvonPnzty9e5eNGze+1X487/fjnaBFPIwJZdmqP6nt7kRERAR16tThzp07WFlZ/ae2MzMzcXFxoW3bthKQCZEDiQ3ePFmzKMRHKGu64dXERzmuy9EAdjLd8KPVyM2eBq52asqAguaP/y7IiKJ4H6Smpua6aVJuMjIy0Gg06OjoPPf3Y/qdBCwdnWldpzK6OhoiIiJeuq8XL15k586d1KpVi5SUFObMmUNsbCzt27d/6TqFEOJVkmmoQnyEZLqheB5dHQ1eJWxo4VEIrxI28ndBvDJr167F3d0dY2NjbGxsqF+/PkOHDmXJkiVs2rQJjUaDRqMhKCgIeDxVs3Tp0piYmFC8eHFGjRqltQvy2LFj8fDwYP78+RQrVkydwnn37l169uyJra0tRkZGuLm5sXXrVgAWL16MlZUVmzdvxtXVFUNDQ+Lj4wkNDaWRT0MiprTh4oy2XF3xNSlXz6ttXZ7blQfRB7kZtgs9XR06d+5MnTp1AMiXLx8ajSbbJk3PoqOjw+LFi6lcuTLe3t6cPn2a3bt355p7VAgh3jQZWRTiI9XIzZ65HSpkm25oJ9MNhRCvSUJCAr6+vkyZMoXPPvuM+/fvs3//fjp16kR8fDz37t1j0aJFAFhbP57ZYG5uzuLFi3FwcOD06dN0794dc3Nzhg0bptZ7/vx51q1bx/r169HV1SUzM5PGjRtz//59li1bRokSJYiIiNDamOXBgwf88MMPzJ8/HxsbGwoWLMiFCxfw9/dn9uzZ7Iu+zsgJk7m+diyFuv+OjqEJHv1+QRP8M8ULFWDWrFkYGxvz6aef0rp1a6KiorCwsHih1BKOjo6EhIS8orcrhBCvngSLQnzEZLqhEOJ1y8hU1N8xdy5GkZ6eTqtWrdSdjLOS2hsbG5OSkoKdnXZKipEjR6o/Ozk5MWTIEFatWqUVLKamprJ06VJ1w62dO3dy9OhRIiMjKV26NPA4T+CT0tLS+OWXXyhfvrx6rm7duurPZcqUoUsTb6ysrOhU7AFNm9alSjFrWrf6A2NjY7WfWUFtwYIF//OaRSGEeNdIsCjERy5ruqEQQrxqT2+WpGRmYFHCE5eybjRt3IiGDRvy+eefky9fvlzrWL16NT/99BMxMTEkJSWRnp6ebWOLokWLau3MHB4eTuHChdVAMScGBgaUK1dO69y1a9cYOXIkQUFBXL9+nYyMDB4+eICDXrL8nhRCfJRkzaIQQgghXrnAMwn0XnZCa5q7RkeXfK3HY9lyNAb5HZk9ezbOzs7ExsbmWMehQ4fw8/OjSZMmbN26lbCwMEaMGEFqaqpWuacTx+dlKqixsXG2fIb+/v6Eh4cza9YsDh48SHh4ODY2NtnaE0KIj4UEi0J8IDQazVvfcv59pigKPXr0wNraGo
1GQ3h4+NvukhDvrYxMhXFbInLcTRSNBqPCrlwo0pRjx09gYGDAhg0bMDAwICMjQ6vowYMHKVq0KCNGjKBSpUqUKlWKixcvPrf9cuXKcfnyZaKjo1+o3yEhIfTv358mTZpQtmxZDA0NuXnz5jPvydp59em+CyHEh0CCRSFeoc6dO6s7+enr61OsWDGGDRvGo0fPTvL8oenZsye6urr8+eefL3RfUFAQGo2Gu3fvvrK+3L59m4EDB1K0aFEMDAxwcHCga9eu2ZJpBwYGsnjxYrZu3UpCQoKanPp5Dh06hK6uLk2bNn1lfRbifXc09rbWiGKWlCtRJB5aw6OEc1y6FM/UX5dy48YNXFxccHJy4tSpU0RFRXHz5k3S0tIoVaoU8fHxrFq1ipiYGH766Sc2bNjw3PZr1apFzZo1ad26Nbt27SI2Npbt27cTGBj4zPtKlSrFH3/8QWRkJEeOHMHPz++5o5RFixZFo9GwdetWbty4QVJS0nP7J4QQ7wsJFoV4xRo1akRCQgIXLlxgxowZ/Pbbb4wZM+Ztd+uNefDggbr5xMKFC99qX27fvs0nn3zC7t27+fXXXzl//jyrVq3i/PnzVK5cmQsXLqhlY2JisLe3p1q1atjZ2aGnl7cl3QsWLODLL79k3759XLly5XU9ihDvlev3c/6CTMfAhEeXznB97Vj+/b0nv0yfyPTp02ncuDHdu3fH2dmZSpUqUaBAAUJCQvj000/56quv6NevHx4eHhw8eJBRo0blqQ/r1q2jcuXK+Pr64urqyrBhw547+rdgwQLu3LlDhQoV6NixI/3796dgwYLPvKdQoUKMGzeOr7/+GltbW/r165en/gkhxHtBeYclJiYqgJKYmPi2uyJEnvj7+ystWrTQOteqVSvF09NTURRFuXnzpvLFF18oDg4OirGxseLm5qasWLFCq3ytWrWUL7/8Uhk6dKiSL18+xdbWVhkzZoxWmejoaKVGjRqKoaGh4uLiouzcuVMBlA0bNqhlTp06pdSpU0cxMjJSrK2tle7duyv379/P1teJEycqBQsWVCwtLZVx48YpaWlpypAhQ5R8+fIphQoVUhYuXPhC72Dx4sXKJ598oty9e1cxMTFR4uPjta4/evRIGTZsmFK4cGHFwMBAKVGihDJ//nwlNjZWAbQ+/v7+iqIoyp9//qm4ubmpz1KvXj0lKSnpuX3p1auXYmpqqiQkJGidf/DggVKoUCGlUaNG6rt4st2iRYvm6Vnv37+vmJmZKWfPnlXatWunTJw4Uev63r17FUAJDAxUPDw8FCMjI6VOnTrKtWvXlL/++kspU6aMYm5urvj6+irJycnqfdu3b1e8vb0VS0tLxdraWmnatKly/vx59fqYMWOyvStAWbRokfqOv/zyS6VAgQKKoaGh4u3trRw9ejRbv3bv3q1UrFhRMTY2Vry8vJSzZ8/m6bmFeJ6D528qRYdvfe7n4Pmbb7urQoj3iMQGb56MLArxGp05c4aDBw+qa1oePXpExYoV2bZtG2fOnKFHjx507NiRo0ePat23ZMkSTE1NOXLkCFOmTGH8+PHs2rULgMzMTFq1aoWBgQFHjhzh119/Zfjw4Vr3Jycn4+PjQ758+QgNDeXPP/9k9+7d2b7x/vvvv7ly5Qr79u3jxx9/ZMyYMTRr1ox8+fJx5MgRevXqRc+ePbl8+XKen3nBggV06NABS0tLGjduzOLFi7Wud+rUiZUrV/LTTz8RGRnJb7/9hpmZGY6Ojqxbtw6AqKgoEhISmDVrlpqXrWvXrkRGRhIUFESrVq1QlBxXQ6kyMzNZtWoVfn5+2bbiNzY2pk+fPuzYsYPbt28za9Ysxo8fT+HChUlISCA0NDRPz7pmzRrKlCmDs7MzHTp0YOHChTn2a+zYscyZM4eDBw9y6dIl2rZty8yZM1mxYgXbtm1j586dzJ49Wy2fnJzMoEGDOHbsGHv27EFHR4fPPvuMzMxMAIYMGUJCQoL6mTZtGiYmJlSqVAmAYc
OGsW7dOpYsWcKJEycoWbIkPj4+3L59W6tfI0aMYPr06Rw7dgw9PT26du2ap+cW4nmqFLPG3tKI3JLwaAB7y8epeoQQQrzD3na0+izy7YF416VnZCoHz99UNoZdVg6ev6l06uSv6OrqKqampoqhoaECKDo6OsratWtzraNp06bK4MGD1eNatWop1atX1ypTuXJlZfjw4YqiKMqOHTsUPT095d9//1Wvb9++XWtk8ffff1fy5cunNfq2bds2RUdHR7l69aqiKI9H04oWLapkZGSoZZydnZUaNWr87/nS0xVTU1Nl5cqVeXof0dHRir6+vnLjxg1FURRlw4YNSrFixZTMzExFURQlKipKAZRdu3bleH/WiNedO3fUc8ePH1cAJS4uLk99yHL16lUFUGbMmJHj9fXr1yuAcuTIEUVRFGXGjBl5HlHMUq1aNWXmzJmKoihKWlqakj9/fmXv3r3q9SdH8LJMmjRJAZSYmBj1XM+ePRUfH59c27lx44YCKKdPn8527dChQ4qRkZGyevVqRVEUJSkpSdHX11eWL1+ulklNTVUcHByUKVOm5Nqvbdu2KYDy8OHDF3oHQuRm++kritPwrYrTU6OJWee2n77ytrsohHjPSGzw5snIohAvKfBMAtV/+BvfeYcZsCoc33mH+etMAuUqVyM8PJwjR47g7+9Ply5daN26NfB4t7wJEybg7u6OtbU1ZmZm7NixI9tmK0/n/rK3t+f69esAREZG4ujoiIODg3rdy8tLq3xkZCTly5fX2k7e29ubzMxMoqKi1HNly5ZFR+d/vwZsbW3VBNkAurq62NjYqG0/z8KFC/Hx8SF//vwANGnShMTERP7++2/gce4zXV1datWqlaf6AMqXL0+9evVwd3enTZs2zJs3jzt37uT5fuU5I5AvKyoqiqNHj+Lr6wuAnp4e7dq1Y8GCBdnKPvnnaWtri4mJiVaCcFtbW613fO7cOXx9fSlevDgWFhY4OTkBZPt7Eh8fT8uWLRkyZAht27YFHq+9TEtLw9vbWy2nr69PlSpViIyMzLVf9vb2AHn+sxbieRq52TO3QwXsLI20zttZGjG3QwUaudm/pZ4JIYTIq7zt4CCE0JKVP+zpMORhagZn76dx/pEpjcqXZOHChZQvX54FCxYQEBDA1KlTmTVrFjNnzsTd3R1TU1MGDhyYLYeXvr6+1rFGo1GnIL5KObXzsm1nZGSwZMkSrl69qrU5TEZGBgsXLqRevXp5yn32NF1dXXbt2sXBgwfV6ZojRozgyJEjFCtWLNf7ChQogJWVVbYAKUtkZCQajYaSJUu+cJ/g8XTb9PR0raBdURQMDQ2ZM2cOlpaW6vkn32le3nHz5s0pWrQo8+bNw8HBgczMTNzc3LT+niQnJ/Ppp5/i5eXF+PHjX+oZnu4X8Fr+nomPVyM3exq42nE09jbX7z+ioPnjqae6OrlNUBVCCPEukZFFIV7QM/OH/b9xWyLIyFTQ0dHh22+/ZeTIkTx8+JCQkBBatGhBhw4dKF++PMWLF3/hPGAuLi5cunSJhIQE9dzhw4ezlTl58iTJycnquZCQEHR0dHB2dn6h9vLqr7/+4v79+4SFhREeHq5+Vq5cyfr167l79y7u7u5kZmYSHBycYx255SvTaDR4e3szbtw4wsLC1Lxsz6Kjo0Pbtm1ZsWIFV69e1br28OFDfvnlF3x8fLC2fvE1U+np6SxdupTp06drPevJkydxcHBg5cqVL1xnllu3bhEVFcXIkSOpV68eLi4u2UZSFUWhQ4cOZGZm8scff2glFi9RogQGBgaEhISo59LS0ggNDcXV1fWl+yXEy9LV0eBVwoYWHoXwKmEjgaIQQrxHJFgU4gXllj/sSQmJjzga+3gzkTZt2qCrq8vPP/9MqVKl1FGyyMhIevbsybVr116o/fr161O6dGn8/f05efIk+/fvZ8SIEVpl/Pz8MDIywt/fnzNnzrB3716+/PJLOnbsiK2t7Ys9cB4tWLCApk2bUr
58edzc3NRP27ZtsbKyYvny5Tg5OeHv70/Xrl3ZuHEjsbGxBAUFsWbNGiDnfGVHjhzh+++/59ixY8THx7N+/Xo1L9vzfP/999jZ2dGgQQO2b9/OpUuX2LdvHz4+PqSlpfHzzz+/1LNu3bqVO3fuEBAQoPWsbm5utG7dOsepqHmVL18+bGxs+P333zl//jx///03gwYN0iozduxYdu/ezW+//UZSUhJXr17l6tWrPHz4EFNTU3r37s3QoUMJDAwkIiKC7t278+DBAwICAl66X0IIIYT4+EiwKMQLyi1/WG7l9PT06NevH1OmTGHw4MFUqFABHx8fateujZ2dHS1btnyh9nV0dNiwYQMPHz6kSpUqdOvWjYkTJ2qVMTExUXf6rFy5Mp9//jn16tVjzpw5L9RWXl27do1t27apazOf7u9nn32mBlBz587l888/p0+fPpQpU4bu3burI6A55SuzsLBg3759NGnShNKlSzNy5Eg1L9vz2NjYcPjwYerUqUPPnj0pUaIEbdu2pUSJEoSGhmqtG3wRCxYsoH79+lpTTbO0bt2aY8eOcerUqZeqW0dHh1WrVnH8+HHc3Nz46quvmDp1qlaZ4OBgkpKSqFatGvb29upn9erVAEyePJnWrVvTsWNHKlSowPnz59mxYwf58uV7qT4JIYQQ4uOkUV7X7g+vwL1797C0tCQxMRELC4u33R0hADgUcwvfeYefW25l90/wKmHzBnokhBBCCPHhk9jgzZORRSFekOQPE0IIIYQQHwMJFoV4Qbo6GsY0f7xRyNMBY9bxmOauH9wmDt9//z1mZmY5fvIyJfR1yK0/ZmZm7N+//6XrjY+Pf2bdT6ewEEIIIYT4EMk0VCFeUuCZBMZtidDa7Mbe0ogxzV0/yPxht2/f5vbt2zleMzY2plChQm+4R3D+/PlcrxUqVOilUnXA491O4+Licr3u5OSklR5ECCGEEK+fxAZvngSLQvwHGZmK5A8TQgghhHgDJDZ48+SrcSH+g6z8YUIIIYQQQnxoZM2iEEIIIYQQQohsJFgUQgghhBBCCJGNBItCCCGEEEIIIbKRYFEIIYQQQgghRDYSLAohhBBCCCGEyEaCRSGEEEIIIYQQ2UiwKIQQQgghhBAiGwkWhRBCCCGEEEJkI8GiEEIIIYQQQohsJFgUQgghhBBCCJGNBItCCCGEEEIIIbKRYFEIIYQQQgghRDYSLAohhBBCCCGEyEaCRSGEEEIIIYQQ2UiwKIQQQgghhBAiGwkWhRBCCCGEEEJkI8GiEEIIIYQQQohsJFgUQgghhBBCCJGNBItCCCGEEEIIIbKRYFEIIYQQQgghRDYSLAohhBBCCCGEyEaCRSGEEEIIIYQQ2UiwKIQQQgghhBAiGwkWhRBCCCGEEEJkI8GiEEIIIYQQQohsJFgUQgghhBBCCJGNBItCCCGEEEIIIbKRYFEIIYQQQgghRDYSLAohhBBCCCGEyEaCRSGEEEIIIYQQ2UiwKIQQQgghhBAiGwkWhRBCCCGEEEJkI8GiEEIIIYQQQohsJFgUQgghhBBCCJGNBItCCCGEEEIIIbKRYFEIIYQQQgghRDYSLAohhBBCCCGEyEaCRSGEEEIIIYQQ2UiwKIQQQgghXoutW7diZWVFRkYGAOHh4Wg0Gr7++mu1TLdu3ejQoQMA69ato2zZshgaGuLk5MT06dPVcnPmzMHNzU093rhxIxqNhl9//VU9V79+fUaOHPm6H0uIj4YEi0IIIYQQ4rWoUaMG9+/fJywsDIDg4GDy589PUFCQWiY4OJjatWtz/Phx2rZtyxdffMHp06cZO3Yso0aNYvHixQDUqlWLiIgIbty4kWNdaWlpHDp0iNq1a7/BJxTiwybBohBCCCGEeKUyMhUOxdwiKDaJUi5u/L13LwBBQUF89dVXhIWFkZSUxL///sv58+epVasWP/74I/Xq1WPUqFGULl2azp07069fP6ZOnQqAm5sb1tbWBAcHq3UNHjxYPT569ChpaW
lUq1bt7Ty0EB8gCRaFEEIIIcQrE3gmgeo//I3vvMMMWBXOFeNiTFqwju2nr7B//35atWqFi4sLBw4cIDg4GAcHB0qVKkVkZCTe3t5adXl7e3Pu3DkyMjLQaDTUrFmToKAg7t69S0REBH369CElJYWzZ88SHBxM5cqVMTExeUtPLsSHR4JFIYQQQgjxSgSeSaD3shMkJD5SzxkVKUdi3BkCflxHpkaXMmXKULt2bYKCgggODqZWrVp5rj/rvv379+Pp6YmFhYUaQL5oXUKI55NgUQghhBBC5Ent2rUZOHBgjtcyMhXGbYlAeeq8oWNZlNSH3Du2ER17VzIyFTXoCwoKUtcYuri4MHbsWDZu3KjeGxISQunSpdHV1QX+t27xzz//VO+rXbs2u3fvJiQkRNYrCvGKSbAohBBCCCH+s6Oxt7VGFLPoGpmhX8CJ5H+CUOxdORp7m5o1a3LixAmio6PV0cDBgwejo6NDWFgY0dHRLFmyhDlz5jBkyBC1rnLlypEvXz5WrFihFSxu3LiRlJSUbNNYhRD/jQSLQgghhBBv0dixY/Hw8PjP9XTu3JmWLVs+s8yzRgb/q+v3sweKWYwc3UDJxKiIO9fvP8La2hpXV1fs7OxwdnYGoEKFCqxZs4a1a9fi5ubG6NGjGT9+PJ07d1br0Wg01KhRA41GQ/Xq1YHHAaSFhQWVKlXC1NT0tTybEB8rCRaFEEIIIV7AoUOH0NXVpWnTpm+7K1pmzZqlppl4ndLT0+nXrx+Wlpbkz5+fUaNGoSgKBc2NuPhDMx5EH9IqHz+zHQa2xSk6fCt6Vnb8MX0M9vb2nD17FgMDAyZNmqSW/fzzz5k4cSKpqakEBwczdOhQ1q9fT506dTAxMaF8+fIMHz6ctLQ0zMzMADh48CBly5YlPDwcR0dH+vfvT3JyslrnL7/8QqlSpTAyMsLW1pbPP/9cvbZ27Vrc3d0xNjbGxsaG+vXra90rxMdOgkUhhBBCiBewYMECvvzyS/bt28eVK1dee3upqal5KmdpaYmVldXr7QywZMkS9PT0OHr0KLNmzeLHH39k/vz5VClm/cz7NAD/bOdo8E7WrFlDVFQUy5cvx8nJ6Zn3jRgxgiFDhhAeHk7p0qXx9fUlPT0dgJiYGBo1akTr1q05deoUq1ev5sCBA/Tr1w+AY8eO0b9/f8aPH09UVBSBgYHUrFkTgISEBHx9fenatSuRkZEEBQXRqlUrFOXpVZdCfLwkWBRCCCGEyKOkpCRWr15N7969adq0qdZIXlBQEBqNhj179lCpUiVMTEyoVq0aUVFRWnVMnjwZW1tbzM3NCQgI4NEj7embWdNJJ06ciIODgzpN8/Tp09StW1cdBevRowdJSUnZ7suSnJxMp06dMDMzw97enunTp7/UM2flTNwU/i/3Hqbh6OjIjBkzcHZ2xs/Pjy+//JIZM2agq6NR79HkUpdHvnRKlSpF9erVKVq0KNWrV8fX1/eZ7Q8ZMoSmTZtSunRpxo0bx8WLFzl//jwAkyZNws/Pj4EDB1KqVCmqVavGTz/9xNKlS3n06BHx8fGYmprSrFkzihYtiqenJ/379wceB4vp6em0atUKJycn3N3d6dOnjzpiKYSQYFEIIYQQIs/WrFlDmTJlcHZ2pkOHDixcuDDbSNSIESOYPn06x44dQ09Pj65du2rdP3bsWL7//nuOHTuGvb09v/zyS7Z29uzZQ1RUFLt27WLr1q0kJyfj4+NDvnz5CA0N5c8//2T37t3qCFpOhg4dSnBwMJs2bWLnzp0EBQVx4sSJF3rep3MmRiTc46ZJEXb8c1Ut4+XlpeZCBOhVqzh2lkZa9Vga6zO3QwXGDO5LeHg4zs7O9O/fn507dz63D+XKlVN/tre3B+D69esAnDx5ksWLF2NmZqZ+fHx8yMzMJDY2lgYNGlC0aFGKFy9Ox44dWb58OQ8ePACgfPny1KtXD3d3d9q0acO8efO4c+fOC70fIT50EiwKIYQQQuTiyVG1Qz
G3mL9gAR06dACgUaNGJCYmEhwcrHXPxIkTqVWrFq6urnz99dccPHhQHT2cOXMmAQEBBAQE4OzszHfffYerq2u2dk1NTZk/fz5ly5albNmyrFixgkePHrF06VLc3NyoW7cuc+bM4Y8//uDatWvZ7k9KSmLBggVMmzZNDYiWLFmiTt/Mi5xyJgI8TM2g97ITBJ5JyHaPRqPBs0g+Dgyvy8runzDrCw8MdRRGNHGhkZs9FSpUIDY2lgkTJvDw4UPatm2rtYYwJ/r6+lr1A2RmZqrP2bNnT8LDw9XPyZMnOXfuHCVKlMDc3JwTJ06wcuVK7O3tGT16NOXLl+fu3bvo6uqya9cutm/fjqurK7Nnz8bZ2ZnY2Ng8vyMhPnQSLAohhBBC5ODpUbXPJ6/l8OEj2HrUBUBPT4927dqxYMECrfueNRIWGRlJ1apVtcp7eXlla9vd3R0DAwP1ODIykvLly2vt9unt7U1mZma2aa7weC1famqqVlvW1tbqlNbnyS1nIkDKlWgAxm2JICNT4fDhw5QqVQpdXV0KFChAQkICujoavErY4Gr6gEcPH6DzxBRVCwsL2rVrx7x581i9ejXr1q3j9u3beerX0ypUqEBERAQlS5bM9sl6f3p6etSvX58pU6Zw6tQp4uLi+Pvvv4HHwae3tzfjxo0jLCwMAwMDNmzY8FJ9EeJDpPe2OyCEEEII8a7JGlV7MlhKOrUTJTMD3zoe+Olo0ACKomBoaMicOXPUcs8aCcurt50CIreciQDp929wa888Uj0a891PMcyePVtdD5k14unl5UVGRgbDhw/Xeh8//vgj9vb2eHp6oqOjw59//omdnd1Lb8wzfPhwPvnkE/r160e3bt0wNTUlIiKCXbt2MWfOHLZu3cqFCxeoWbMm+fLl46+//iIzMxNnZ2eOHDnCnj17aNiwIQULFuTIkSPcuHEDFxeXl+qLEB8iGVkUQgghhHhCTqNqSmYGSf/8Tb46ATh0+Qm3vr9y/EQYJ0+exMHBgZUrV+apbhcXF44cOaJ17vDhw3m67+TJk1ppHUJCQtDR0clxtLBEiRLo6+trtXXnzh2io6Pz1M9n5Uw0LVsXJT2VhKWDmDZmGAMGDKBHjx4ATJ8+HUdHR2rUqEH79u0ZMmQIJiYm6r3m5uZMmTKFSpUqUblyZeLi4vjrr7/Q0Xm5f5KWK1eO4OBgoqOjqVGjBp6enowePRoHBwcArKysWL9+PXXr1sXFxYVff/2VlStXUrZsWSwsLNi3bx9NmjShdOnSjBw5kunTp9O4ceOX6osQHyIZWRRCCCGEeEJOo2oPzx8l81ESZuUbomNoSiLwwNQBrxI2tG7dmgULFjB16tTn1j1gwAA6d+5MpUqV8Pb2Zvny5fzzzz8UL178mff5+fkxZswY/P39GTt2LDdu3ODLL7+kY8eO2NraZitvZmZGQEAAQ4cOxcbGhoIFCzJixIg8B2UFzY1yPG/XfrL6s41PX1Z2/wSvEjbqOQcHB3bs2KF1z927d9Wfu3fvTvfu3XNt98nNgpycnLJtHmRlZZXtXOXKlXPdKKd69eoEBQXleM3FxYXAwMBc+yKEkJFFIYQQQggtOY2qJZ3aiXFRD3QMTbOVa926NceOHePUqVPPrbtdu3aMGjWKYcOGUbFiRS5evEjv3r2fe5+JiQk7duzg9u3bVK5cmc8//5x69eppTX992tSpU6lRowbNmzenfv36VK9enYoVKz63LYAqxayxtzTKNQWGBrC3NHpubkUhxPtNo7zDmUfv3buHpaUliYmJWFhYvO3uCCGEEOIjcCjmFr7znj819OlRtQ9N1rpNQGtKblYAObdDBRq52b/xfomPl8QGb56MLAohPkhPJ6fOi5CQENzd3dHX13/he3MyduxYPDw8nlmmdu3aDBw48D+39SpoNBo2btyY5/JOTk7MnDnztfVHiLdFRtUea+Rmz9wOFbLlTLSzNJJAUYiPhKxZFEKI/zdo0CA8PDzYvn07ZmZmb6
TN9evXa+0U+D4JDQ196zs2ig9f7dq18fDweKNfTOjqaBjT3JXey05wc9sMMlOSKdhqJPC/UbUxzV3R1cktnNQWFxdHsWLFCAsLw8PDg6CgIOrUqcOdO3deehfQN6WRmz0NXO04Gnub6/cfUdD8cZCc12cXQrzfJFgUQoj/FxMTQ69evShcuPAba9Pa+v0dmShQoMDb7oIQr03WqFrHXbokp/zvvJ2lEWOau35Uo2pZOROFEB8fmYYqhHjrateuzZdffsnAgQPJly8ftra2zJs3j+TkZLp06YK5uTklS5Zk+/btAGRkZBAQEECxYsUwNjbG2dmZWbNmPbONzMxMJk2apN5Tvnx51q5dCzz+1l+j0XDr1i26du2KRqNh8eLFeWonKCiIKlWqYGpqipWVFd7e3ly8eFGrzB9//IGTkxOWlpZ88cUX3L9/X+vZn5yGeufOHTp16kS+fPkwMTGhcePGnDt3Tr2+ePFirKys2LFjBy4uLpiZmdGoUSMSEhLUMqGhoTRo0ID8+fNjaWlJrVq1OHHihFafzp07R82aNTEyMsLV1ZVdu3ZpXa9WrRrDhw/XOnfjxg309fXZt28fkH0a6t27d+nZsye2trYYGRnh5ubG1q1b1esHDhygRo0aGBsb4+joSP/+/bXSAAjxrmnkZk8TN3sqFc3HrC88WNn9Ew4Mr/tRBYpCiI+bBItCiHfCkiVLyJ8/P0ePHuXLL7+kd+/etGnThmrVqnHixAkaNmxIx44defDgAZmZmRQuXJg///yTiIgIRo8ezbfffsuaNWtyrX/SpEksXbqUX3/9lX/++YevvvqKDh06EBwcjKOjIwkJCVhYWDBz5kwSEhJo167dc9tJT0+nZcuW1KpVi1OnTnHo0CF69OihJuGGx6OVGzduZOvWrWzdupXg4GAmT56cWzfp3Lkzx44dY/PmzRw6dAhFUWjSpAlpaWlqmQcPHjBt2jT++OMP9u3bR3x8PEOGDFGv379/H39/fw4cOMDhw4cpVaoUTZo0UYPUzMxMWrVqhYGBAUeOHOHXX3/NFhj6+fmxatUqrS3qV69ejYODAzVq1MjW78zMTBo3bkxISAjLli0jIiKCyZMno6urq76HRo0a0bp1a06dOsXq1as5cOAA/fr1y/VdCJElPT2dfv36YWlpSf78+Rk1apT6d/OPP/6gUqVKmJubY2dnR/v27bl+/bp67507d/Dz86NAgQIYGxtTqlQpFi1apF6/dOkSbdu2xcrKCmtra1q0aEFcXJx6XaMBa1MDwjfN59NPypDPypJevXqRmpqqlgkMDKR69epYWVlhY2NDs2bNiImJef0vRgghXjflHZaYmKgASmJi4tvuihDiFUvPyFQOnr+pbAy7rHhWqaZ4V6/+v2vp6YqpqanSsWNH9VxCQoICKIcOHcqxvr59+yqtW7dWj/39/ZUWLVooiqIojx49UkxMTJSDBw9q3RMQEKD4+vqqx5aWlsqiRYue2e8n27l165YCKEFBQTmWHTNmjGJiYqLcu3dPPTd06FClatWq6nGtWrWUAQMGKIqiKNHR0QqghISEqNdv3rypGBsbK2vWrFEURVEWLVqkAMr58+fVMj///LNia2uba58zMjIUc3NzZcuWLYqiKMqOHTsUPT095d9//1XLbN++XQGUDRs2KIqiKNevX1f09PSUffv2qWW8vLyU4cOHq8dFixZVZsyYodapo6OjREVF5diHgIAApUePHlrn9u/fr+jo6CgPHz7Mte9C1KpVSzEzM1MGDBignD17Vlm2bJliYmKi/P7774qiKMqCBQuUv/76S4mJiVEOHTqkeHl5KY0bN1bv79u3r+Lh4aGEhoYqsbGxyq5du5TNmzcriqIoqampiouLi9K1a1fl1KlTSkREhNK+fXvF2dlZSUlJURTl8e8SMzMzpV27dsqZM2eUrVu3KgUKFFC+/fZbtY21a9cq69atU86dO6eEhYUpzZs3V9zd3ZWMjAxFURQlNjZWAZSwsD
BFURRl7969CqDcuXPnDbxBIT4cEhu8ebJmUQjxxgWeSWDclgg16fXVhHtYORQn8EwCjdzs0dXVxcbGBnd3d/WerKTTWSMGP//8MwsXLiQ+Pp6HDx+Smpqa686j58+f58GDBzRo0EDrfGpqKp6ens/s67Pasba2pnPnzvj4+NCgQQPq169P27Ztsbf/3xQ1JycnzM3N1WN7e3utUY8nRUZGoqenR9WqVdVzNjY2ODs7ExkZqZ4zMTGhRIkSudZ57do1Ro4cSVBQENevXycjI4MHDx4QHx+vtuPo6IiDg4N6j5eXl1ZfChQoQMOGDVm+fDk1atQgNjaWQ4cO8dtvv+XY9/DwcAoXLkzp0qVzvH7y5ElOnTrF8uXL1XOKopCZmUlsbCwuLi453icEgKOjIzNmzECj0eDs7Mzp06eZMWMG3bt3p2vXrmq54sWL89NPP1G5cmWSkpIwMzMjPj4eT09PKlWqBDz+bzLL6tWryczMZP78+eqMgEWLFmFlZUVQUBANGzYEwMDAgIULF2JiYkLZsmUZP348Q4cOZcKECejo6NC6dWut/i5cuJACBQoQERGBm5vba347Qgjx+sg0VCHEG5WVtysrUMzyIB16LztB4JnHa+80Go3WLqFZ/5DLzMxk1apVDBkyhICAAHbu3El4eDhdunTRmhb2pKSkJAC2bdtGeHi4+omIiFDXLeYkL+0sWrSIQ4cOUa1aNVavXk3p0qU5fPh/+dme3ulUo9GQmZmZl1eVq5zqVJ6YLurv7094eDizZs3i4MGDhIeHY2Njk+v7yY2fnx9r164lLS2NFStW4O7urhXAP8nY2PiZdSUlJdGzZ0+t93/y5EnOnTunFfgKAZCRqVChqjfN/bpx72EaVatW1Zre7eXlxblz58jIyOD48eM0b96cIkWKYG5uTq1atQDUL0d69+7NqlWr8PDwYNiwYRw8eFCt5+TJk5w/fx5zc3PMzMwwMzPD2tqaR48eaU0jLV++PCYmJlrtJyUlcenSJeDxGmBfX1+KFy+OhYWFGpBm9eFpWeft7Ozw8PBQ102Hh4f/95f3BrxMaiIhxPtJRhaFEG9MRqbCuC0RWsmdnzZuSwQNXO2eWU9ISAjVqlWjT58+6rlnrQ9ydXXF0NCQ+Ph49R+SeZHXdjw9PfH09OSbb77By8uLFStW8Mknn+S5nSwuLi6kp6dz5MgRqlWrBsCtW7eIiorC1dX1hfr9yy+/0KRJE+DxmqybN29qtXPp0iUSEhLUUdAnA9wsLVq0oEePHgQGBrJixQo6deqUa5vlypXj8uXLREdH5zi6WKFCBSIiIihZsmSen0N8nLJmHkQk3ON8xg1Sr9/jckaCOvPgSY8ePcLHxwcfHx+WL19OgQIFiI+Px8fHR/1ypHHjxly8eJG//vqLXbt2Ua9ePfr27cu0adNISkqiYsWKWiPeWV5kt9/mzZtTtGhR5s2bh4ODA5mZmbi5ueX6BU3WmsmjR49SqFAhrU2vhBDiXSIji0KIN+Zo7O1sI4pPUoCExEccjb39zHpKlSrFsWPH2LFjB9HR0YwaNYrQ0NBcy5ubmzNkyBC++uorlixZQkxMDCdOnGD27NksWbLkpduJjY3lm2++4dChQ1y8eJGdO3dy7ty5l55SWapUKVq0aEH37t05cOAAJ0+epEOHDhQqVIgWLVq8UD1//PEHkZGRHDlyBD8/P62Rv/r161O6dGn8/f05efIk+/fvZ8SIEdnqMTU1pWXLlowaNYrIyEh8fX1zbbNWrVrUrFmT1q1bs2vXLmJjY9m+fTuBgYEADB8+nIMHD9KvXz/Cw8M5d+4cmzZtkg1uhJbcZh7cjYvUmnmQtXHT2bNnuXXrFpMnT6ZGjRqUKVMmx2neBQoUwN/fn2XLljFz5kx+//134PGXGOfOnaNgwYKULFlS62Npaanef/LkSR4+fKgeHz58GDMzMxwdHdUvdEaOHEm9evVwcXHhzp07z3zOK1euAFCkSBFsbCQlhRDi3S
XBohDijbl+P/dA8UXK9ezZk1atWtGuXTuqVq3KrVu3tEb/cjJhwgRGjRrFpEmTcHFxoVGjRmzbto1ixYq9dDsmJiacPXuW1q1bU7p0aXr06EHfvn3p2bNnnp4zJ4sWLaJixYo0a9YMLy8vFEXhr7/+yjb19FkWLFjAnTt3qFChAh07dqR///4ULFhQva6jo8OGDRt4+PAhVapUoVu3bkycODHHuvz8/Dh58iQ1atSgSJEiz2x33bp1VK5cGV9fX1xdXRk2bBgZGRnA45HH4OBgoqOjqVGjBp6enowePVpr3aT4uD1r5kHavetcWTyA5p+4YmRkxJQpU2jUqBFFihTBwMCAKVOm0Lx5c2xsbNQR8KxUO6NHj2bTpk2Eh4fTrFkz+vbty6NHj5gxYwZ//PEHiqLQokUL9u/fj0aj4bvvvqN///5cvnwZgBUrVpCcnExAQAAREREsWbKEAQMGkJaWRv78+enSpQtWVlb8/vvvnD9/nt27d9OuXTsA2rRpg4eHB8HBweqzaDQaoqOjAciXLx9jx47N8X2cOXOGxo0bY2Zmhq2tLR07dtSaIXD//n38/PwwNTXF3t6eGTNmZEvFk5KSwpAhQyhUqBCmpqZUrVqVoKAg9XpeUvFkZGQwaNAgdafXYcOGaU17B1i7di3u7u4YGxtjY2ND/fr1JS2OEB+Kt7q9znPIjkdCfFgOnr+pFB2+9bmfg+dvvu2uCiHesKd/Pxg6uinmFT9VDB3dFD0bR0XH1EpBz1AxNTNX6tatqxgaGirR0dHKihUrlMKFCyu6urpKuXLllN9//10BFF1dXeXIkSPKhAkTFBcXF0VXV1fR0dFRqlWrpmzfvl357LPPFHNzc6Vbt25Kp06dlPz58yuAYmtrq3Tv3l39t4e+vr7i6empjB49WrG2tlY0Go1SunRp5dixY+ruqY6OjkqZMmUUQ0NDxd7eXjE1NVUAZc6cOcqwYcMUfX19dTfUhIQExcnJSQGUs2fPKvfv38+2W+qdO3eUAgUKKN98840SGRmpnDhxQmnQoIFSp04d9X1169ZNKVq0qLJ7927l9OnT6vNk7a6cVaZatWrKvn37lPPnzytTp05V35uiPN5dWV9fX6lfv74SGhqqHD9+XHFxcVHat2+v1vHDDz8o+fLlU9atW6dEREQoAQEBirm5ubrb9JUrVxQ9PT3lxx9/VGJjY5VTp04pP//8s3L//v3X+xdGfJQkNnjzJFgUQrwx6RmZyiff71accgkSnYZvVT75freSnpH5trsqhHjDNoZdzjFYLNR7oYJGRynUZ4lSdPhWZWPYZUVRFKVevXrKN998k2t9TZs2VQYPHqwoiqLcu3dP0dfXV/7880/1+t27dxUTExOt4Ion0sdkeTKlzh9//KE4OzsrmZn/+x2VkpKiGBsbKzt27FAURVEcHByUiRMnatVRuXJlpU+fPupx+fLllTFjxqjHTweLEyZMUBo2bKhVx6VLlxRAiYqKytPzXLx4UdHV1dVKkaMo2u8tL6l47O3tlSlTpqjHaWlpSuHChdVg8fjx4wqgxMXFKUK8bhIbvHmywY0Q4o3R1dEwprkrvZedQANa082y9jkc09wVXR1NDneLl7V48WIGDhzI3bt333ZXhMhV3M0HOZ5PvXERlEyuzHs8vdt3ti46msdTLLPW+2VkZPD999+zZs0a/v33X1JTU0lJSVF3ML1w4QJpaWlUqVJFrdfS0hJnZ+cX6uOTu6c+KWv31Hv37nHlyhW8vb21rnt7e3Py5MkXamfv3r2YmZlluxYTE8PDhw+f+zynT58mIyMj24ZTT743eHYqnsTERBISErTS+ejp6VGpUiV1Kmr58uWpV68e7u7u+Pj40LBhQz7//HPy5cuX5+cVQry7JFgUQrxRjdzsmduhglaeRQA7SyPGNHfNttuh+O/atWun7owqxLso8EwCM3dH53hNSX0IGh0c/GdSwNKYVT281C+UsoKpqVOnMmvWLGbOnIm7uzumpq
YMHDjwhdPFPJ2GBiAtLU39+VXtnvo8SUlJNG/enB9++CHbNXt7e86fP5+nOnR1dTl+/Di6urpa154MQp+Xiud5dHV12bVrFwcPHmTnzp3Mnj2bESNGcOTIkWeuCRdCvB8kWBRCvHGN3Oxp4GrH0djbXL//iILmRlQpZi0jiq+JsbHxc/MgCvG2PC+ljoFtCVAySX9wl0kDPsW5dPYvlEJCQmjRogUdOnQAHudjjY6OVlPOFC9eHH19fUJDQ9WNmhITE4mOjqZmzZpqPQUKFNDa3OXcuXM8ePC/Ec8KFSqwevVqChYsiIWFRY79dXBwICQkRCtNT0hIiNYo4PNUqFCBdevW4eTkhJ5e9n+q5eV5PD09ycjI4Pr169SoUSPPbT/J0tISe3t7jhw5otabnp7O8ePHqVChglpOo9Hg7e2Nt7c3o0ePpmjRomzYsIFBgwa9VLtCiHeH7IYqhHgrdHU0eJWwoYVHIbxK2Eig+AyBgYFUr15d3Y2wWbNmar7HrGTe69evp06dOpiYmFC+fHkOHTqk3p+142GWsWPH4uHhwcKFCylSpAhmZmb06dOHjIwMpkyZgp2dHQULFsy2Q+rdu3fp1q0bBQoUwMLCgrp1677Q1DohcvK8lDr61oUwda1N2t+zeRB9iNjYWI4ePcqkSZPYtm0b8DhdTNboVmRkJD179uTatWtqHebm5vj7+zN06FD27t3LP//8Q0BAADo6Omg0//vdU7duXebMmUNYWBjHjh2jV69eWiNvfn5+5M+fX909NTY2lqCgIK3dU4cOHcoPP/zA6tWriYqK4uuvvyY8PJwBAwbk+Z307duX27dv4+vrS2hoKDExMezYsYMuXbqQkZGRp+cpXbo0fn5+dOrUifXr1+f43vJiwIABTJ48mY0bN3L27Fn69OmjNaX9yJEjfP/99xw7doz4+HjWr1/PjRs3XjqFkBDi3SLBohBCvOOSk5MZNGgQx44dY8+ePejo6PDZZ5+RmZmplhkxYgRDhgwhPDyc0qVL4+vrS3p6eq51xsTEqHkQV65cyYIFC2jatCmXL18mODiYH374gZEjR3LkyBH1njZt2nD9+nW2b9+ujizUq1eP27efnRdTiGfJS0odmyYDafBpGwYPHoyzszMtW7bUGlUbOXIkFSpUwMfHh9q1a2NnZ0fLli216vjxxx/x8vKiWbNm1K9fH29vb1xcXDAyMlLLTJ8+HUdHR2rUqEH79u0ZMmSIuu4RHq/v27dvH0WKFKFVq1a4uLgQEBDAo0eP1JHG/v37M2jQIAYPHoy7uzuBgYFs3ryZUqVK5fmdZI1OZmRk0LBhQ9zd3Rk4cCBWVlbo6Ojk+XkWLVpEp06dcn1veTF48GA6duyIv78/Xl5emJub89lnn6nXLSws2LdvH02aNKF06dKMHDmS6dOn07hx4zy3IYR4d2mUF5mY/obdu3cPS0tLEhMTc53uIYQQH5qMTOWZU3Rv3rxJgQIFOH36NGZmZhQrVoz58+cTEBAAQEREBGXLliUyMpIyZcpk2+Bm7NixTJ06latXr6obdTRq1IioqChiYmLUf4yWKVOGzp078/XXX3PgwAGaNm3K9evXMTQ0VPtSsmRJhg0bRo8ePd7Q2xEfmkMxt/Cdd/i55VZ2/wSvEq8ugX1ycjKFChVi+vTp6n8777MP7XmEyInEBm+erFkUQoh3SOCZhGyb/1il3cQqcgPxZ09x8+ZNdUQxPj5eXZNVrlw5tby9/eM1XdevX6dMmTI5tuPk5KS1o6OtrS26urpqoJh1LmtXxJMnT5KUlKS1iyLAw4cP1SmxQryMKsWssbc04mrioxzXLWp4vAFWlWLW/6mdsLAwzp49S5UqVUhMTGT8+PEAtGjR4j/V+7Z8aM8jhHg3SbAohBDviMAzCfRediLbP5j/WTwSPYsCjB4xmZbe7mRmZuLm5qa10+OT66qy1iw9OU31aTntgJjTuaw6kpKSsLe3JygoKFtdT66HFOJFvcmUOtOmTS
MqKgoDAwMqVqzI/v37yZ8//3+u92350J5HCPHukWBRCCHeAbntCJnx8B7pty+Tv1E/Nt+wYZhzGQ4dDHnj/atQoQJXr15FT08PJyenN96++LC9iZQ6np6eHD9+/D/X86740J5HCPFukmBRCCHeAbntCKljZIaOsQX3T+4g3syaX1bc44+fJr3x/tWvXx8vLy9atmzJlClTKF26NFeuXGHbtm189tlnVKpU6Y33SXxYJKWOEEK8eyRYFEKId0BuO0JqNDrk/3QYd3b/xpUFfflxb0kWz5tL7dq132j/NBoNf/31FyNGjKBLly7cuHEDOzs7atasia2t7Rvti/hwZaXUEUII8W6Q3VCFEOId8LZ2hBRCCCHeFxIbvHmSZ1EIId4BWTtC5jbhTgPYv4IdIYUQQggh8kqCRSGEeAdk7QgJZAsYX/WOkEIIIYQQeSHBohBCvCOydoS0szTSOm9nacTcDhVeyY6QQgghhBB5JRvcCCHEO0R2hBRCCCHEu0KCRSGEeMfIjpBCCCGEeBfINFQhhBBCCCGEENlIsCiEEEIIIYQQIhsJFoUQQgghhBBCZCPBohBCCCGEEEKIbCRYFEIIIYQQQgiRjQSLQgghhBBCPEft2rUZOHDg2+6GEG+UpM4QQgghhBDiOdavX4++vv7b7oYQb5QEi0IIIYQQQjyHtbX12+6CEG+cTEMVQgghhBDvjfv37+Pn54epqSn29vbMmDFDa4roH3/8QaVKlTA3N8fOzo727dtz/fp19f6goCA0Gg07duzA09MTY2Nj6taty/Xr19m+fTsuLi5YWFjQvn17Hjx4oN739DRUJycnvv/+e7p27Yq5uTlFihTh999/1+rrwYMH8fDwwMjIiEqVKrFx40Y0Gg3h4eGv8xUJ8cpIsCiEEEIIId4bgwYNIiQkhM2bN7Nr1y7279/PiRMn1OtpaWlMmDCBkydPsnHjRuLi4ujcuXO2esaOHcucOXM4ePAgly5dom3btsycOZMVK1awbds2du7cyezZs5/Zl+nTp1OpUiXCwsLo06cPvXv3JioqCoB79+7RvHlz3N3dOXHiBBMmTGD48OGv9F0I8brJNFQhhBBCCPHOyshUOBp7m+v3H2GqSWPJkiWsWLGCevXqAbBo0SIcHBzU8l27dlV/Ll68OD/99BOVK1cmKSkJMzMz9dp3332Ht7c3AAEBAXzzzTfExMRQvHhxAD7//HP27t37zACvSZMm9OnTB4Dhw4czY8YM9u7di7OzMytWrECj0TBv3jyMjIxwdXXl33//pXv37q/u5QjxmkmwKIQQQggh3kmBZxIYtyWChMRHAKRev0BaWhoPLIuqZSwtLXF2dlaPjx8/ztixYzl58iR37twhMzMTgPj4eFxdXdVy5cqVU3+2tbXFxMREDRSzzh09evSZ/XuyDo1Gg52dnTrlNSoqinLlymFkZKSWqVKlygs9vxBvm0xDFUIIIYQQ75zAMwn0XnZCDRSfNGLDGQLPJGQ7n5ycjI+PDxYWFixfvpzQ0FA2bNgAQGpqqlbZJ3c21Wg02XY61Wg0aqCZm5e5R4j3iQSLQgghhBDihXXu3BmNRqMGWra2tjRo0ICFCxe+UMA0duxYPDw8tM5lZCqM2xKBAiQeWsPFKZ+SeGQdepZ2oKNHSsI5xm2JYO3addSpU4ewsDB++eUXPvnkE27dusXkyZOpUaMGZcqU0drc5k1ydnbm9OnTpKSkqOdCQ0PfSl+EeFkSLAohhBBCiJfSqFEjEhISiIuLY/v27dSpU4cBAwbQrFkz0tPTX7reo7G31RHFpNO7sKjaiqRTu9AxNMHMrS539i4k9tRR5i9bzc2bNzE1NcXPz09dxzhy5EguXLjA5s2bmTBhwit51hfVvn17MjMz6dGjB5GRkezYsYNp06YBj0cghXgfSLAohBBCCPEGPZ2C4VVzcnJi5syZr63+JxkaGmJnZ0ehQoWoUKEC3377LZs2bWL79u0sXrwYeLxWsEWLFpiZmW
FhYUHbtm25du0aAIsXL2bcuHGcPHlSHaVcvHgx1+8/DhQfxZ9GSUvFqnoHlNQHPLocSb663TAoVIbr68ZxNCSYrl27UrZsWezs7Jg5cyaFChVi8+bNuLq6MnnyZDVAe9MsLCzYsmUL4eHheHh4MGLECEaPHg2gtY5RiHeZbHAjhBBCCCFembp161K+fHnWr19P165d1UAxODiY9PR0+vbtS7t27QgKCqJdu3acOXOGwMBAdu/eDTzesCb8yuP8hkmndmLqWguNrh6mLrVIOrWT/E0GUKD5UABWdv+EcnZGjBs3jh49epCZmYmOjg4TJkygX79+ap8URVF/rl27ttYxPJ5S+3R6jbFjxzJ27Fj1OCgoSOt6XFxctmd/On9itWrVOHnypHq8fPly9PX1KVKkyDPfoRDvCgkWhRBCCCHEK1WmTBlOnTrFnj17OH36NLGxsTg6OgKwdOlSypYtS2hoKJUrV8bMzAw9PT3s7OzU+6sUM6KAYQbxUQex6zgVANOydbi6YjiPyjUg494NCpd2R+9OHH6DH08zbdGiBdOmTSMpKYm2bdu++YfOwdKlSylevDiFChXi5MmTDB8+nLZt22JsbPy2uyZEnsg0VCGEEEKINywzM5Nhw4ZhbW2NnZ2d1gjWs6ZtZtmyZQuVK1fGyMiI/Pnz89lnn+Xa1vz587GysmLPnj0AnDlzhsaNG2NmZoatrS0dO3bk5s2bwOPgxsbGRmtTFoCWLVvSoUNHDsXcYlP4vxyKucVTg3NaFEVBo9EQGRmJo6OjGigCuLq6YmVlRWRkZK736+po8FLOomdlh2HBx+ksDGyLo2dRgEexJ7h3dD1nf+mFT8MGJCcns3//fnbu3Mm4ceNYs2YNBQsWzL1zb9DVq1fp0KEDLi4ufPXVV7Rp04bff//9bXdLiDyTYFEIIYQQ4hUJCQnB3d0dfX19WrZsmeO5u3fvMnv2bHR0dDhy5AhTpkxh/Pjx7Nq1i8zMTFq0aMHt27cJDg5m165dXLhwgXbt2qltbNu2jc8++4wmTZoQFhbGnj17cs3fN2XKFL7++mt27txJvXr1uHv3LnXr1sXT05Njx44RGBjItWvX1JG4Nm3akJGRwebNm9U6rl+/ztZt2zhh5IHvvMMMWBWO77zD/HUmgev3sqe1AIiMjKRYsWL/6V0eDlxL+q144qZ+ysUpjz9pNy+Rcfk0m3bv5+GDZG7fvs2uXbv4559/6NatG2vWrKF+/fr/qd1XadiwYcTFxfHo0SNiY2OZMWMGJiYmb7tbQuTZG5mG+vPPPzN16lSuXr1K+fLlmT17tiQlFUIIIcQHZ9CgQXh4eLB9+3bMzMxyPFe7dm0Avv76a6ysrChVqhRz5sxRR/6eN21z4sSJfPHFF4wbN05tt3z58tn64uvry6pVqzh48KD67645c+bg6enJ999/r5ZbuHAhjo6OREdHU7p0adq3b8+iRYto06YNACOn/oLGLD/JNs48uYfnw9QMwi8nE3gmgUZu9ur5v//+m9OnT/PVV19RuHBhLl26xKVLl9TniYiI4O7du7i6ugJgYGBARkaGVt9Pnz7NsWPHCAoKwtIqH6f/TeR2cgp6aQ/o59cCJ71E4HGbK1eupGvXrqxatYqmTZu+0J+XEOLZXnuwuHr1agYNGsSvv/5K1apVmTlzJj4+PkRFRb0zUwSEEEIIIV6FmJgYevXqReHChbXO9ejRk0spxly/mUxySvaUEvb29ly/fv250zYrV65MeHg43bt3f2Y/pk+fzt27dwFwcXFRz588eZK9e/eqgezTfS9dujTdu3encuXK/Pvvv9jZO7B82VJM3erlnO4hPY0RKw7g2tuLmzeuExgYyKRJk2jWrBmdOnVCR0cHd3d3/Pz8mDlzJunp6fTp04datWpRqVIl4PHurbGxsYSHh1O4cGHMzc1ZsGABVapUoWbNmgCUL/e/JpfMrMyCBQuYOnUqK1aswN/fn1mzZlG1alWuXr0KgLGxMZaWls
98R0KI53vt01B//PFHunfvTpcuXXB1deXXX3/FxMSEhQsXvu6mhRBCCCFeqZSUFPr370/BggUxMjKievXqhIaGEhcXh0aj4datW3Tt2lVNAZF1rlu3AKqVzE/AiGlcvJUMwE8rtlCuXDmMjIzYv38/t27d0mrrwIED1KhRA2NjYxITE1m2bBnJycnq5ih//PEHlSpVwtzcHDs7O9q3b68moPf09CQpKQmAfPnyodFo6Ny5M0lJSTRv3pzw8HCtz7lz59TAzNPTk/Lly7N06VKWbvmbB9cuYuae89TOh7HHOTGpLcWLF6NRo0bs3buXn376iU2bNqGrq4tGo2HTpk3ky5ePmjVrUr9+fYoXL87q1avVOlq3bk2jRo2oU6cOBQoUYMmSJSxbtozWrVvn2Gbr1q1ZunQpaWlp/P777+oOq/b29upnwIAB/+FPWQiRRaM8vXfwK5SamoqJiQlr165V5+0D+Pv7c/fuXTZt2qRVPiUlRWtB9b1793B0dCQxMRELC4vX1U0hhBBCiDwZMGAAa9euZf78+RQtWpQpU6awefNmzp07R1paGs7OzowfP5527dphbm7OptDzdPCphlUNP0zL1ERjaMLVPwaTdiMOfRtHxn4/heZeZWnQ4PFGLWvWrKF58+bs3buXxo0b89133+Hs7EyTJk1wdnbGy8uLuLg4ChUqRN26dbG3t8fZ2Znr168zaNAgrKysiIiIoH///jx69IgRI0YwbNgwvvrqK4yNjZkyZQrr1q3jzJkz6OnlPsFs7ty5zJw5k5KeXvx95BS27Z6d2H7WFx608Cj0ql+3EFru3buHpaWlxAZv0GsdWbx58yYZGRnY2tpqnbe1tVWnCTxp0qRJWFpaqp8np2AIIYQQQrxNycnJzJ07l6lTp9K4cWNcXV2ZN28exsbGLFy4EDs7OzQaDZaWltjZ2WFkbMLsQzdBo0HH0BRds3zo6Buq9Vl5+7LlVgFcy7pRoUIFHj58yL1793B3d6dNmzb4+PhQrVo1Ro0aRa1atZg/fz5Lly7lm2++YeXKlVy8eBEnJyeSk5MJDg7mp59+Yvv27WquwWrVqgGP945YtWoVlpaW9O3bl9u3b+Pr60toaCgxMTHs2LGDLl26aK0bbN++PZcvX2bX+pWYlWvw3HdT0FySzAvxIXqndkP95ptvSExMVD+XLl16210SQgghxEcsI1NR00WsDzpOWloa3t7e6nV9fX2qVKmSYxqIo7G3SUjMebdQAINCZUhIfMTR2NsYGBhgaWnJ2bNn2bRpEykpKaxfv56qVasSHh5OaGgoPj4+ZGZm4ujoyJ9//smqVasoW7YsHh4ejBw5klq1agGQnq69JnL16tWMHDmS2bNn4+DgQEhICBkZGTRs2BB3d3cGDhyIlZUVOjr/+2ehpaUlrVu3xtzcjOKVapPDakUANIC9pRFVilnn/aUKId4br3WDm/z586Orq5stN9C1a9e0Eq9mMTQ0xNDQMNt5IYQQQog3LfBMAuO2RKgBX+r1WACCoq7jX7Toc++/fj/nQNG6fg+urfxWq9zGjRvx9PQEoEiRItjZ2dGxY0f69++f7f4iRYpQpEgRevToga+vL7169aJAgQLEx8fj4+PDX3/9hYeHB0FBQQB4e3ur6xcBSpUqxfr165/b/3///Rc/Pz+afOZB72Un0ABPrl3KCiDHNHdFVye3cFII8T57rSOLBgYGVKxYUd0KGh4nod2zZw9eXl6vs2khhBBCiJcWeCaB3stOaI0M6lnZg64eQ+asIfBMAgBpaWmEhoaqaSCe9LypmSn/Rqnl7ty5Q3R0tLpzaYUKFYiIiKBkyZLZPgYGBpw9e5Zbt24xefJkatSoQZkyZdTNbbIYGBgAZEtL8Tx37txhw4YNBAUF0bdvXxq52TO3QwXsLLWfx87SiLkdKmilzRBCfFhee+qMQYMG4e/vT6VKlahSpQozZ84kOTmZLl26vO6mhRBCCCFeWEamwrgtETy9A6COgRHmHk
24s3chX023ptDgT5k+bSoPHjwgICAgWz1Villjb2lEfC7tJB5cSYH8NpgkF6HzoJHkz59f3RBw+PDhfPLJJ/Tr149u3bphampKREQEu3btYs6cORQpUgQDAwNmz55Nr169OHPmDBMmaG9CU7RoUTQaDVu3bqVJkyYYGxvnmDLjaZ6enty5c4cffvgBZ2dnABq52dPA1Y6jsbe5fv8RBc0fTz2VEUUhPmyvfc1iu3btmDZtGqNHj8bDw4Pw8HACAwOzbXojhBBCCPEueNZaw3y1O2Pi7E306slUqliR8+fPs2PHDvLly5etrK6OhjHNs484qnXV6syDfQuoUrkSV69eZcuWLepoYLly5QgODiY6OpoaNWrg6enJ6NGjcXBwAKBAgQIsXryYP//8E1dXVyZPnsy0adO06i9UqBDjxo3j66+/xtbWln79+uXp+ePi4khMTGTIkCHZnserhA0tPArhVcJGAkUhPgKvNXXGfyXb4wohhBDiTdsU/i8DVoU/t1xe00U8vfYRHm8KM6a5q0zhFOIFSGzw5r32aahCCCGEEO+TvKaByGs5mcIphHhfSbAohBDvmc6dO3P37l02btz4trsixAcpa63h1cRH2dYtwuNdQO1eMF1E1hROIYR4n0iwKIQQ75lZs2bxDq8gEOK9l7XWUNJFCCE+dq99gxshhBCvlqWlJVZWVm+7G69MWlra2+6CENlIugghhJBgUQgh3llr167F3d0dY2NjbGxsqF+/PsnJyXTu3FndXh+gdu3a9O/fn2HDhmFtbY2dnR1jx47Vquvs2bNUr14dIyMjXF1d2b17NxqNRmsq6/DhwyldujQmJiYUL16cUaNGaQVyY8eOxcPDg99++w1HR0dMTExo27YtiYmJapnMzEzGjx9P4cKFMTQ0xMPDg8DAQPV6XFwcGo2G1atXU6tWLYyMjFi+fDkA8+fPx8XFBSMjI8qUKcMvv/zyal+oEC+okZs9B4bXZWX3T5j1hQcru3/CgeF1JVAU4jV5+v9v4u2TaahCCPEOSkhIwNfXlylTpvDZZ59x//599u/fn+v00yVLljBo0CCOHDnCoUOH6Ny5M97e3jRo0ICMjAxatmxJkSJFOHLkCPfv32fw4MHZ6jA3N2fx4sU4ODhw+vRpunfvjrm5OcOGDVPLnD9/njVr1rBlyxbu3btHQEAAffr0UQO+WbNmMX36dH777Tc8PT1ZuHAhn376Kf/88w+lSpVS6/n666+ZPn06np6easA4evRo5syZg6enJ2FhYXTv3h1TU1P8/f1f8dsVIu9kraEQb44ss3gHKe+wxMREBVASExPfdleEEOKNOn78uAIocXFx2a75+/srLVq0UI9r1aqlVK9eXatM5cqVleHDhyuKoijbt29X9PT0lISEBPX6rl27FEDZsGFDrn2YOnWqUrFiRfV4zJgxiq6urnL58mX13Pbt2xUdHR21bgcHB2XixInZ+tKnTx9FURQlNjZWAZSZM2dqlSlRooSyYsUKrXMTJkxQvLy8cu2fEEJ86FJSUl5b3ampqa+t7tdFYoM3T6ahCiHEOyIjU+FQzC02hf/LA7PC1K1XD3d3d9q0acO8efO4c+dOrveWK1dO69je3p7r168DEBUVhaOjI3Z2dur1KlWqZKtj9erVeHt7Y2dnh5mZGSNHjiQ+Pl6rTJEiRShU6H955by8vMjMzCQqKop79+5x5coVvL29te7x9vYmMjJS61ylSpXUn5OTk4mJiSEgIAAzMzP189133xETE5PrMwshxPumdu3a9OvXj379+mFpaUn+/PkZNWqUOprm5OTEhAkT6NSpExYWFvTo0QOAdevWUbZsWQwNDXFycmL69Ola9SYkJNC0aVOMjY0pVqwYK1aswMnJiZkzZ6plNBoNc+fO5dNPP8XU1JSJEyeSkZFBQEAAxYoVw9jYGGdnZ2bNmqVVd9bU0O+//x5bW1usrKwYP3486enpDB06FGtrawoXLsyiRYvUe7KWHKxZs4
YaNWpgbGxM5cqViY6OJjQ0lEqVKmFmZkbjxo25ceNGtraefF9PLrN4coZKlrwssxAvT6ahCiHEOyCnpN12db9mjH8y986fYPbs2YwYMYIjR47keL++vr7WsUajITMzM8/tHzp0CD8/P8aNG4ePjw+WlpasWrUq2z9IXhVTU1P156SkJADmzZtH1apVtcrp6uq+lvaFEOJtWbJkCQEBARw9epRjx47Ro0cPihQpQvfu3QGYNm0ao0ePZsyYMQAcP36ctm3bMnbsWNq1a8fBgwfp06cPNjY2dO7cGYBOnTpx8+ZNgoKC0NfXZ9CgQeoXhk8aO3YskydPZubMmejp6ZGZmUnhwoX5888/sbGx4eDBg/To0QN7e3vatm2r3vf3339TuHBh9u3bR0hICAEBARw8eJCaNWty5MgRVq9eTc+ePWnQoAGFCxdW7xszZgwzZ86kSJEidO3alfbt22Nubs6sWbPUde+jR49m7ty5z3xfWcss/v77b3r16sXff/9Ny5Yt87zMQrw8CRaFEOItCzyTQO9lJ7Llc7t2L4U5/+gxt0MvRo8eTdGiRdmwYcML1+/s7MylS5e4du0atra2AISGhmqVOXjwIEWLFmXEiBHquYsXL2arKz4+nitXruDg4ADA4cOH0dHRwdnZGQsLCxwcHAgJCaFWrVrqPSEhITmOZGaxtbXFwcGBCxcu4Ofn98LPJ4QQ77KMTIWjsbe5fv8R9x6m4ejoyIwZM9BoNDg7O3P69GlmzJihBot169bVCnj8/PyoV68eo0aNAqB06dJEREQwdepUOnfuzNmzZ9m9e7c6YgePNwzLaRSuffv2dOnSRevcuHHj1J+LFSvGoUOHWLNmjVawaG1tzU8//aT+vp8yZQoPHjzg22+/BeCbb75h8uTJHDhwgC+++EK9b8iQIfj4+AAwYMAAfH192bNnjzoDJSAggMWLFz/z/ZUrV04NnG1tbenVqxfBwcG0bNmSXbt2ERMTQ1BQkDp7ZuLEiTRo0OCZdYq8k2BRCCHeooxMhXFbIrIFiilXonh08STGTp58+0ciiRX0uHHjBi4uLpw6deqF2mjQoAElSpTA39+fKVOmcP/+fUaOHAk8HoEEKFWqFPHx8axatYrKlSuzbdu2HANTIyMj/P39mTZtGvfu3aN///60bdtW/Z/00KFDGTNmDCVKlMDDw4NFixYRHh6uboCTm3HjxtG/f38sLS1p1KgRKSkpHDt2jDt37jBo0KAXel4hhHhXPD1r5GrCPSwKFmHHP1fVXXW9vLyYPn06GRkZgPY0fYDIyEhatGihdc7b25uZM2eSkZFBVFQUenp6VKhQQb1esmRJ8uXLl60/T9cN8PPPP7Nw4ULi4+N5+PAhqampeHh4aJUpW7YsOjr/W71ma2uLm5ubeqyrq4uNjU220cwnl0hkfVnp7u6udS6nEdDc6shy8+ZNIO/LLMTLkzWLQgjxFh2Nva019TSLjoEJjy6d4drasYRN78ywb75l+vTpNG7c+IXb0NXVZePGjSQlJVG5cmW6deumjiAaGT3OIffpp5/y1Vdf0a9fPzw8PDh48KD6LfaTSpYsSatWrWjSpAkNGzakXLlyWiku+vfvz6BBgxg8eDDu7u4EBgayefPmHL/hflK3bt2YP38+ixYtwt3dnVq1arF48WKKFSv2ws8rhBDvgqxZI0//jn+YmkHvZScIPJOQ431PTtN/1Z6ue9WqVQwZMoSAgAB27txJeHg4Xbp0ITU1VatcTksd8rL84ckyWV9OPn3ueUsmnm4HeKFlFuK/kZFFIYR4i67fzx4oAujnd8S27Xj1eNYXHrTweLyxzNNTdoKCgrLd//TC/jJlynDgwAH1OCQkBHgc/GWZMmUKU6ZM0bpv4MCB2eru3bs3vXv3zrHfOjo6jBkzRp0y9DQnJ6dct0Vv37497du3z/GaEEK8T3KbNQKQciUagHFbImjgasfhw4cpVapUrmu0XVxc1N/ZWUJCQihdujS6uro4Oz
uTnp5OWFgYFStWBB6nOXrWpmhP1lOtWjX69OmjnnufNhbLyzIL8d/IyKIQQrxFBc2NXmm53GzYsIFdu3YRFxfH7t276dGjB97e3pQoUeI/1Ss+Dlk7G4aHh38Q7QjxuuU2awQg/f4Nbu2ZR/yF83z30zxmz57NgAEDcq1r8ODB7NmzhwkTJhAdHc2SJUuYM2cOQ4YMAR5/GVi/fn169OjB0aNHCQsLo0ePHhgbG6ujebkpVaoUx44dY8eOHURHRzNq1Kj3Kth6cpnFqVOnCAkJybbMQvw3EiwKIcRbVKWYNfaWRuT2vzQNYG9pRJVi1v+pnfv379O3b1/KlClD586dqVy5Mps2bfpPdYp3V+3atXMcFRZCvBm5zRoBMC1bFyU9lYSlg5g2ZhgDBgxQU2TkpEKFCqxZs4ZVq1bh5ubG6NGjGT9+vLoTKsDSpUuxtbWlZs2afPbZZ3Tv3h1zc3N1qUFuevbsSatWrWjXrh1Vq1bl1q1bWqOM77q8LLMQ/41GyW0+0Dvg3r17WFpakpiYiIWFxdvujhBCvBZZ61oArSlLWQHk3A4V1I0QhMiL2rVr4+HhoZVj7b+Ii4ujWLFihIWFZdv44lV6U+0I8bodirmF77zD2c5fXfE1BgWLY13/cXC4svsneJWweeXtX758GUdHR3bv3k29evVeef1vS15ig5CQEKpXr8758+dl9swrICOLQgjxljVys2duhwrYWWp/C2pnaSSBonhhnTt3Jjg4mFmzZqHRaNBoNMTFxXHmzBkaN26MmZkZtra2dOzYUd1REB5vGDFlyhRKliyJoaEhRYoUYeLEiVp1X7hwgTp16mBiYkL58uU5dOiQem3x4sVYWVmxY8cOXFxcMDMzo1GjRiQkJGi1MX78eAoXLoyhoSEeHh4EBgY+83mCg4OpUqUKhoaG2Nvb8/XXX5Oenq5ev3//Pn5+fpiammJvb8+MGTO0RlbHjx+vtWtjFg8Pjxw3cfqQPZ3wXLw+b2rWSJa///6bzZs3Exsby8GDB/niiy9wcnKiZs2ar6T+d5kss3i9JFgUQoh3QCM3ew4Mr8vK7p8w6wsPVnb/hAPD60qgKF7YrFmz8PLyonv37iQkJJCQkIC5uTl169bF09OTY8eOERgYyLVr17TyqGXlSRs1ahQRERGsWLFC3TAiy4gRIxgyZAjh4eGULl0aX19frcDtwYMHTJs2jT/++IN9+/YRHx+vrqvK6tv06dOZNm0ap06dwsfHh08//ZRz587l+Cz//vsvTZo0oXLlypw8eZK5c+eyYMECvvvuO7XMoEGDCAkJYfPmzezatYv9+/dz4sQJ9XrXrl2JjIzUWocVFhbGqVOnsuWbe1mdO3dGo9HQq1evbNf69u2LRqPRmjIoPny6OhrGNHcFyDVgHNPcFV2dV7OuLi0tjW+//ZayZcvy2WefUaBAAYKCgnLcSfRDI8ssXjPlHZaYmKgASmJi4tvuihBCCPHOSs/IVA6ev6lsDLusHDx/U6lZq5YyYMAA9fqECROUhg0bat1z6dIlBVCioqKUe/fuKYaGhsq8efNyrD82NlYBlPnz56vn/vnnHwVQIiMjFUVRlEWLFimAcv78ebXMzz//rNja2qrHDg4OysSJE7Xqrly5stKnTx+tdsLCwhRFUZRvv/1WcXZ2VjIzM7XqNDMzUzIyMpR79+4p+vr6yp9//qlev3v3rmJiYqL1/I0bN1Z69+6tHn/55ZdK7dq1c3zWl+Hv7684OjoqlpaWyoMHD9TzDx8+VKysrJQiRYoo/v7+L11/amrqK+jl4362aNHildQl8mb76SvKJ9/vVooO36p+Pvl+t7L99JW33bX3ksQGb56MLAohhBDvscAzCVT/4W985x1mwKpwfOcdJiz+DnG3ktUyJ0+eZO/evZiZmamfMmXKAI+3yY+MjCQlJeW5a5ueTI5tb/941PvJhNomJiZaU7/s7e3V6/fu3ePKlSt4e3tr1ent7U1kZGSO7UVGRuLl5a
W1q6G3tzdJSUlcvnyZCxcukJaWppWE29LSEmdnZ616unfvzsqVK3n06BGpqamsWLGCrl27PvNZX1SFChVwdHRk/fr16rn169dTpEgRPD091XOBgYFUr14dKysrbGxsaNasmVaqgqwdYVevXk2tWrUwMjJi7ty5GBsbs337dq02N2zYgLm5OQ8ePADg0qVLtG3bFisrK6ytrWnRogVxcXGv9DnFi5FZI+J9J8GiEEII8Z7KLel3anomf0deV5N+JyUl0bx5c8LDw7U+586do2bNmhgbG+epvZwSbD+ZHDunJN3KO7CPXvPmzTE0NGTDhg1s2bKFtLQ0Pv/881feTteuXVm0aJF6vHDhwmxTXZOTkxk0aBDHjh1jz5496Ojo8Nlnn2VLMv71118zYMAAIiMjadOmDc2aNWPFihVaZZYvX07Lli0xMTEhLS0NHx8fzM3N2b9/PyEhIeq60acTrIs3S1dHg1cJG1p4FMKrhM0rm3oqxJug97Y7IIQQQogX96yk3xpdfVAy1aTfFSpUYN26dTg5OaGnl/1//aVKlcLY2Jg9e/bQrVu319JfCwsLHBwcCAkJoVatWur5kJAQrZHBJ7m4uLBu3ToURVGD05CQEMzNzSlcuDD58uVDX1+f0NBQihQpAkBiYiLR0dFaG3vo6enh7+/PokWLMDAw4IsvvshzgJyTjEyFo7G3uX7/EQXNjciKhzt06MA333zDxYsX1b6uWrWKoKAg9d7WrVtr1bVw4UIKFChARESE1kY8AwcOpFWrVuqxn58fHTt25MGDB5iYmHDv3j22bdvGhg0bAFi9ejWZmZnMnz9ffVeLFi3CysqKoKAgGjZs+NLPK4T4eEmwKIQQQryHnpX0W8+yICkJUVyKv8jO4+fo27cv8+bNw9fXl2HDhmFtbc358+dZtWoV8+fPx8jIiOHDhzNs2DAMDAzw9vbmxo0b/PPPPwQEBLyyPg8dOpQxY8ZQokQJPDw8WLRoEeHh4SxfvjzH8n369GHmzJl8+eWX9OvXj6ioKMaMGcOgQYPQ0dHB3Nwcf39/hg4dirW1NQULFmTMmDHo6OhkS8jdrVs3XFxcgMdB3MsKPJPAuC0RWu8++UwCpSw1FChQgKZNm7J48WIURaFp06bkz59f6/5z584xevRojhw5ws2bN9URxfj4eK1gsVKlSlr3NWnSBH19fTZv3swXX3zBunXrsLCwoH79+sDjqcbnz5/H3Nxc675Hjx5pTXMVQogXIcGiEEII8R56VtJviyqtuLntR67M70OTX1OIjY0lJCSE4cOH07BhQ1JSUihatCiNGjVCR+fxipRRo0ahp6fH6NGjuXLlCvb29jnu7vlf9O/fn8TERAYPHsz169dxdXVl8+bNlCpVKsfyhQoV4q+//mLo0KGUL18ea2trAgICGDlypFrmxx9/pFevXjRr1gwLCwuGDRvGpUuXsiXkLlWqFNWqVeP27dtUrVr1pfqfNe336dHch6kZhF9OJvBMAl27dqVfv34A/Pzzz9nqaN68OUWLFmXevHk4ODiQmZmJm5tbtqmipqamWscGBgZ8/vnnrFixgi+++IIVK1bQrl07daQ4KSmJihUr5hh4FyhQ4KWeVwghNMq7sJggF3lJvCmEEEJ8jHJL+v2015X0+12VnJxMoUKFmD59utaoqKIolCpVij59+jBo0KAXrjcjU6H6D3/nOJp7c9sMMlOSKd/lO4KH1KKYU1E0Gg0XL15EV1eXli1bYmVlxfTp08mfPz/79u2jRo0aABw4cIAaNWqwYcMGWrZsSVxcHMWKFSMsLAwPDw+tdoKDg2nQoAFhYWGUK1eOgwcPqoHvvHnzGD58OHFxcbn+m6lz587cvXuXjRs3vvDzC/EukNjgzZMNboQQQoj30JtO+v2uCgsLY+XKlcTExHDixAn8/PwAaNGihVrmxo0bzJkzh6tXr750bsVnTfvNkpD4iOPxiURGRhIREYGurq7W9Xz58mFjY8Pvv//O+fPn+f
vvv18ocK1ZsyZ2dnb4+flRrFgxrRFSPz8/8ufPT4sWLdi/fz+xsbEEBQXRv39/Ll++/GIPC4wdOzZbsPq0zp0707JlyxeuW+Sudu3aDBw48IXuydpBNzw8/KXbfRV1iA+TBItCCCHEe+hZSb+zjl9l0u932bRp0yhfvjz169cnOTmZ/fv3a60VLFiwIOPHj+f3338nX758L9XGs6b9Pl3OwsIix1EPHR0dVq1axfHjx3Fzc+Orr75i6tSpee6DRqPB19eXkydPqkFxFhMTE/bt20eRIkVo1aoVLi4uBAQE8OjRo5cagRkyZAh79ux54fvepBs3btC7d2+KFCmCoaEhdnZ2+Pj4/Kc1qe+i8+fP06VLFwoXLoyhoSHFihXD19eXY8eOAeDo6EhCQoLWmlchXhVZsyiEEEK8pxq52TO3Q4VsG67YWRoxprnrR5HLzdPTk+PHjz+zzKtYcVPQ3CjXa/mbfvXMck9O+6xfvz4RERG59s/JyemZ/f3hhx/44YcfcrxmZ2fHkiVLcr138eLFuV57WlY+zndZ69atSU1NZcmSJRQvXpxr166xZ88ebt269Vb7lZqaioGBwSup69ixY9SrVw83Nzd+++03ypQpw/3799m0aRODBw8mODgYXV1d7Ozscq1DURQyMjJy3AlZiOeRkUUhhBDiPSZJv9+MD23a7++//65usPOkFi1a0LVr12zTUDMyMhg0aBBWVlbY2NgwbNiwbEFtZmYmkyZNolixYhgbG1O+fHnWrl2rVSY4OJgqVapgaGiIvb09X3/9Nenp6er1tWvX4u7ujrGxMTY2Nupo8dPu3r3L/v37+eGHH6hTpw5FixalSpUqfPPNN3z66acAah5RIyMjXF1d2bVrFxqNRg3eg4KC0Gg03L17V603PDwcjUZDXFwcALdu3cLX15dChQphYmKCu7s7K1eu1OpL7dq16devHwMHDiR//vz4+PgAcObMGRo3boyZmRm2trZ07NiRmzdvqvclJyfTqVMnzMzMsLe3Z/r06Vr1KopC586dKVWqFPv376dp06bqTsJjxoxh06ZNQPYppFnPtX37dipWrIihoSEHDhwgMzOTKVOmULJkSQwNDSlSpAgTJ07M9m6zPK//4uMgwaIQQgjxnpOk36/fhzbtt02bNty6dYu9e/eq527fvk1gYGC2Ka4A06dPZ/HixSxcuJADBw5w+/ZtNcdjlkmTJrF06VJ+/fVX/vnnH7766is6dOhAcHAwAP/++y9NmjShcuXKnDx5krlz57JgwQK+++47ABISEvD19aVr165ERkYSFBREq1atchxpzRr53LhxIykpKdmuZ2Zm0qpVKwwMDDhy5Ai//vorw4cPf+H39OjRIypWrMi2bds4c+YMPXr0oGPHjhw9elSr3JIlSzAwMCAkJIRff/2Vu3fvUrduXTw9PTl27BiBgYFcu3aNtm3bqvcMHTqU4OBgNm3axM6dOwkKCuLEiRPq9fDwcP755x8GDx6s7lr8JCsrq2f2/euvv2by5MlERkZSrlw5vvnmGyZPnsyoUaOIiIhgxYoV2Nra5nhvXvovPhLKOywxMVEBlMTExLfdFSGEEEIIZfvpK8on3+9Wig7fqn4++X63sv30lbfdtRfWokULpWvXrurxb7/9pjg4OCgZGRnKmDFjlPLly6vX7O3tlSlTpqjHaWlpSuHChZUWLVooiqIojx49UkxMTJSDBw9qtREQEKD4+voqiqIo3377reLs7KxkZmaq13/++WfFzMxMycjIUI4fP64ASlxcXI79Tc/IVA6ev6lsDLusHDx/U1mz5k8lX758ipGRkVKtWjXlm2++UU6ePKkoiqLs2LFD0dPTU/7991/1/u3btyuAsmHDBkVRFGXv3r0KoNy5c0ctExYWpgBKbGxsru+tadOmyuDBg9XjWrVqKZ6enlplJkyYoDRs2FDr3KVLlxRAiYqKUu7fv68YGBgoa9asUa/funVLMTY2VgYMGKAoiqKsXr1aAZ
QTJ07k2hdFUZTY2FgFUMLCwrSea+PGjWqZe/fuKYaGhsq8efPyVMfz+v+2SGzw5snkZSGEEEKIPGrkZk8DVzuOxt7m+v1HFDR/PPX0fRlRzMhU1L5XrvcpU0cN4pdffsHQ0JDly5fzxRdfZBvFSkxMJCEhQWv3VT09PSpVqqSO+p0/f54HDx7QoEEDrXtTU1Px9PQEIDIyEi8vLzSa/70rb29vkpKSuHz5MuXLl6devXq4u7vj4+NDw4YN+fzzz8mXLx+BZxKyrc21t8zH4l3HMb59nsOHD7N9+3amTJnC/PnzSUxMxNHREQcHB7W8l5fXi7+vjAy+//571qxZw7///ktqaiopKSmYmJholatYsaLW8cmTJ9m7d2+O6z5jYmJ4+PAhqampWu/U2toaZ2dn9Vj5j2ttK1WqpP4cGRlJSkoK9erVy9O9z+t/6dKl/1PfxPtDgkUhhBBCiBeQNe33ffN0wKWk5yPpURrfzV1Gj9YN2b9/PzNmzHipupOSkgDYtm0bhQoV0rpmaGiYpzp0dXXZtWsXBw8eZOfOncyePZsRI0YwZekWxgXd5OnQ6WriIwb8GcHcDhUYNaoBo0aNolu3bowZMyZPKUmyguIng7K0tDStMlOnTmXWrFnMnDkTd3d3TE1NGThwIKmpqVrlTE1NtY6TkpJo3rx5jpsR2dvbc/78+ef2LysgO3v2rBpwv4gn+2RsbPxC9z6v/+LjIWsWhRBCCCE+cIFnEui97ITWyJxGzwDjUtX4ce5Cxs74DWdnZypUqJDtXktLS+zt7Tly5Ih6Lj09XWsXWldXVwwNDYmPj6dkyZJaH0dHRwBcXFw4dOiQVnAWEhKCubk5hQsXftwnjQZvb2/GjRtHWFgYBgYGjJ29OFugCKjnxm2JICNTUfuRnJyMi4sLly5dIiEhQS1/+PBhrfsLFCgAoFXm6TyDISEhtGjRgg4dOlC+fHmKFy9OdHR0Dr3RVqFCBf755x+cnJyyvQ9TU1NKlCiBvr6+1ju9c+eOVt0eHh64uroyffr0bBsRAVob8zxPqVKlMDY2zjUdytMbET2v/+LjIcGiEEIIIcQHLCNTYdyWiBwDLpOytXkQE8qKP5bi2759rnUMGDCAyZMns3HjRs6ePUufPn20ghVzc3OGDBnCV199xZIlS4iJieHEiRPMnj1bTefRp08fLl26xJdffsnZs2fZtGmTOgqoo6PDkSNH+P777zl27Bjx8fGsX7+e69dv8NAke1qIjIf3uLryW+7/s5eL5yLZuC+MP//8kylTptCiRQvq169P6dKl8ff35+TJk+zfv58RI0Zo1ZEVyI4dO5Zz586xbdu2bDuSlipVSh3tjIyMpGfPnly7du2577xv377cvn0bX19fQkNDiYmJYceOHXTp0oWMjAzMzMwICAhg6NCh/P3335w5c4bOnTtrTQHWaDQsWrSI6OhoatSowV9//cWFCxc4deoUEydOpEWLFs/tRxYjIyOGDx/OsGHDWLp0KTExMRw+fJgFCxYA0KxZsxfqv/h4yDRUIYQQQogP2NHY21ojik8yKloOXWNzHt28hGv1JrnWMXjwYBISEvD390dHR4euXbvy2WefkZiYqJaZMGECBQoUYNKkSVy4cAErKysqVKjAt99+C0ChQoX466+/GDp0KOXLl8fa2pqAgABGjhwJgIWFBfv27WPmzJncu3ePokWL0nnQKHaSfQqmjr4xhg6luR+6kbS7V/FbmYlT0SJ0796db7/9Fh0dHTZs2EBAQABVqlTBycmJn376iUaNGql16Ovrs3LlSnr37k25cuWoXLky3333HW3atFHLjBw5kgsXLuDj44OJiQk9evSgZcuWWs+dEwcHB0JCQhg+fDgNGzYkJSWFokWL0qhRIzUgnDp1qjrd09zcnMGDB2ert0qVKhw7doyJEyfSvXt3bt68ib29PdWqVWPmzJnP7MPTRo0ahZ6eHqNHj+bKlSvY29vTq1cv0tLSMDLSzg+al/6Lj4
NG+a+rZ1+je/fuYWlpSWJiIhYWFm+7O0IIIYQQ751N4f8yYFX4c8vN+sKDFh6FnlvuTToUcwvfeYefW25l90/ytI5Uo9GwYcMGWrZs+Qp69+7KzMxk2rRp/P7771y6dAlbW1t69uyJn58fxYoVY9WqVfzyyy9qWhGAgQMHqqPFY8eOZePGjQwePJhRo0Zx584dGjduzLx58zA3Nwfg/v379OrVi40bN2JhYcGwYcPYtGkTHh4eaiCbkpLCiBEjWLlyJXfv3sXNzY0ffviB2rVrv9RzSWzw5slXA0IIIYQQH7CC5kbPL/QC5d6kKsWssbc0ypbbMosGsLd8vCOt+J/n5VT8+uuvGTBgAJGRkfj4+ORYR0xMDBs3bmTr1q1s3bqV4OBgJk+erF4fNGgQISEhbN68mV27drF//36tPJEA/fr149ChQ6xatYpTp07Rpk0bGjVqxLlz517Pg4tXTqahCiGEEEJ8wLICrquJj3Jct6gB7N7RgEtXR8OY5q70XnYCDWj1PyuAHNPc9b1JXfK6PJkSxVSTxqxZs5gzZw7+/v4AlChRgurVqxMXFwc8HkVs1arVM+vMzMxk8eLF6khix44d2bNnDxMnTuT+/fssWbKEFStWqOk4Fi1apJWqJD4+nkWLFhEfH6+eHzJkCIGBgSxatIjvv//+Vb8G8RpIsCiEEEII8QF73wOuRm72zO1QIVueRTtLI8Y0d6WRW95TObzDq69e2tMpUVKuRJGSkoJeYfdc73kyB2NunJyc1EARHqfMuH79OgAXLlwgLS2NKlWqqNctLS218kSePn2ajIyMbDkZU1JSsLF5/1LPfKwkWBRCCCGE+MC9yoDrbWjkZk8DVzt19Kyg+eOR0Hc1wH1TslKiaH0BoP84r+XIjWewK1wkxz/bvKS/0NfX1zrWaDQ5pvDITVJSErq6uhw/fhxdXV2ta2ZmZnmuR7xdEiwKIYQQQnwE3veAS1dHk6dNbD4WuaVE0c/ngEbPkEcXTzJuixMNXO1e+Z9x8eLF0dfXJzQ0lCJFigCQmJhIdHQ0NWvWBMDT05OMjAyuX79OjRo1Xmn74s2RYFEIIYQQ4iMhAdeHI7eUKBo9AyyqtuZO0CLO6eqxPsgSR5N0/vnnH3V94X9lbm6Ov78/Q4cOxdramoIFCzJmzBh0dHTQaB4HpqVLl8bPz49OnToxffp0PD09uXHjBnv27KFcuXI0bdr0lfRFvF6yG6oQQgghhBDvmev3c86dCWDp/QUWlT/j7v7ltPepRrt27dT1hq/Kjz/+iJeXF82aNaN+/fp4e3vj4uKilbNx0aJFdOrUicGDB+Ps7EzLli21RiPFu0/yLAohhBBCCPGeedU5KP+r5ORkChUqxPTp0wkICHgtbUhs8ObJNFQhhBBCCCHeM287JUpYWBhnz56lSpUqJCYmMn78eABatGjxWtoTb4dMQxVCCCGEEOI9k5USBf6XAiXLm0qJMm3aNMqXL0/9+vVJTk5m//795M+f/7W1J948CRaFEEKId9zVq1dp0KABpqamWFlZAY+3sd+4ceNb7ZcQ4u3KSoliZ2mkdd7O0oi5HSq81pQonp6eHD9+nKSkJG7fvs2uXbtwd889t6N4P8k0VCGEEOIdN2PGDBISEggPD8fS0vJtd0cI8Q5531OiiHebBItCCCHEOy4mJoaKFStSqlSpt90VIcQ7SFKiiNdFpqEKIYQQb1nt2rXp378/w4YNw9raGjs7O8aOHQuAk5MT69atY+nSpWg0Gjp37vxW+yqEEOLjISOLQgghxDtgyZIlDBo0iCNHjnDo0CE6d+6Mt7c3oaGhdOrUCQsLC2bNmoWxsfHb7qoQQoiPhIwsCiGEEG9BRqbCoZhbbAr/l3sP03AvV44xY8ZQqlQpOnXqRKVKldizZw8FChTA0NAQY2Nj7OzsZM2iEDl4Hzd8CgoKQqPRcPfuXQAWL16sbmAlxLtCgkUhhBDiDQs8k0D1H/7Gd95hBqwKJyLhHufTrA
k8k6CWsbe35/r162+xl0K8vM6dO6PRaOjVq1e2a3379n3lU6oTEhJo3Lhxnsu3bNkSjUaDRqNBT0+P/PnzU7NmTWbOnElKSsoLtf2yQV61atVISEjI9gXQ2LFj8fDweOH6hHgdJFgUQggh3qDAMwn0XnaChMRHWucfpEPvZSfUgFGj0ZCZmfk2uijEK+Ho6MiqVat4+PCheu7Ro0esWLGCIkWKvNK27OzsMDQ0zHP5smXL4uzsTEJCAvHx8ezdu5c2bdowadIkqlWrxv37919p/3JiYGCAnZ0dGo32rqVDhgxhz549r719IfJCgkUhhBDiDcnIVBi3JQLlGWXGbYkgI/NZJYR4P1SoUAFHR0fWr1+vnlu/fj1FihTB09NTPZeSkkL//v0pWLAgRkZGVK9endDQUAAyMzMpXLgwc+fO1ao7LCwMHR0dLl68CGSfhnrp0iXatm2LlZUV1tbWtGjRgri4OADS0tLQ19fHyMgIOzs7HBwccHd358svvyQ4OJgzZ87www8/aPVvyJAhFCpUCFNTU6pWrUpQUBDweCpply5dSExMVEcqszan+uOPP6hUqRLm5ubY2dnRvn17rdkCT09DzWJmZoaNjQ0nT56kTp06mJubY2FhQcWKFTl27NhL/VkI8bIkWBRCCCHekKOxt7ONKD5JARISH3E09vab65QQr1HXrl1ZtGiRerxw4UK6dOmiVWbYsGGsW7eOJUuWcOLECUqWLImPjw+3b99GR0cHX19fVqxYoXXP8uXL8fb2pmjRouq5DRs2ULJkSQwNDSlevDgxMTGsWrWKO3fucOvWLVxdXTEyMmL58uWEh4dz+vRp9d6sqZ+hoaHo6uoyadIkvvjiC+7fv0+/fv04dOgQCxcupH79+oSFhVGnTh2+/fZbxowZQ82aNbGwsCAhIYG4uDhu375NoUKF6NatG/fv3+e3335j48aNxMXFUb9+faysrNi4cSMdOnQAoHXr1ly6dClbX/z8/ChcuDChoaF88803XL9+nWrVqmFvb0+/fv0AUBSFsWPHUqRIEQwNDXFwcKB///6v7g9QfPQkWBRCCCHekOv3cw8UX6acEO+SJzdtunE/BUWBDh06cODAAS5evMjFixcJCQlRgySA5ORk5s6dy9SpU2ncuDGurq7MmzcPY2NjFixYAICfnx8hISHEx8cDj0cbV61ahZ+fn1b769evZ9SoUXz//ffY29vTq1cvypQpA8Dly5fJyMjg999/x8fHJ8f+x8TEsHHjRnx9fdHT0yM4OJhvvvmGRYsW8eeff7J27VpOnjzJ9u3bqVq1KuvWrSMsLAxDQ0M0Gg12dnZ89913HD9+nFWrVhEREUH37t3p2rUrNjY2/PTTT5w+fZoHDx4wceJEvvnmGwASExP54osvsvUnPj6e+vXrs3fvXsaPH8+QIUM4c+YMmzdvpmTJkgCsW7eOGTNm8Ntvv3Hu3Dk2btyIu7v7f/yTFOJ/JHWGEEII8YYUNDfK8bxd+8nZyj05pS6nXR4VRaaqindH4JkExm2JUEfOb0bfwCDjIcevpdO0aVMWL16Moig0bdqU/Pnzq/fFxMSQlpaGt7e3ek5fX58qVaoQGRkJgIeHBy4uLqxYsYKvv/6a4OBgrl+/Tps2bQDU9YWdOnXC39+foUOHcuXKFb766it13W9CQgJpaWkkJydjb2+f4zNkZmayePFivvvuO/T09OjYsSObN28mIyODUqVK8eDBAwwNDWnRogUpKSk0a9aMy5cvq/fHx8ezaNEi4uPjcXBw4Pjx4wQHBwOP10jq6+sDj6fBzpkzR13L+csvv1C1alXq1aun1Z9BgwbRrVs3dHV1qVatGs2aNaNEiRIAVK5cWW3Tzs6O+vXro6+vT5EiRahSpcoL//kJkRsZWRRCCCHekCrFrLG3NEKTy3UNYG9pRJVi1m+yW0L8J7lt2vQoPZPey05Qvl5LFi9ezJIlS+jatetLteHn56dORV2xYgWNGjXCKp81h2Ju8e
umxwGZm9vjEbWkpCQqVqxIeHg4f/31FwBLly4lOjqa9u3b59qGk5MT5ubmREZGUqxYMezt7blz5w66urpq2zt37iQ8PJzIyEjmzp2Ls7Ozev/p06fJyMigdOnSmJmZUalSJQIDA0lPT6devXps2LABAD09PTXYAyhdujRWVlZcuXJFqz9jx45l3759pKSkcO/ePVxdXdU6srRp04aHDx9SvHhxunfvzoYNG0hPT3+pdyxETiRYFEIIId4QXR0NY5q7AmQLGLOOxzR3RVcnt3BSiHdLXjZt+uuOLampqaSlpWWbAlqiRAkMDAwICQlRz6WlpREaGoqrq6t6rn379pw5c4bjx4+zdu1aytZsoqafmbH38SY3U3dGEXgmgQoVKnDu3DkKFiyIk5MTAM7OzpQsWfKZeUr19fU5e/YsgYGBtG7dGo1Gg76+PhkZGdy+/XgdsZOTEyVLlqRkyZLY2dkBoKurS0ZGBklJSejq6nL8+HGWLVsGwN9//83Zs2dZtGjRS6XCcXNze/xsU6fSqlUrrfWf8HjH2aioKH755ReMjY3p06cPNWvWJC0t7YXbEiInEiwKIYQQb1AjN3vmdqiAnaX2lFQ7SyPmdqhAI7ecp8gJ8S7Ky6ZNV++nsWRbCBEREejq6mpdNzU1pXfv3gwdOpTAwEB1nd+DBw8ICAhQyzk5OVGtWjUCAgJISUtnRUJBtV39fA4A3LgYTe9lJyjgUZf8+fPTokULjh49CsCxY8fo37+/1rRRgKtXr3LlyhWuXbvGzZs3qVWrFh4eHgwdOhR4PAro5+fH2LFj0dPTY+vWrRw9epRJkyaxZs0aoqOjMTc3JykpiUePHpGRkUF8fDz/1959x9d4vg8c/5wksjeRBCHIkAhib2LHSFFFUytGWpsaRb8qlCpKrZavGqFWaNGaaRUxIsSKGRERO0qNSJD9/P7IL8/XcRKrJMb1fr3O6+U8437u55wg13Pf93XVrVsXQ0NDNm3ahJ6eHpGRkUycOBGAjIwMraymsbGx3Lt3j2LFiqnbsrKyGDhwIEeOHKFEiRIsXbqUQ4cO4eHhofMZm5iY4Ofnx5w5cwgLCyMiIkIreY8Q/4asWRRCCCHyma+XI808HYiMv8PNpBSKWmRPPZURRfE6/fe//2XkyJHcvXsXA4PsXwGTk5OxsbGhbt26ajkIyC7r0KhRo2e2OXHhWpJP7uf21lk6+woVLaP++QGFsLS0zLWNKVOmkJWVRbdu3UhKSqJatWr88ccf2NjYaB3XpUsX+vfvT5HKzdAU+l9NRY2BYfY1Tu3EuJg7EzLuMH3G90z5djJ9+/YFsqd0tmrVSqsPWVlZODo6oq+vT6FChdDX12fSpEn069dPq2ZjcHAwkyZN4vvvv2fAgAHY2tpSuXJldu7ciZ6eHsWLF6dv374MHz4cgE6dOrF48WKmTZvG1KlT+f7773F1dWX69Ol88MEHGBgYMGjQIHr06AHAgAEDqFWrFmXK/O/z0mg03L59m+7du5OQkMDPP/9M/fr16datG0ePHiU8PJxBgwaxdOlSMjMzqVmzJqampqxYsQITExOtLLFC/BsysiiEEEIUAH09DbXLFqatd3Fqly0sgaJ47Ro1akRycrLWqNbevXtxcHDg4MGDpKT8b4Rw165dODg4kJCQoL46deqEr6+v1rYGdesBoDE0pcSA5Vovh67fqe09mdzpt99+Y+nSpQAYGxszZ84cbt26RUpKCvv27dNa05ejX79+7D//D2bNh+jsK/nFRqxqdeTu3pVEzezFp3374efnx9GjRwHYvHkzP/30kxostmvXDisrKxRFISMjg1GjRuHi4sLQoUO1AkXInp46YcIErl+/zieffEJKSgqnT5+mVatWeHh4YGxszPz58/nnn39IS0tj8ODBDB8+XB2d/OCDD1i7di1+fn4EBwdjZmbGqFGjmDFjBkZGRlhZWbFmzRoCAgLUmosajYbVq1dz+fJlMjIy+O9//8vNmzepUqUKbd
q0ITY2FgBra2sWLlxI3bp1qVixIn/99RebNm2icOHCz/6BEOI5aJQ3OJ3a/fv3sbKyIjExMc+nUUIIIYQQr5JGo2HDhg20a9cOgLNnzxIQEEBUVBTlypUjKiqqQPv3NEuXLmXo0KE6hd5z5NThGz16NACjRo3iwYMH7Ny5k3nz5uHj4wNAw4YNKV26tBrQAWow83h23swsBbdOo4jfPI+SQ9foXE9D9hTrfaMav5IHIr9HXWNISNQzj5v9sTdtvYv/6+s9zYMHDyhevDgzZszQmjL7NM/6fsTTSWyQ/2RkUQghhBDvnYCAADUYfFJCQgItW7ZU3wcFBWFmZkZMTAw7dux4rvbHjx+PRqNBo9FgYGCAs7Mzn3/+OcnJyS/dZx8fH7XN3F4+Pj507tyZc+fO5dlGo0aN2LVrl/p+165d+Pj40LBhQ3X7o0ePOHjw4HNNQ9XX09DOO3utXX4kbcqr/MzLHvcijh07xurVq4mLi+Po0aNqnce2bdu+8msJ8aaQNYtCCCGEEI/JyXKZIy4ujtatW7/wOrDy5cvz119/kZGRQXh4OL169eLhw4csWLBA59i0tDQMDQ2f2t769etJS0sD4MqVK9SoUYO//vqL8uXLA2BoaIiJiQkmJiZa52VmKer62JLlq/P75LFkZGTw6NEjjh07RsOGDUlPT+e///0vABEREaSmpj5XsAhQoYQ1SuoDLs/8iJz5ahpDY2r851eC/DxfadKmnPIzNxJTcs3AmjOS+brKz0yfPp2YmBgMDQ2pWrUqe/fu1aobKcS7RkYWhRBCCCEeo9Fo1KmWGo2GI0eO8PXXX6PRaBg/fjyQHax16tQJa2trbG1tadu2LRcvXtRqx8DAAAcHB0qUKEHnzp3p0qULGzduBLJHHr29vVm0aBGlS5fG2Dh7JOzy5cu0bdsWc3NzLC0t6dSpE3///TcAtra2ODg44ODggJ2dHQCFCxdWt9na2rJ06VKsra3VPnTtPxzLEq60GRBEhwbeTBmXPe102pJf2bt3L25ubpw+fZqpU6eyZ88erKys6NGjByVLlqRkyZLP/ZlZWFhw5uQJ1mzbw7xft7Nm01/sG9X4lWf3LcjyM5UrV+bIkSMkJydz584dtm/fToUKFV6ojcfXJQrxNpBgUQghhBAiDwkJCZQvX57hw4eTkJDAiBEj1HqBFhYW7N27l/DwcMzNzfH19VVH/nJjYmKitf/8+fOsW7eO9evXExUVRVZWFm3btuXOnTvs3r2b7du3c+HCBTp37vxSfQ89lcDmEwk8+ucaD8/upWiHcdh3/ho0eowfO4Ylv2yifv36tGvXjmbNmlGqVClmzZqFqakpderUeaFr6enp4e7mSscm1en7QT0+alT1tSVtkvIz764nH3Y8D2dnZ2bNmvXcx4eFhaHRaNSg/clr5jzIeVkvcw+5uXHjBs2aNcPMzExt7/EHWflFpqEKIYQQQuTBwcEBAwMDzM3N1empK1asICsri0WLFqHRZAdEwcHBWFtbExYWRvPmzXXaOXLkCKtWraJx48bqtrS0NH7++Wd1lHD79u2cPHmS+Ph4nJycAPj5558pX748hw4dyjVDaF4ysxQmbDoDgJKRRuHWwzCwyJ4uaVyyAimXjrMtdDuzJ45hwYIFtGnThjt37nDmzBkuXrxIUFDQS3xa+UfKz4gchw4dwszM7KXP79y5M61atXpl/XlV7c2cOZOEhASioqKwsrICsh9ePVlS5nWTYFEIIYQQ74XH1+7dSkrF4CXzwR8/fpzz589jYWGhtT0lJYW4uDj1/cmTJzE3NyczM5O0tDRat27NDz/8oO4vVaqUGigCREdH4+TkpAaKAJ6enlhbWxMdHf1CwWJk/B21aL2BpZ0aKAKYuNUm5dJxHv4dj2OFOgQEBNCiRQs8PDyIjo4mLS3tudcrFqSc8jPi/fb436GXkds63zehvbi4OKpWrYqrq6u67cn11PlBpqEKIYQQ4p0XeiqBelN34r/wAENCot
h97hZ7Y28ReirhhdtKTk6matWqREVFab3OnTvHJ598oh7n7u5OVFQU0dHRPHr0iI0bN2Jvb6/u/zejIc9yMyklz33GJbIT4uhbFCbDyJLg4GAiIiJo1qwZqampaDQaLl269Nr6Jt5eWVlZfPvtt5QuXRoTExMqVarEr7/+iqIoNG3alBYtWpBTle/OnTuUKFGCcePGAf+b/rllyxYqVqyIsbExtWrV4tSpU3leLy4ujrZt22Jvb4+5ubla2uVxT05D1Wg0LFq0iPbt22Nqaoqrq6u6Vjg3z5o2GhcXR5kyZRg4cCCKopCamsqIESMoXrw4ZmZm1KxZk7CwsDzby5nWumTJEkqWLIm5uTn9+/cnMzOTadOm4eDgQNGiRfnmm2+07mndunX8/PPPaDQaAgIC1HvLmYZap04dRo0apdXXW7duUahQIfbs2QPwzL4+DwkWhRBCCPFOCz2VQL8VR9WRthwpGVn0W3H0hQPGKlWqEBsbS9GiRXFxcdF65UwXg+zspC4uLjg7Oz8z0ymAh4cHV65c4cqVK+q2M2fOcO/ePTw9PV+oj4+Xjsi4f4uMpNvq+8zkO6DRw7Hb9+pxlStXZtq0aSiKQs2aNVm1apVOm0uXLs11vZQkbXl/fPvtt/z888/897//5fTp03z++ed07dqVPXv2sGzZMg4dOsScOXMA6Nu3L8WLF1eDxRwjR45kxowZHDp0CDs7O/z8/EhPT8/1esnJybRq1YodO3Zw7NgxmjZtCqD1dyQ3EyZMoFOnTpw4cYJWrVrRpUsX7ty588L3e+LECerVq8cnn3zCDz/8gEajYeDAgURERBASEsKJEyfo2LEjvr6+xMbG5tlOXFwc27ZtIzQ0lNWrV7N48WJat27N1atX2b17N1OnTmXs2LEcPHgQyJ5a6+vrS6dOnUhISGD27Nk6bXbp0oWQkBA1OAdYs2YNxYoVo379+gAv1dcnSbAohBBCiHdWztq93GacZqU+IPXvC3yxYCNHjh4jKirqmb+EQvYvaUWKFKFt27bs3buX+Ph4wsLCGDx4MFevXn3pvjZt2pQKFSrQpUsXjh49SmRkJN27d6dhw4ZUq1bthdrKKTEBoDEw5PaWmaTdvEDKlVPc2fETZuXqUaK4I3YkMmbMGCIiIrh06RJ//vknsbGxeHh4vPR9iHdHZpZCRNxtfo+6xu4z15k8eTJLliyhRYsWlClThoCAALp27cqCBQsoXrw4CxYsYPTo0YwZM4atW7eyYsUKDAy0V70FBQXRrFkzKlSowLJly/j777/ZsGFDrtevVKkSn332GV5eXri6ujJ27FgAtm3b9tR+BwQE4O/vj4uLC5MnTyY5OZnIyMgXuvf9+/fj4+PDiBEjmDRpEpCdrTg4OJhffvmF+vXrU7ZsWUaMGEG9evUIDg7Os62srCyWLFmCp6cnfn5+NGrUiJiYGGbNmoW7uzs9e/bE3d1drXVqZ2eHkZERJiYmODg4aD2EytGpUyeuX7/Ovn371G2rVq3C398fjUbz0n19kqxZFEIIIcQ76/G1e09KvXyShKWDSQCq/f9Swt69ez+zTVNTU/bs2cOoUaP48MMPSUpKonjx4jRp0gRLS8uX7qtGo+H3339n0KBBNGjQAD09PXx9fZk7d+4Lt5VTYuLjLWBg44ipex1u/jKerJRkTMpWx7Z5f4L8PLEw1+Ps2bMsW7aM27dv4+joyIABA/jss89e+j7EuyH0VAITNp1R//6k3brEw4cPadykqVYiobS0NCpXrgxAx44d2bBhA1OmTGH+/Pla6+1y1K5dW/2zra0t7u7uREdH59qH5ORkxo8fz5YtW0hISCAjIwN49shixYoV1T+bmZlhaWnJzZs3n/POs4PCZs2a8c033zB06FB1+8mTJ8nMzMTNzU3r+NTUVAoXznv9rLOzs9YaZ3t7e/T19dHT09Pa9iJ9tLOzo3nz5qxcuZL69esTHx9PRESEWsf1Zfv6JAkWhRBCCPHOymvtXpHWn1
Ok9efq+9kfe9PWuzgAixYt0jo2KipK53wHBweWLVuW53XHjx+v1mR8kf0lS5bk999/z/O8HM7OzlrTz3IEBASo65t8vRxpU9GRDRf0sKjcCovK2RkaHa2MCfLzVEtM5DWqI95fOVO3H/8JU9Kz/y5Zt/uKb7o2oIFbUXWfkZERAA8fPuTIkSPo6+u/0FTHvIwYMYLt27czffp0XFxcyMjIoEqVKnlOW81RqFAhrfcajYasrKznvq6dnR3FihVj9erV9OrVS30IlJycjL6+vnqPjzM3N3+h/vzbPkL2LIfBgwczd+5cVq1aRYUKFdTany/b1ydJsCiEEEKId9bja/dexXFvG5eiFrgWNWd+YC0pMSGeS15TtwsVdgL9QmTcv8XC44/o4VtW5+do+PDh6OnpsW3bNlq1akXr1q21ysUAHDhwgJIlSwJw9+5dzp07l+e05/DwcAICAmjfvj0A169ffzU3+QwmJiZs3ryZVq1a0aJFC/78808sLCyoXLkymZmZ3Lx5U10XWJDatm3Lp59+SmhoKKtWraJ79+7qvlfVV1mzKIQQQoh3Vs7avbxCIw3ZI201StvmZ7fyXe2yhWnrXZzaZQvnGSj6+PhoTbl7FR7P3njx4kU0Gk2uI7Vvi1dVcP1NltfUbT0jUyxrfMidnYuI3beF9WFHOHr0KHPnzmXZsmVs2bKFJUuWsHLlSpo1a8bIkSPp0aMHd+/e1Wrn66+/ZseOHZw6dYqAgACKFClCu3btcu2Lq6sr69evJyoqiuPHj9OnT5/Xccu5MjMzY8uWLRgYGNCyZUuSk5Nxc3OjS5cudO/enfXr1xMfH09kZCTffvstW7Zsybe+Pd7Hdu3a8dVXXxEdHY2/v7+671X1VYJFIYQQQryzctbuAToBY877ID/Pt3qk7WlB3vjx4ws0OEtISKBly5YFdn3x4p5WdsW6fles6nQm8cAvfNKiDr6+vmzZsgVnZ2d69+7N+PHjqVKlCpCdkdTe3p6+fftqtTFlyhSGDBlC1apVuXHjBps2bcozW/D333+PjY0NderUwc/PjyZNmry6G30O5ubmbNu2DUVRaN26NQ8ePCA4OJju3bszfPhw3N3dadeuHYcOHVJHS/Nbly5dOH78OPXr19fpw6voq0bJbcL7G+L+/ftYWVmRmJj4rxaMCyGEEOL99mSyDtBdu/e28vHxwdvbW6vWXEG2k5eLFy9SunRpjh07hre392u5BkB6errOerBXZenSpQwdOvSdLhUSEXcb/4UHnnnc6sBa1C77/IlSwsLCaNSoEXfv3n3p0VmJDfKfjCwKIYQQ4p3n6+XIvlGNWR1Yi9kfe7M6sBb7RjV+6wPFgIAAdu/ezezZs9FoNGg0Gi5evMju3bupUaMGRkZGODo6Mnr0aDWTJMCDBw/o3r075ubmODo6MmPGDJ22ly9fTrVq1bCwsMDBwYFPPvlEzdaoKAouLi5Mnz5d65yoqCg0Gg3nz58HtKeh5ubUqVO0bNkSc3Nz7O3t6datG//884+6PzQ0lHr16mFtbU3hwoVp06YNcXFx6v6cqa1r1qyhYcOGGBsbs3LlSgICAmjXrh3Tp0/H0dGRwoULM2DAAK3EKM9TsHzp0qWULFkSU1NT2rdvz+3bt3nXydRt8TgJFoUQQgjxXtDX0zzX2r23yezZs6lduzaBgYEkJCSQkJBAoUKFaNWqFdWrV+f48ePMnz+fxYsXq7XiILsw+u7du/n999/5888/CQsL4+jRo1ptp6enM3HiRI4fP85vv/3GxYsX1UyrGo2GXr166dRrCw4OpkGDBri4uDyz7/fu3aNx48ZUrlyZw4cPExoayt9//02nTp3UYx48eMCwYcM4fPgwO3bsQE9Pj/bt2+tkjRw9ejRDhgwhOjqaFi1aALBr1y7i4uLYtWsXy5YtY+nSpSxdulQ951kFyw8ePEjv3r0ZOHAgUVFRNGrUSOszfFe9D1O3xQtQ3mCJiYkKoCQmJhZ0V4QQQggh3g
gZmVnK/vP/KL8du6rsP/+P0qBhQ2XIkCHq/i+//FJxd3dXsrKy1G0//vijYm5urmRmZipJSUmKoaGhsnbtWnX/7du3FRMTE612nnTo0CEFUJKSkhRFUZRr164p+vr6ysGDBxVFUZS0tDSlSJEiytKlS9VzAGXDhg2KoihKfHy8AijHjh1TFEVRJk6cqDRv3lzrGleuXFEAJSYmJtc+3Lp1SwGUkydParU5a9YsreN69OihlCpVSsnIyFC3dezYUencubOiKIpy6dIlRV9fX7l27ZrWeU2aNFHGjBmjKIqi+Pv7K61atdLa37lzZ8XKyirPz+hdsu3kdaXW5L+UUqM2q69ak/9Stp28XmB9ktgg/0npDCGEEEKIt0Ruay/vXL6LjdMD9X10dDS1a9dGo/nfyE/dunVJTk7m6tWr3L17l7S0NGrWrKnuzymO/rgjR44wfvx4jh8/zt27d9XRvMuXL+Pp6UmxYsVo3bo1S5YsoUaNGmzatInU1FQ6duz4XPdy/Phxdu3alWvNt7i4ONzc3IiNjWXcuHEcPHiQf/75R6sPXl5e6vHVqlXTaaN8+fJa9eUcHR05efIk8HwFy6Ojo9WSDTlq165NaGjoc93f287Xy5Fmng5Ext+RsivvMQkWhRBCCCHeArkVSgdIy8hiZ/RNQk8lvLI1mA8ePKBFixa0aNGClStXYmdnx+XLl2nRogVpaWnqcX369KFbt27MnDmT4OBgOnfujKmp6XNdIzk5GT8/P6ZOnaqzz9Ex+z78/PwoVaoUCxcupFixYmRlZeHl5aXVB8guIfCkpxU9f1UFy991OVO3xftLgkUhhBBCiDdcXoXSATT6hUDJYsKmMzTzdMDDw4N169ahKIo6uhgeHo6FhQUlSpTA1taWQoUKcfDgQZ3i6A0bNgTg7Nmz3L59mylTpuDk5ATA4cOHda7dqlUrzMzMmD9/PqGhoezZs+e576lKlSqsW7cOZ2dnDAx0fyW9ffs2MTExLFy4UC0qvm/fvudu/2mep2C5h4cHBw8e1Np24MCzs4QK8S6RBDdCCCGEEG+4vAqlAxhYFSU1IYYrly/x55FY+vfvz5UrVxg0aBBnz57l999/JygoiGHDhqGnp4e5uTm9e/dm5MiR7Ny5Uy2Orqf3v18LS5YsiaGhIXPnzuXChQts3LiRiRMn6lxbX1+fgIAAxowZg6urK7Vr137uexowYAB37tzB39+fQ4cOERcXxx9//EHPnj3JzMzExsaGwoUL89NPP3H+/Hl27tzJsGHDXvzDy8XzFCwfPHgwoaGhTJ8+ndjYWH744Yf3ZgqqEDkkWBRCCCGEeMM9rVC6ZY0PQaPH9UX9aVXDnfT0dLZu3UpkZCSVKlWib9++9O7dm7Fjx6rnfPfdd9SvXx8/Pz+aNm1KvXr1qFq1qrrfzs6OpUuX8ssvv+Dp6cmUKVN0ymTk6N27N2lpafTs2fOF7qlYsWKEh4eTmZlJ8+bNqVChAkOHDsXa2ho9PT309PQICQnhyJEjeHl58fnnn/Pdd9+90DWe5lkFy2vVqsXChQuZPXs2lSpV4s8//9T6DIV4H2gURcltRsMbQQpvCiGEEEK8vkLpr8LevXtp0qQJV65cwd7ePl+vLd4vEhvkPxlZFEIIIYR4w72JhdJTU1O5evUq48ePp2PHjhIoCvEOkmBRCCGEEOIN9yYWSl+9ejWlSpXi3r17TJs2Ld+uK4TIPzINVQghhBDiLZFbnUVHK2OC/DxfWdkMId5UEhvkPymdIYQQQgjxlpBC6UKI/CTBohBCCCHEW0QKpQsh8ousWRRCCCGEEOItMX78eLy9vZ/7+KVLl2Jtbf3a+vOyLl68iEajISoq6p24zrtKgkUhhBBCCFHgQkNDqVevHtbW1hQuXJg2bdoQFxcHwEcffcTAgQPVY4cOHYpGo+Hs2bMApKWlYWZmxl9//cXmzZuxtrYmMzMTgKioKDQaDaNHj1bP79OnD127ds3Hu3u6iIgI9PX1ad
269Stvu3Pnzpw7d+6VtwtQrlw5jIyMuHHjxguf6+TkREJCAl5eXq+hZ+JVkWBRCCGEEEIUuAcPHjBs2DAOHz7Mjh070NPTo3379mRlZdGwYUPCwsLUY3fv3k2RIkXUbYcOHSI9PZ06depQv359kpKSOHbsWK7H5mzz8fHJv5t7hsWLFzNo0CD27NnD9evXX2nbJiYmFC1a9JW2CbBv3z4ePXrERx99xLJly174fH19fRwcHDAwkFVxbzIJFoUQQgghRL7LzFKIiLvN71HXiIi7Tbv2H/Lhhx/i4uKCt7c3S5Ys4eTJk5w5cwYfHx/OnDnDrVu3uHv3LmfOnGHIkCFqABgWFkb16tUxNTXFysoKb29vrX2ff/45x44dIzk5mWvXrnH+/HkaNmz4wn12dnZm1qxZ6vsbN27QrFkzzMzMXnqqZ3JyMmvWrKFfv360bt2apUuXau2fMmUK9vb2WFhY0Lt3b1JS/pcJ988//8TY2Jh79+5pnTNkyBAaN24M5D4NddOmTVSvXh1jY2OKFClC+/bt1X2pqamMGDGC4sWLY2ZmRs2aNbUC7RyLFy/mk08+oVu3bixZskRnv7OzM5MnT6ZXr15YWFhQsmRJfvrpJ3X/k9NDw8LC0Gg0/PHHH1SuXBkTExMaN27MzZs32bZtGx4eHpQoUQKAhw8fqu08bUQ6N3fv3qVLly7Y2dlhYmKCq6srwcHBeR7/vpNgUQghhBBC5KvQUwnUm7oT/4UHGBIShf/CA1Qd+TM+rdpRpkwZLC0tcXZ2BuDy5ct4eXlha2vL7t272bt3L5UrV6ZNmzbs3r0b0B0pzBmJVBSFvXv38uGHH+Lh4cG+ffvYvXs3xYoVw9XV9YX7fejQIT799FP1/cyZM0lISCAqKuqlp3quXbuWcuXK4e7uTteuXVmyZAk5le3Wrl3L+PHjmTx5MocPH8bR0ZF58+ap5zZp0gRra2vWrVunbsvMzGTNmjV06dIl1+tt2bKF9u3b06pVK44dO8aOHTuoUaOGun/gwIFEREQQEhLCiRMn6NixI76+vsTGxqrHJCUl8csvv9C1a1eaNWtGYmIie/fu1bnWjBkzqFatGseOHaN///7069ePmJiYp34e48eP54cffmD//v1cuXKFTp06MWvWLFatWsXatWsBWLBggXr800akc/PVV19x5swZtm3bRnR0NPPnz6dIkSJP7dP7TMZ9hRBCCCFEvgk9lUC/FUd5stD36aVjMbC0Y9x/ptCubgWysrLw8vIiLS0NjUZDgwYNCAsLw8jICB8fHypWrEhqaiqnTp1i//79jBgxQm3Lx8eHJUuWcPz4cQoVKkS5cuXw8fEhLCyMu3fvvtSoIoCdnZ3W+7i4OKpWrfpCgWdmlqJV+mTR4sXq+klfX18SExPV4HfWrFn07t2b3r17AzBp0iT++usvdXRRX1+fjz/+mFWrVqnH7Nixg3v37tGhQ4dcr//NN9/w8ccfM2HCBHVbpUqVgOzAPDg4mMuXL1OsWDEARowYQWhoKMHBwUyePBmAkJAQXF1dKV++PAAff/wxixcvpn79+lrXatWqFf379wdg1KhRzJw5k127duHu7p7n5zNp0iTq1q0LQO/evRkzZgxxcXGUKVOG+/fvA2gFpk/e55IlS7Czs+PMmTO5roe8fPkylStXplq1agDqQwmROxlZFEIIIYQQ+SIzS2HCpjM6gWLmo/tk3LmKdZ3ObLxVGDf3cty9e1frmJzRwrCwMHx8fNDT06NBgwZ89913pKamqgEGoK5bnDlzphoY5gSLOefnJikpiS5dumBmZoajoyMzZ87Ex8eHoUOHAtrTUJ2dnVm3bh0///wzGo2GgIAAFEVh/PjxlCxZEiMjI4oVK8bgwYPV9p8cUf1oyq8cOHAQe+/sKaMGBgZ07tyZxYsXAxAdHU3NmjW1+li7dm2t9126dCEsLExd67hy5Upat26d57TYqKgomjRpkuu+kydPkpmZiZubG+bm5upr9+7dWl
M7lyxZopUgqGvXrvzyyy8kJSVptVexYkX1zxqNBgcHB27evJnrtXM7x97eHlNTU8qUKaN1zK1bt9Q/x8bG4u/vn+uIdG769etHSEgI3t7efPHFF+zfv/+p/XnfyciiEEIIIYTIF5Hxd0hITNHZrmdsjp6JJUnH/+CyuS3zVt1n+ZxvtY7x8fHh888/x9DQkHr16qnbRowYQfXq1TEzM1OPtbGxoWLFiqxcuZIffvgBgAYNGtCpUyfS09PzHFkcNmwY4eHhbNy4EXt7e8aNG8fRo0dzLVVx6NAhunfvjqWlJbNnz8bExIR169Yxc+ZMQkJCKF++PDdu3OD48eNA7iOqySf+RMnKxL+RN130NGgARVEwMjJS+/0s1atXp2zZsoSEhNCvXz82bNigs+7xcSYmJnnuS05ORl9fnyNHjqCvr6+1z9zcHIAzZ85w4MABIiMjGTVqlLo/MzOTkJAQAgMD1W2FChXSakOj0eQ5PTS3czQajU4bgDpNF8DPz49SpUqxcOFCihUrpjUinZuWLVty6dIltm7dyvbt22nSpAkDBgxg+vTpT+3X+0qCRSGEEEIIkS9uJukGigAajR5FPviCu38t4PriAXy/y4WlC+drjQBWqFABa2trddQLsoPFzMzMXEcKGzZsSFRUlLrP1tYWT09P/v77b61pkDnTQi/d+Iely5axcsVKdeQtODhYnY75JDs7O4yMjDAxMcHBwQHIHs1ycHCgadOmFCpUiJIlS1KjRo1cR1SVrEyST+/EplFvTEpXpoi5ET/3rom+noZ27dqxevVqPDw8OHjwIN27d1fPO3DggE5funTpwsqVKylRogR6enpPLcFRsWJFduzYQc+ePXX2Va5cmczMTG7evKkzpTTH4sWLadCgAT/++KPW9uDgYBYvXqwVLL5ut2/fJiYmhoULF6r93bdv3zPPs7Ozo0ePHvTo0YP69eszcuRICRbzINNQhRBCiBegKAqffvoptra2b32h54cPH9KhQwcsLS3RaDQ6GRVfpVdVGPzJbJQajYbffvvtX7cr8kdRC+M895k4e1Osz3xKjdjAqm17adiwIYqi0K5dOwD09PS4c+eOVrDk7e2Noih8++23Ou3NmjULRVEoV66cui0qKoqEhAT1/ePTQoct/pOM9HSmHEkn9FT2MVZWVk9dX/ekjh078ujRI8qUKUNgYCAbNmwgIyMj1xHVR+cjyUpJxrxScwrZOZNo4shDs2J4eXnRoUMHFi9ezJAhQ1iyZAnBwcGcO3eOoKAgTp8+rXPdLl26cPToUb755hs++ugjjIyM8uxjUFAQq1evJigoiOjoaE6ePMnUqVMBcHNzo0uXLnTv3p3169cTHx9PZGQk3377LVu2bCE9PZ3ly5fj7++Pl5eX1qtPnz4cPHgw1/69LjY2NhQuXJiffvqJ8+fPs3PnToYNG/bUc8aNG8fvv//O+fPnOX36NJs3b8bDwyOfevz2kWBRCCGEeAGhoaEsXbqUzZs3v7KC0q8qkHpRy5YtY+/evezfv5+EhASsrKxeSbtPBnSiYLyJDzZqlLbF0coYTR77NYCjlTE1Stu+9r7kTAt9Moi7lZRKvxVH1YDxRTg5ORETE8O8efMwMTGhf//+NGjQgOt3knSOTT7xJyalvNEz+t/02ZyR1w4dOnD48GE8PDz46quv+OKLL6hatSqXLl2iX79+Om25uLhQo0YNTpw4kWcW1Bw+Pj788ssvbNy4EW9vbxo3bkxkZKS6Pzg4mO7duzN8+HDc3d1p164dhw4domTJkmzcuJHbt29rldrI4eHhgYeHh7reMj/o6ekREhLCkSNH8PLy4vPPP+e777576jmGhoaMGTOGihUr0qBBA/T19QkJCcmnHr99ZBqqEEII8QLi4uJwdHSkTp06Bd2Vfy0uLg4PD49XEvCKN0/Og42wsDDKlCnzRpQH0NfTEOTnSb8VR7PX5z22LyeADPLzRF8vr3Dy1chtWqiBlQPoGZCSEIu5ZVEmbD
pDjeImnDt3jgYNGjx32yYmJvj5+eHn58eAAQMoV64cSdcv6BxX9KMg3W3/P/Jao0YNdV1exYoV+fLLL7WOyxkJfNzBgwdz7U9AQAABAQFa2z78MLumZW4KFSrEhAkTtLKl5qhQoQKZmZm5ngfZ6xlzXLx4UWf/4w8snJ2dtdYe+vj4aL3Pq++gPdW0adOmWtcF7TWNT15n7NixjB07Ns97ENpkZFEIIYR4TgEBAQwaNIjLly+j0WhwdnZ+ZkHonMLT69evp1GjRpiamlKpUiUiIiKA7ELUPXv2JDExEY1Gg0ajYfz48QAsX76catWqYWFhgYODA5988olWJsGnFZdu3LgxAwcO1Or/rVu3MDQ0ZMeOHfj4+DBjxgz27NmDRqNR13XdvXuX7t27Y2Njg6mpKS1bttSqrwawbt06ypcvj5GREc7OzsyYMUPd5+Pjw6VLl/j888/V+3ncb7/9hqurK8bGxrRo0YIrV66o++Li4mjbti329vaYm5tTvXp1/vrrr5f8tsTjDzYcHBwwMHgzxgh8vRyZ37UKDlbaU1IdrIyZ37UKvl6Or70PuU0L1TMyxdyrMfd2LeHRpRNcOh9DB//u6Onp6fwc52Xp0qUsXryYU6dOceHCBVasWIGJiQmt6lR8Y0ZUhXgREiwKIYQQz2n27Nl8/fXXlChRgoSEBA4dOvTcBaH/85//MGLECKKionBzc8Pf35+MjAzq1KnDrFmzsLS0JCEhgYSEBLVeXHp6OhMnTuT48eP89ttvXLx4Uesp+9OKS/fp04dVq1aRmpqqHr9ixQqKFy9O48aNWb9+PYGBgdSuXZuEhATWr18PZAfEhw8fZuPGjURERKAoCq1atSI9PR2AI0eO0KlTJz7++GNOnjzJ+PHj+eqrr9Tsi+vXr6dEiRJ8/fXX6v3kePjwId988w0///wz4eHh3Lt3j48//ljdn5ycTKtWrdixYwfHjh3D19cXPz+/PFPgi7zl9mAjt+nB3t7e6sMJyF4DumjRItq3b4+pqSmurq5s3LhR3R8WFoZGo2HHjh1Uq1YNU1NT6tSpoxZav3jxInp6ehw+fFjrOrNmzaJUqVLq3wtfL0f2jWrM6sBazP7Ym9WBtdg3qnG+BIqQd6Idm8Z9MCxejpvrJnBzzVjKeFXBw8MDY+O811o+ztramoULF1K3bl0qVqzIX3/9xaZNmyhqV4QgP08AnYAxP0dUhXhhyhssMTFRAZTExMSC7ooQQoj3WEZmlrL//D/Kb8euKkP+M0kpVapUnsfeunVLAZSTJ08qiqIo8fHxCqAsWrRIPeb06dMKoERHRyuKoijBwcGKlZXVM/tx6NAhBVCSkpIURVEUPz8/pWfPnrke++jRI8XGxkZZs2aNuq1ixYrK+PHj1fdDhgxRGjZsqL4/d+6cAijh4eHqtn/++UcxMTFR1q5dqyiKonzyySdKs2bNtK41cuRIxdPTU31fqlQpZebMmVrHBAcHK4By4MABdVt0dLQCKAcPHszznsuXL6/MnTs3z7YBZcOGDXme/766d++e8vXXXyslSpRQEhISlJs3b+b6vVSqVEkJCgpS3wNKiRIllFWrVimxsbHK4MGDFXNzc+X27duKoijKrl27FECpWbOmEhYWppw+fVqpX7++UqdOHbWNZs2aKf3799e6TsWKFZVx48a9tvt9UfvP/6OUGrX5ma8dJy4pVlZWWn9//41tJ68rtSb/pXWNWpP/UradvP5K2n/XSWyQ/2RkUQghhHiKJ4to/xxxiYTEFDX5xfMWhH680LSjY/boybOKUx85cgQ/Pz9KliyJhYWFWhsup+2nFZc2NjamW7duLFmyBICjR49y6tSpXNf/5IiOjsbAwECrCHjhwoVxd3cnOjpaPebx4ucAdevWJTY29qlrmSC74Hj16tXV9+XKlcPa2lptOzk5mREjRuDh4YG1tTXm5uZER0fLyOILyMxSiIi7TVh8MrdT9dDX18fBwQE7O7vnbiMgIAB/f39cXF
yYPHkyycnJWglQAL755hsaNmyIp6cno0ePZv/+/aSkZI/W9enTh9WrV6uj2kePHuXkyZO5lmooKHkl2kn7O44HZ3aTcTcBy+QrzB47GIC2bdu+kusW9IiqEC9KgkUhhBAiD3llS8zMUtRsiX5+fty5c4eFCxdy8OBBNcnEkwWhnyw0DTy1OPWDBw9o0aIFlpaWrFy5kkOHDrFhwwattnOKS3/++edcv36dJk2aqFNYIfuX9u3bt3P16lWCg4Np3LgxpUqV+hefyOs1YsQINmzYwOTJk9m7dy9RUVFUqFAhz+LaQtuzHmw8r8cfbJiZmWFpaanzYONpDz/atWuHvr6++vO6dOlSGjVqpD5IeRPkJNoB3Wmh9yPXcz14EPHLx/Dw4QP27t37SpMD6etpqF22MG29i1O7bGGZeireaBIsCiGEELnILVvik8auiSAmJoaxY8fSpEkTPDw8uHv37gtfy9DQUGdU7uzZs9y+fZspU6ZQv359ypUrl+tIZE5x6RUrVjBr1ix++ukndV+FChWoVq0aCxcuZNWqVfTq1eup/fDw8CAjI0Mrq2JO0WtPT0/1mPDwcK3zwsPDcXNzQ19fP8/7AcjIyNBayxYTE8O9e/fUGmfh4eEEBATQvn17KlSogIODQ64ZFYWu53mwoaenp5NtMmct6uMef7AB2Q83nnyw8bSHH4aGhnTv3p3g4GDS0tKe62evIOSWaMfQvixVhixg69ELJCXeZfv27VSoUKEAeylEwXoz0mIJIYQQb5jcsiU+TgFupRXCysaWn376CUdHRy5fvszo0aNf+FrOzs4kJyezY8cOKlWqhKmpKSVLlsTQ0JC5c+fSt29fTp06xcSJE7XOGzduHFWrVqV8+fKkpqbmWly6T58+DBw4EDMzs1xroz3O1dWVtm3bEhgYyIIFC7CwsGD06NEUL15cnYY3fPhwqlevzsSJE+ncuTMRERH88MMPzJs3T+t+9uzZw8cff4yRkZE6KlOoUCEGDRrEnDlzMDAwYODAgdSqVYsaNWqo11+/fj1+fn5oNBq++uqrp46+imzP82BjwqYzFLGz00o4dP/+feLj419Ln/r06YOXlxfz5s0jIyMjzzINBc3Xy5Fmng5Ext/hZlIKRS2yM5LKaJ8Q2WRkUQghhMhFXtkSH6fR6DFs8o8vVBA6N3Xq1KFv37507twZOzs7pk2bhp2dHUuXLuWXX37B09OTKVOmMH36dK3znqe4tL+/PwYGBvj7+z9XRsfg4GCqVq1KmzZtqF27NoqisHXrVnUkqUqVKqxdu5aQkBC8vLwYN24cX3/9tdZayK+//pqLFy9StmxZrbVypqamjBo1ik8++YS6detibm7OmjVr1P3ff/89NjY21KlTBz8/P1q0aEGVKlVe+PN83zzPg42ExBTKVa7N8uXL2bt3LydPnqRHjx7qaPCr5uHhQa1atRg1ahT+/v6YmJi8luu8CjItVIi8aZQn5yO8Qe7fv4+VlRWJiYlYWloWdHeEEEK8RyLibuO/8MAzj1sdWIvaZQvnQ49eTk7QdujQIQm83lG/R11jSEiUzvb7h37n/uHfKdEvO8nRt23K8vsP49m2bRtWVlZMnDiRmTNn0q5dO7V8hkajYcOGDbRr105tx9ramlmzZhEQEEBYWBiNGjXi7t27WFtbA9mF1itXrkx8fLzWusQlS5bQu3dvIiMjtRIbCfGyJDbIfzINVQghhMhFTrbEG4kpuU7v05BdRPxNLaKdnp7O7du3GTt2LLVq1ZJA8R1W1CL3EWPL6m2xrP6/LJ7OjnY6I889evTQep/bGMK9e/fUP/v4+Ogc4+3tnet5165do0KFChIoCvEWk2moQgghRC6eli3xbSiiHR4ejqOjI4cOHeK///1vQXdHvEZ5lYHIoQEc8/HBRnJyMqdOneKHH35g0KBB+XJNIcTrIcGiEEIIkYfcsiVC9oji/K5V3ujaaDkjQDExMZLN8R33pj3YGDhwIFWrVsXHx+e1Z0
F1dnZm1qxZr/UaQrzPZM2iEEII8QyZWYpkSxRvvNBTCUzYdEYr2Y2jlTFBfp5v9IONf+PWrVuYmZlhampa0F0R+UBig/wnwaIQQgghxDtCHmyId5nEBvlPpqEKIYQQQrwj3rUyED4+PgwcOJCBAwdiZWVFkSJF+Oqrr9SEOk9OQ7137x6fffYZ9vb2GBsb4+XlxebNmwG4dOkSfn5+2NjYYGZmRvny5dm6detrv4ewsDA0Go1WoqDffvsNFxcX9PX1GTp06GvvgxAvS7KhCiGEEEKIN9ayZcvUEhyHDx/m008/pWTJkgQGBmodl5WVRcuWLUlKSmLFihWULVuWM2fOqLUkBwwYQFpaGnv27MHMzIwzZ85gbm7+yvvr4+ODt7e3GsTWqVOHhIQErKys1GM+++wzevbsyeDBg7GwsHjlfRDiVZFgUQghhBBCvLGcnJyYOXMmGo0Gd3d3Tp48ycyZM3WCxb/++ovIyEiio6Nxc3MDoEyZMur+y5cv06FDBzXh0+P7XidDQ0McHBzU98nJydy8eZMWLVpQrFixfOmDEC9LpqEKIYQQQog3RmaWQkTcbX6Pusb9R+nUrFkTjeZ/02lr165NbGwsmZmZWudFRUVRokQJNVB80uDBg5k0aRJ169YlKCiIEydOvPK+BwQEsHv3bmbPno1Go0Gj0bB06VJ1GmpYWJg6kti4cWM0Gg1hYWGMHz8eb29vrbZmzZqFs7PzK++jEC9CgkUhhBBCiOd048YNhgwZgouLC8bGxtjb21O3bl3mz5/Pw4cPC7p7b73QUwnUm7oT/4UHGBISxZmE+2w5mUDoqYRnnmtiYvLU/X369OHChQt069aNkydPUq1aNebOnfuqug7A7NmzqV27NoGBgSQkJJCQkICTk5O6v06dOsTExACwbt06EhISqFOnzivtgxCvkgSLQgghhBDP4cKFC1SuXJk///yTyZMnc+zYMSIiIvjiiy/YvHkzf/31V0F38a0WeiqBfiuOapX+ALh3MZp+K46qAeOBAwdwdXVV1yLmqFixIlevXuXcuXN5XsPJyYm+ffuyfv16hg8fzsKFC19J33NGQ8Pik0nJ1GBiYoKDgwMODg5a/TQ0NKRo0aIA2Nra4uDggKGh4SvpgxCvgwSLQgghhBDPoX///hgYGHD48GE6deqEh4cHZcqUoW3btmzZsgU/Pz8Avv/+eypUqICZmRlOTk7079+f5ORktZ2lS5dibW3N5s2bcXd3x9TUlI8++oiHDx+ybNkynJ2dsbGxYfDgwVpTLVNTUxkxYgTFixfHzMyMmjVrEhYWpu4vqGyfr0JmlsKETWfIrZ5bRtIt7uxYyOglf7By5Srmzp3LkCFDdI5r2LAhDRo0oEOHDmzfvp34+Hi2bdtGaGgoAEOHDuWPP/4gPj6eo0ePsmvXLjw8PP5133MbDV17+OpzjYYK8aaTYFEIIYR4h/j4+Dw1Fb9Go+G333577vZyS/v/Oly8eBGNRkNUVFSexwQEBKDRaOjbt6/OvgEDBqDRaAgICHjhYx+/x4iICPT19WndurXWObdv3+bPP/9kwIABmJmZ5dq/nHV1enp6zJkzh9OnT7Ns2TJ27tzJF198oXXsw4cPmTNnDiEhIYSGhhIWFkb79u3ZunUrW7duZfny5SxYsIBff/1VPWfgwIFEREQQEhLCiRMn6NixI76+vsTGxqr3lZqayp49ezh58iRTp059Ldk+X4fI+Ds6I4o5zMo3JisjjRM/DqDfgAEMGTKETz/9NNdj161bR/Xq1fH398fT05MvvvhCDbgzMzMZMGAAHh4e+Pr64ubmxrx58/5Vv/MaDX2QmqE1Gvosenp6PFn6PD09/V/1TYhXQbKhCiGEEO+RhIQEbGxsCrobL83JyYmQkBBmzpyprlFLSUlh1apVlCxZ8qWPzbF48WIGDRrE4sWLuX79upqt8vz58yiKgru7u9bxRYoUISUlO1AYMGAAU6dO1QrWnZ2dmTRpEn379tUKTN
LT05k/fz5ly5YF4KOPPmL58uX8/fffmJub4+npSaNGjdi1axedO3fm8uXLBAcHc/nyZbVPI0aMIDQ0lODgYCZPnlxg2T5fhZtJuQeKABo9fWyb9qNwiwHM/tibtt7F1X0XL17UOtbW1pYlS5bk2s6rXp+Y12ioRr8QKFkATNh0hkk1nz02Y2dnx40bN1AURX3o8LQHJ0LkFxlZFEIIId4jDg4OGBkZFXQ3XlqVKlVwcnJi/fr16rb169dTsmRJKleu/NLHQnZJgzVr1tCvXz9at27N0qVL1bVoe87dBCArSzs0iIyMJCoqivLly5Oamgpkl3Bo0qQJxYsXx8LCgm7dunH79m2tBDimpqZqoAhgb2+Ps7Oz1kigvb09N29mX/fkyZNkZmbi5uaGubm5+tq9ezdxcXFA/mT7fF2KWhi/0uPyQ16joQZWRUlNiCE98W+uJvzNmev3ntmWj48Pt27dYtq0acTFxfHjjz+ybdu219BrIV6MBItCCCHEOyYrK4svvvhCTaAxfvx4dd/j01Bzpn6GhIRQp04djI2N8fLyYvfu3TptHjlyhGrVqmFqaqqV0TFHziiZoaEh7u7uLF++XGu/RqNh/vz5tGzZEhMTE8qUKaM1xTLHhQsXaNSoEaamplSqVImIiAit/bdv3+bBgwd0794dJycnBg8ezMKFC+nZsycAcXFxVKtWjZUrVxIaGkqhQoVYsGCBev6SJUuoWbMmf/31FytWrKBRo0bq6NRvv/1GuXLlMDY25sKFC4wd+xWGxqY0rFmFqb/uBTR8vnCr1tTCMmXK4OLioo5cXrx4kTZt2lCxYkXWrVvHkSNH+PHHHwFIS0tTzytUqJDO55Pbtqys7BGq5ORk9PX1OXLkCFFRUeorOjqa2bNnA/mT7fN1qVHaFkcrYzR57NcAjlbG1Chtm5/deqq8RkMta3wIGj2uL+rP1bldiL1w8ZlteXh4MG/ePH788UcqVapEZGQkI0aMeMU9FuLFSbAohHirvOh6q+f1POulhMhNQf7s5FabDWDZsmWYmZlx8OBBXF1dmTBhAtu3b1f3L168WOv4kSNHMnz4cI4dO0bt2rXx8/Pj9u3bWsf85z//YcaMGRw+fBgDAwN69eql7tuwYQNDhgxh+PDhnDp1is8++4yePXuya9curTa++uorOnTowPHjx+nSpQsff/wx0dHROtcZMWIEUVFRuLq68WHHzqw/fImIuNvcv3+fiIgIevXqhb6+PrNnz2bnzp3s3buXrl27AtmB8sSJE/nggw+oUaMG+vr6hIeHc+nSJS5dusS+ffv4+eefcXBwwM/Pjz59+jB69GgAli9fTteuXRkwYAAZ+sZgaIKN70CsfQIwMC+MsbM318N/47Ml+/Nci3bkyBGysrKYMWMGtWrVws3NjevXrz/jm3y2ypUrk5mZyc2bN3FxcdF6PV7w/XVl+3zd9PU0BPl5AmgFjA6fTKFw0+z1iUF+nujr5RVO5r+8RjkL2RbHsdsMSg5fR6lRm/Hv0h1FUbC2tgbA2toaRVHw8fHROq9v375cvnyZ5ORkli1bxpdffqkzzVaI/CbBohBCkP0LVkJCAl5eXgXdFVGA8kpu8m85OzurBbpNTU2pUKECixYtemXt5xQwj4i7jUJ2CYGgoCBcXV0pW7Ys1tbW7NixQz3+k08+0Trf29ubDh064OHhwfz587GystIJKL/55hsaNmyIp6cno0ePZv/+/epavenTpxMQEED//v1xc3Nj2LBhfPjhh0yfPl2rjY4dO9KnTx/c3NyYOHFiriNfI0aMoHXr1lxIsyC2REtuXLvCoAXb8F94gK17Iils78jYsWPx8/Pj5MmT1KhRg8zMTHX6pqurKy1btsTCwgJbW1vmz59PVlYWCxYsIDg4GGdnZ8qWLYuXlxdWVlZ06dJFTXRz5MgR/P39uXz5MrfMSmNe3oeUC0cwdamBsZMXts37oyiZJCwbyqBvfuTU6TPExMSwYsUKzp49i76+Pi
4uLqSnpzN37lwuXLjA8uXL+e9///uvv2M3Nze6dOlC9+7dWb9+PfHx8URGRvLtt9+yZcsW4PVl+8wvvl6OzO9aBQcr7SDMwcqY+V2r4OvlWEA9y93bOBoqxIuSBDdCCAHo6+trPZ0X76e8kpu8Cl9//TWBgYE8fPiQX375hcDAQIoXL07Lli1fus3zN5OIvZmM/8ID6rY7l+/SsIb2ejxjY2N17RvoFi93dnZW/2xgYEC1atV0RvwqVqyo/tnRMfuX9ps3b1KyZEmio6N1slPWrVtXnR6Zo3bt2jrvnxyRrVixopphMoPsrKOZDxMpVNiJ9If3uXknGRNTM1Cy2LBhg3pefHw8kD1N1c/Pj127dpGSkqKOqC5btgxDQ0McHR0pV64cd+7c0elXZmYmxYoVIysrC0U5mb1To4+5d0uMS3hSyMYRx4A53I9Yy8Vti6i8bhrGRkZ4enoyYsQI+vfvj6mpKd9//z1Tp05lzJgxNGjQgG+//Zbu3bvzbwUHBzNp0iSGDx/OtWvXKFKkCLVq1aJNmzZq/wcMGMDVq1extLTE19eXmTNn/uvr5idfL0eaeToQGX+Hm0kpFLXIDrbepBHFHDmjof1WHEUDWolucnr7po2GCvGiZGRRCJHvQkNDqVevHtbW1hQuXJg2bdqoCRrS0tIYOHAgjo6OGBsbU6pUKb799ts82woKCsLR0VFN5DBv3jxcXV0xNjbG3t6ejz76SD02KyuLadOm4eLigpGRESVLluSbb74Bcp9KeOrUKVq2bIm5uTn29vZ069aNf/75R93v4+PD4MGD81wbBnDv3j0+++wz7O3t1fVgmzdvVvfv27eP+vXrY2Jioq6/evDgwUt/tuLl5ZbcJMfdu3fp0qULdnZ2mJiY4OrqSnBwcK7tZGZm0qtXL8qVK8fly5fV7RYWFjg4OFCmTBlGjRqFra2t1tTQe/fu0adPH+zs7LC0tKRx48YcP35cq+0pU6Zgb2+PhYUFLT70Z9PRy6RnZqn7laxMHt66xuZfV2JpbcsXX3yhpuPPWfsG/5uG+vHHHwPwww8/qCOfeXl8PV3OcY+3+aro6RuoGSbV3vx/ZkklKxN9S3vK9Z/PkaPHsLOzo2jRopw9e5ayZcuSkZHB9u3bsbS0pH79+jRs2FANKNPT00lPT1cLoj8up7TCpEmTiIqKYtba7RTtNBHLOp3RGBjw96rR3D+yCQADc1tsm/WleN/F/BoZT1JSEgcPHmTEiBGYmpoC8Pnnn3P9+nUePnxIaGgo3bp105qGGBAQoFOKZPz48TqB89KlS7Wm3RcqVIgJEyYQHx9PWloa169fZ/369Wr207lz53L+/HlSUlK4efMmP//8M4ULF365L6IA6etpqF22MG29i1O7bOE3Oth620ZDhXhREiwKIfLdgwcPGDZsGIcPH2bHjh3o6enRvn17srKymDNnDhs3bmTt2rXExMSwcuVKrVGPHIqiMGjQIH7++Wf27t1LxYoVOXz4MIMHD+brr78mJiaG0NBQGjRooJ4zZswYpkyZwldffcWZM2dYtWoV9vb2ufbx3r17NG7cmMqVK3P48GFCQ0P5+++/6dSpk9Zxj68NmzZtGl9//bUaAGRlZdGyZUvCw8NZsWIFZ86cYcqUKejr6wPZiTh8fX3p0KEDJ06cYM2aNezbt4+BAwe+ok86f7wr6z3Xrl1LuXLlcHd3p2vXrixZskQNtHJ+ZrZt20Z0dDTz58+nSJEiOm2kpqbSsWNHoqKi2Lt3b67lGbKysli3bh13797F0NBQ3d6xY0du3rzJtm3bOHLkCFWqVKFJkybqCNjatWsZP348kydP5mDkIU7f1SPp2Battu8f2kDmg7uYlKpMmZ7TuX37ttbo25Nypkc2a9aMhIQEEhISyMjI4MiRIy80fdHDw4Pw8HCtbeHh4Xh6emptO3DggM77J69z6lpinvX29IwtUNIectegMImGdsTGxnLu3Dnc3d0xNDQkOTmZ1NRUraA6Z0
R1/fr1nDlzBk9PTyIjI7XaXblyJQBdu3bFy8uL6pW9MSldGZv63bCo0gY9UyuSj/+h0583KTOnKDi+Xo7sG9WY1YG1mP2xN6sDa7FvVGMJFMW7QXmDJSYmKoCSmJhY0F0RQvxLGZlZyv7z/yi/Hbuq7D//j5KRmaXuu3XrlgIoJ0+eVAYNGqQ0btxYycrKyrUdQPnll1+UTz75RPHw8FCuXr2q7lu3bp1iaWmp3L9/X+e8+/fvK0ZGRsrChQtzbTc+Pl4BlGPHjimKoigTJ05UmjdvrnXMlStXFECJiYlRFEVRGjZsqNSrV0/rmOrVqyujRo1SFEVR/vjjD0VPT089/km9e/dWPv30U61te/fuVfT09JRHjx7les7rtn//fkVPT09p1arVc5/z5Gf3pM6dOystWrTQ2rZt2zYFUIKCgrS2BwUFKU5OTi/a7Zfy5M9k7Tp1lFmzZimKoijp6elKkSJFlF27dimKoih+fn5Kz549c20n5/737t2rNGnSRKlXr55y7949rWNKlSqlGBoaKmZmZoqBgYECKLa2tkpsbKyiKNnfu6WlpZKSkqJ1XtmyZZUFCxYoiqIotWvXVvr3768oiqLsP/+PUmrUZsXQ0V0pVLS0UmrUZqXUqM2Kvrmtom/loFhU/UApNWqzsufsDaVEiRKKg4OD0qNHD0VRsv8OtWnTRqvvNjY2yvr165Xo6Gjl008/VczNzZVbt24piqIou3btUgDl7t27ar+OHTumAEp8fLyiKIqyYcMGpVChQsq8efOUc+fOKTNmzFD09fXVzy/nukWKFFEWL16sxMTEKOPGjVP09PSU06dPa/Xl+5A/1PtxGhKiAIq9/2Sl1KjNiolLDQWNnmJRpbXyfcgfyrlz55TffvtNGTBggKIoiuLr66vo6ekpI0eOVD788EOlRo0aipubm9bP56VLlxRDQ0PFxcVFadeunbJy5UrFyMhI6x4HDx6slAv4Vin+2SKlcKvPFUAxdq6s9st51Gal1uS/tP4dE0K8fhIb5D9ZsyiEeO1CTyUwYdMZdbQg/c41Ug+GUOh2HA/u31Wnsl2+fJmAgACaNWuGu7s7vr6+tGnThubNm2u19/nnn2NkZMSBAwe0RneaNWtGqVKlKFOmDL6+vvj6+tK+fXtMTU2Jjo4mNTWVJk2aPFefjx8/zq5du7RqnuWIi4vDzc0N0F7HBdlruXJGMqKioihRooR6bG7XOHHihDqqAdkjpllZWcTHxxdIYorXsWavUaNGjBgxgoyMDAwMsv/b2bVrF05OToSFhWkdu2vXLho1avSvr/ksOj+Tt6+ScOAggydnj7QZGBjQuXNnFi9ejI+PD/369aNDhw4cPXqU5s2b065dO+rUqaPVpr+/PyVKlGDnzp06awIhO+NoQEAACQkJjBw5kv79++Pi4gJk/ywkJyfrTBl89OiROkU7Ojqavn37Av9L2W9UvBwpl7OnYGelPiAz+Q6FipZWz7/zKINq1arpjOg9qU2bNkyZMoWoqChcXFzYuHFjriOneWnXrh2zZ89m+vTpDBkyhNKlSxMcHKyT7XHChAmEhITQv39/HB0dWb16tc7oo62pIZCe63WKdhhHasI57u1ZztjeH/IVULZsWTp37gzAtm3bWL16NV9++SUJCQlUqVKF6dOn88EHH6htlCxZknXr1vH555+zbds2bt++zfz587Wyu2ZlZZG4YwEJ16+hZ2SKWflG2DQJBGQtmhDi/SLTUIUQr1VOoorHp5XdXDeRB/fvodT7lOkrNnPw4EEge71ilSpViI+PZ+LEiTx69IhOnTpprTuE7KDw2rVr/PGH9rQwCwsLjh49yurVq3F0dGTcuHFUqlSJe/fu5frL+9MkJyfj5+enVc8sKiqK2NhYramtT6uL9qxrJicn89lnn2m1f/z4cWJjY7WKdeeX17Vmr1GjRiQnJ3P48GH1mLCwMEaPHs3BgwfVjJopKSkcPHhQDRYvX75M27ZtMTc3x9LSkk6dOv
H333+rbeSUjViyZAklS5bE3Nyc/v37k5mZybRp03BwcKBo0aLqutQcv+4/S8cuAURO/JDLMztyY/WXJB5Yi5KViX8jbzR6emg0GubNm8fKlSuxsrJi2bJlnD59Wl2L1qRJE50aaK1ateLEiRM6dQFzFClSBBcXF+rXr88vv/zC4MGDOXPmjPrZOzo66vy8xcTEMHLkSJ22njb90bbpZ9j+f6mBnONq1qypfp8NGzbU+fmyt7fn4MGDpKamcvr0aa2A3cfHR2u9HWRnT1UURWuKeL9+/YiLiyMtLY2YmBi6deum07dixYrx559/kpKSQnx8vNa0bmdnZxRFoWvrhmqGST1jc0qN2oxxyf89lDF2dMP70++4fz+J5ORkjh8/zpdffqnu9/f3Jz4+npSUFPbv34+fnx+KomiVGGnTpg2xsbGkpKSwZ88eevbsqXWPc+fO5frleLZFXaLG2HUUaTMcfRNLQNaiCSHeL68lWLx48SK9e/emdOnSmJiYULZsWYKCgrSK0Qoh3n2ZWYqaqELd9ug+GXeuYlWnMybO3gSfTuef23e0zrO0tKRz584sXLiQNWvWsG7dOq3MhR988AGrVq2iT58+hISEaJ1rYGBA06ZNmTZtGidOnODixYvs3LkTV1dXTExMtMoHPE2VKlU4ffo0zs7OOjXNzMzMnquNihUrcvXqVc6dO5fnNc6cOaPTvouLi9Zatvzyutbsubm5UaxYMbXmXlJSEkePHqVjx444OzurwdX+/ftJTU2lUaNGZGVl0bZtW+7cucPu3bvZvn07Fy5cUEeQcsTFxbFt2zZCQ0NZvXo1ixcvpnXr1ly9epXdu3czdepUxo4dqz6QyMxS6NOjC5kP71G04wQce8zCsGgZHpzahVW9rhTrOYei1dtgampKkyZNKF68OH369GH37t0sWrSIHj16sGLFCmbNmsVPP/2k1Zd+/foxZcoUPvjgg1yL2j/OycmJzp07M2bMGCD7Z+HGjRsYGBjo/CzkfM4eHh7qfeSk7E+7flZtU8/IDH1zW1Kvx6gp+6s4WXLkyJFnfvc5CV7eBHnV23v8fX6N6slaNCHE++61TEM9e/asWtPIxcWFU6dOERgYyIMHD3RqLgkh3l2R8Xd0ElXoGZujZ2JJ8vE/0De35cKlW/QL+VXd//333+Po6EjlypXR09Pjl19+wcHBQWtUA6B9+/YsX76cbt26YWBgwEcffcTmzZu5cOECDRo0wMbGhq1bt5KVlYW7uzvGxsaMGjWKL774AkNDQ+rWrcutW7c4ffo0vXv31un7gAEDWLhwIf7+/mq20/PnzxMSEsKiRYvUJDVP07BhQxo0aECHDh34/vvvcXFx4ezZs2g0Gnx9fRk1ahS1atVi4MCB9OnTBzMzM86cOcP27dv54YcfXu5DfwGZWYpWevpFixerhc19fX1JTExk9+7d+Pj4cPnyZSpXrky1atUAck06lJycTOvWrUlNTWXXrl1YWVmp+xo1akRYWBhjxoxh7969uLm5YWdnR4MGDQgLC1P3ly5dmlKlSrF9+3ZOnjxJfHw8Tk5OAPz888+UL1+eQ4cOUb16dSB7uuCSJUuwsLDA09OTRo0aERMTw9atW9HT08Pd3Z2pU6eya9cuatasycJftnL/8lmcBq1EY5A9KmzsVJ6kw7+hZ2RCITtnHhQyJ0vJTogyadIkdu7cSenSpfn111/p2bMnqampbN68OddpwoMGDSIzM5M2bdqwbds26tWrl+fnP2TIELy8vDh8+DBNmzaldu3atGvXjmnTpqmF3Lds2UL79u2pVq0aQ4YMISAggGrVqlG3bl3KXN7CwX8uY2D9v5IvFlU/4P6BXylkU4xeAc0ZNHCATtbN3Bw9epRr165hZGT0QtNPX5ecDJOPTxWG7FG9ID/PfA3WcjJzCiHE++i1BIs5a4VylClThpiYGObPny/BohDvkZx1VY/TaPQo8sEX3P1rAdcXD6CQbXEGT/6OsYEdgeyppNOmTS
M2NhZ9fX2qV6+u/uL/pI8++oisrCy6deuGnp4eRYsWZf369YwfP56UlBRcXV1ZvXo15cuXB7JHxwwMDBg3bhzXr1/H0dFRXQP2pGLFihEeHs6oUaNo3rw5qamplCpVCl9f31z7kpd169YxYsQI/P39efDgAS4uLkyZMgXIHnncvXs3//nPf6hfvz6Komitv3qd8nvNno+PD0OHDiU9PZ2wsDB1LVvDhg1ZsGABgBo0Qvb6PCcnJzVQBPD09MTa2pro6Gg1WHR2dsbCwkI9xt7eHn19fa3vyN7eXl1HeiwqCiU9hStz/NX9Skb2rJfM5P+NXts5lsDCwoIOHTowbdo0WrRoweHDh6lYsSImJibUr19fZ1Q7x9ChQ8nKyqJVq1aEhobqfE6P30/z5s0ZN24cW7duZevWrfznP/+hZ8+e3Lp1CwcHBxo0aKBm7O3cuTNxcXF88cUXpKSk0KFDBzp06cnW0P9Nx7as0R7j9Pvc3z6HL3b9SK9evWjfvj2JiYm59iFnZPezzz6jbNmypKamqqPJr9qLtvs21dsTQoh3lUZ5Xf8rPGHs2LGEhoZqrVl5UmpqKqmpqer7+/fv4+TkRGJiIpaWlvnRTSHEKxQRd1urWHheVgfWeq4n9xqNhg0bNtCuXbtX0Lv3V8460sf/8b+7awn3I9eDRg89PU12gWlFwcjIiISEBKysrLh16xZbt25l+/btrFu3jgEDBjB9+nQuXrxI6dKl+fTTT1mxYgWbNm2icePGWteMi4vDxcWF8PBwhgwZwsiRI+nUqRPXrl2jbNmyavC+ZMkSunTpwpw5c5g5c6ZaaD2HjY0Ns2fPpnv37owfP57ffvtNq2RHTv26x2vT+fj44O3tzaxZs+j/RRA/zf8R+090a3fqGZmhb2rFvX0rKXrnJOfOnFT3zZo1i1mzZnHx4sV/8cm/Hk+OEEtAJYR4V92/fx8rKyuJDfJRviS4OX/+PHPnzuWzzz576nHffvstVlZW6uvxJ8pCiLdPzrqqvH5tzVlXVaO07XO1l5CQQMuWLV9Z/95Hua0jVbIyST69E5tGvSnWcw5eA/7LkaPHOH78OMWKFWP16tUA2NnZvfSavbJly+Lk5MTGjRuJioqiYcOGABQvXpzixYszY8YM0tLS1JFFDw8Prly5wpUrV9Q2zpw5w71793SyZ76ID5rUJfPhXTR6+hSyKab10je1QgOYGxlgavjsacZvirepgLkQQoi3ywsFi6NHj0aj0Tz1dfbsWa1zrl27hq+vLx07diQwMPCp7Y8ZM4bExET19fgvCUKIt8+rTlTh4OCAkZHRq+vgeyi3daSPzkeSlZKMeaXmFLJzJtHEkYdmxfDy8qJDhw4sXryYcePG8fvvv3P+/HlOnz791DV7kyZNok2bNuzbt09rX6NGjZg3bx4uLi7q1ErInoo6d+5cNREOQNOmTalQoQJdunTh6NGjREZG0r17dxo2bKium3wZLZo3o7x3NW6un0RK/FEyEv8m5Wo0d/f8TFpCLAA+7nYv3b4QQgjxLnmhYHH48OFER0c/9VWmTBn1+OvXr9OoUSPq1Kmj8wQ6N0ZGRlhaWmq9hBBvt5xEFQ5W/0v1/+DsPv5eOpDrsz6iS0MvmjZtyoMHDwBYsmQJ5cuXx8jICEdHRwYOHKiep9FotKYXXrlyhU6dOmFtbY2trS1t27bVmiYYEBBAu3btmD59Oo6OjhQuXJgBAwaQnv6/Gm6pqamMGjUKJycnjIyMcHFxYfHixer+U6dO0bJlS8zNzbG3t6dbt278888/r+GTyh+5rSNNPvEnJqW80TMy0zmuQ4cOHD58GAMDA8aMGUPFihVp0KAB+vr6T12zN2HCBFq1asX+/fvV7Y0aNSIpKUmn9l7Dhg1JSkrSKteg0Wj4/fffsbGxoUGDBjRt2pQyZcqwZs2af3P7aDQaIsL+omXTRtwJnc21nz7jn43TyEy8iYODPfO7VsGlqMWzGxJCCCHeA69tzeK1a9do1K
gRVatWZcWKFc+VOfBJMi9ZiHdHzrqqsxcuEdiqJlOnTqXDhx+SlJTE3r176d69O8uXL2fYsGFMmTKFli1bkpiYSHh4OEOHDgW01yymp6dTqVIlateuzdChQzEwMGDSpEkcOXKEEydOYGhoSEBAABs2bOCTTz5hyJAhnD9/ns6dOzNr1ix1pkPnzp2JiIhg9uzZVKpUifj4eP755x86d+7MvXv3cHNzo0+fPnTv3p1Hjx4xatQoMjIy2LlzZwF+mi/vVa8jfZvJWj8h3my5rUsW7zeJDfLfawkWr127ho+PD6VKlWLZsmVagaKDg8NTztQmPxBCvHuOHj1K1apVuXjxIqVKldLaV7x4cXr27MmkSZNyPffxYHHFihVMmjSJ6OhoNJrsX/DT0tKwtrbmt99+o3nz5gQEBBAWFkZcXJz671CnTp3Q09MjJCSEc+fO4e7uzvbt22natKnO9SZNmsTevXv544//ZZu8evUqTk5OxMTE4Obm9qo+lnyTmaVQb+pObiSmkNs//hqyyxPsG9VYAichRIFKTk4mNTWVwoXf7QdX4vlJbJD/XkvpjO3bt3P+/HnOnz9PiRIltPblU/JVIcQb5PERnMLmJWjcpAkVKlSgRYsWNG/enI8++oj09HSuX79OkyZNnqvN48ePc/78ea2yCQApKSnExcWp78uXL6/1wMrR0ZGTJ7OzXEZFRaGvr68mW8ntGrt27cLc3FxnX1xc3DODxZwsoceOHcPb2/u57ut1y1lH2m/F0eyMp4/ty++C50II8TTm5ua5/vsrhMg/ryUbakBAAIqi5PoSQrxfQk8lUG/qTvwXHmBISBRdlxziQePRBP24HE9PT+bOnUuxYsXUhCeNGzdWE2Y9Xq/1ScnJyVStWpWoqCit17lz5/jkk0/U4woVKqR1nkajISsrC0CnFmBu1/Dz89O5RmxsLA0aNHjmvTs5OZGQkICXl9czj81Pua0jhewRxfldq+RrwXMhxNvBx8eHQYMGMXToUGxsbLC3t2fhwoU8ePCAnj17YmFhgYuLC9u2bVPP2b17NzVq1FDXoI8ePZqMjAwAfvrpJ4oVK6b+e5yjbdu29OrVC8iehvrkg7ZFixbh4eGBsbEx5cqVY968eeq+tLQ0Bg4ciKOjI8bGxpQqVYpvv9UtkyOEeH6vZWRRCCEg93p+AH/fT+WH0wbM79qXcePGYWlpibu7Ow8fPuTDDz9k9OjRAE/NfFqlShXWrFlD0aJFX3oqSoUKFcjKymL37t25TkOtUqUK69atw9nZGQODF//nUl9f/4Wm3ucnKXguhHhRy5Yt44svviAyMpI1a9bQr18/NmzYQPv27fnyyy+ZOXMm3bp14/Lly9y9e5dWrVoREBDAzz//zNmzZwkMDMTY2Jjx48fTsWNHBg0axK5du9QZJXfu3CE0NJStW7fmev2VK1cybtw4fvjhBypXrsyxY8cIDAzEzMyMHj16MGfOHDZu3MjatWspWbKkTvkdIcSLy5c6i0KI909u9fwAUq/HcC9iLakJsXy5fBe//rqOlJQUbGxsmDhxIgsWLGDt2rUkJSURHx/P3LlzuXfvnlqntWPHjnh5eWFjY0ORIkVo27YtEydOxNXVlUKFCmFpaclXX32ldU1nZ2cmT55Mr169mDdvHgcOHOCnn37C2dmZHj160KtXL2bPnk3t2rUxNDTE3NycTz/9lICAAO7cuYO/vz9t2rShWbNm9OzZExMTE6ysrPj666/JyMhg5MiR2NraUqJECYKDg9XrXrx4EY1Go5Wc4fTp07Rp0wZLS0ssLCyoX7++Om02LCyMGjVqYGZmhrW1NXXr1uXSpUuv5fsBqc8nhHgxlSpVYuzYsbi6ujJmzBiMjY0pUqQIgYGBuLq6Mm7cOG7fvs2JEyeYN28eTk5O/PDDD5QrV4527doxYcIEZsyYQVZWFjY2NrRs2ZJVq1ap7f/6668UKVJEKzPy44KCgpgxYwYffvghpUuX5sMPP+Tzzz9nwY
IFAFy+fBlXV1fq1atHqVKlqFevHv7+/vny2QjxrpKRRSHEa5FbPT8APUNTUq6c4v7h30lIfcgXJUtSrVo17O3t6dGjBykpKcycOZMRI0ZQpEgROnToQMuWLUlKSgJg7ty5lChRAn19ffbs2UOfPn0YN24cBgYGODo6UqZMGaZPn07ZsmW1rjtjxgwmTpxIVlYWe/bsoV+/fjRs2JD58+czcuRIhg0bBkCJEiXo3Lkzv/76K2lpaYSHhzNq1Ci2b99OWloaNjY2dO7cmfr169OnTx/2799PgwYNOHjwIGvWrOGzzz6jWbNmOuu1ITv5V4MGDfDx8WHnzp1YWloSHh5ORkYGGRkZtGvXjsDAQFavXk1aWhqRkZFq8h4hhMhvj683v/8onVpVK6n79PX1KVy4MBUqVFC35SwnuHnzJtHR0dSuXVvr37C6deuSnJzM1atXKVmyJF26dCEwMJB58+ZhZGTEypUr+fjjj9HT0x3LePDgAXFxcfTu3VurbndGRgZWVlZA9jKoZs2a4e7ujq+vL23atKF58+av/HMR4n0iwaIQ4rXIrZ4fQKEiTth3+lp93712KY78/A2bf/9FK5GBoaEhAwcOpFq1asybN4/o6OhcE8pYWVnRrFkz/vzzT3XbF198wXfffcfp06eB7JHFVq1a0b9/f/r374+iKDg4OLBr1y769u1LxYoVsbKy4sqVK5iZZdca9PHxwc/Pj6lTp7J+/Xo1s+qFCxfUX2S+++47Hj58yJdffgnAmDFjmDJlCvv27ePjjz/W6euPP/6IlZUVISEh6lrKnHu6c+cOiYmJtGnTRg10cyt6L57Nx8cHb29vZs2a9dqu4ezszNChQ9WyLkK8a0JPJTBh0xn1od+NhPskHP+bD04lqOuaNRqN1rrwnMDwyXWIefHz80NRFLZs2UL16tXZu3cvM2fOzPXY5ORkABYuXEjNmjW19uUkMatSpQrx8fFs27aNv/76i06dOtG0aVN+/fXXF7hzIcTjJFgUQrwWRS2Mn30Q8HPEJf45dwvz0pX4fvZcGrgVVffZ2tqyaNEiSpQokWfm0ejoaNq2bau1rW7dusyaNYvMzEz1l4iKFSuq+zUaDQ4ODty8eVNto1KlSmqgmNNGVlYWMTEx6tPy8uXLaz3xtre310pek/OkPafdJ0VFRVG/fn2dpDs59xoQEECLFi1o1qwZTZs2pVOnTjg6SrIZIUT+ymu9+YPUDPqtOPpcibA8PDxYt24diqKoQWR4eDgWFhbqzAtjY2M+/PBDVq5cyfnz53F3d6dKlSq5tmdvb0+xYsW4cOECXbp0yfO6lpaWdO7cmc6dO/PRRx/h6+vLnTt3sLW1ff4PQAihkjWLQojXokZpWxytjHneSZRpGkMm7rnL+RQzXFxccHFxwdbW9pkZS5/X07Ki/ps2XqTdZ91LcHAwERER1KlThzVr1uDm5saBAwdeqI9CCPFv5LXe/HETNp0hM+vpGe779+/PlStXGDRoEGfPnuX3338nKCiIYcOGaT1069KlC1u2bGHJkiVPDQIBJkyYwLfffsucOXM4d+4cJ0+eJDg4mO+//x6A77//ntWrV3P27FnOnTvHL7/8goODA9bW1s97+0KIJ0iwKIR4LXLq+QHPHTCC7i8hFStW5OrVq5w7dy7X4z08PAgPD9faFh4ejpubm1Z9xafx8PDg+PHjPHjwQKsNPT093N3dX6D3T1exYkX27t1Lenp6nsdUrlyZMWPGsH//fry8vLSSP4jnl5GRwcCBA7GysqJIkSJ89dVXavmmu3fv0r17d2xsbDA1NaVly5bExsZqnb9u3TrKly+PkZERzs7OzJgx46nXW7RoEdbW1uzYsQPITtRRoUIFTExMKFy4ME2bNtX6+RLiTZXXevMcCpCQmEJk/J2ntlO8eHG2bt1KZGQklSpVom/fvvTu3ZuxY8dqHde4cWNsbW2JiYnRKnuUmz59+rBo0SKCg4OpUKECDRs2ZOnSpZQuXRoACw
sLpk2bRrVq1ahevToXL15k69atua6BFEI8H5mGKoR4bXLq+T2+7iUvSkY6Gcl3uZoM2yKjqeZsi4GBAQ0bNqRBgwZ06NCB77//HhcXF86ePavWYRw+fDjVq1dn4sSJdO7cmYiICH744Qet2lvP0qVLF4KCgujRowfjx4/n1q1bDBo0iG7duqlTUF+FgQMHMnfuXD7++GPGjBmDlZUVBw4coEaNGhgaGvLTTz/xwQcfUKxYMWJiYoiNjaV79+6v7Prvk2XLltG7d28iIyM5fPgwn376KSVLliQwMJCAgABiY2PZuHEjlpaWjBo1ilatWnHmzBkKFSrEkSNH6NSpE+PHj6dz587s37+f/v37U7hwYQICAnSuNW3aNKZNm8aff/5JjRo1SEhIwN/fn2nTptG+fXuSkpLYu3ev1BoWb4W81ps7fDJF57iLFy/qHPf4z3nDhg2JjIx86vX09PS4fv16rvvGjx/P+PHjtbZ98skneQaVgYGBWslvhBD/ngSLQojX6vF6fttOJfBzRO6lIFLij3D1x24A+P2Yvc3d3Z2zZ8+ybt06RowYgb+/Pw8ePMDFxYUpU7J/calSpQpr165l3LhxTJw4EUdHR77++utcf6nPi6mpKX/88QdDhgyhevXqmJqaqsHpq1S4cGF27tzJyJEjadiwIfr6+nh7e1O3bl1MTU05e/Ysy5Yt4/bt2zg6OjJgwAC1ZIjI2+MZG4taGKMATk5OzJw5E41Gg7u7OydPnmTmzJn4+PiwceNGwsPDqVOnDpBdu83JyYnffvuNjh078v3339OkSRO1BIubmxtnzpzhu+++0/m5GjVqFMuXL2f37t2UL18egISEBDIyMvjwww8pVaoUgFbGSCHeZM+73vx5jxNCvN00yhv8qPP+/ftYWVmRmJj40kW3hRBvjoi42/gvfPYavNWBtahdtnA+9Ei87Z7M2AhwZ+2X1KrkwZ8bQtRtv//+Ox999BG//vorH330ESkpKVrTlCtXrkz79u0ZN24cVapUoW3btgQFBWmd37FjRx49eoS+vj7Ozs5kZmby4MEDDh8+TJkyZdRjMzMzadGiBZGRkbRo0YLmzZvz0UcfYWNj85o/DSH+PWdnZ/Bqhcarda7rFjWAg5Ux+0Y1ltqsIt9JbJD/ZBK3ECLfPCvpjQZwtDKmRmnJWieeLSdj45NTnNMysoiIu03oqYTXev369euTmZnJ2rVrtbbr6+uzfft2tm3bhqenJ3PnzsXd3Z34+PjX2h/x7goICECj0dC3b1+dfQMGDECj0bzQbIqnOXToELOChgO6681z3gf5eUqgKMR7QoJFIUS+eVrSG/klRLyIZ2VsTL1+TitZ0oEDB3B1dcXT05OMjAwOHjyoHnv79m1iYmLw9Mz+2XzepEk1atRg27ZtTJ48menTp2sdr9FoqFu3LhMmTODYsWMYGhqyYcOGV3Dn4n3l5ORESEgIjx49UrelpKSwatUqSpYs+cquY2dnR7vqZZnftQoOVtpTTR2sjJ+rbIYQ4t0hwaIQIl/lJL2RX0LEv/GsjI0ZSbc4vX4uv+w4yOrVq5k7dy5DhgzB1dWVtm3bEhgYyL59+zh+/Dhdu3alePHiar3O4cOHs2PHDiZOnMi5c+dYtmwZP/zwAyNGjNC5Tp06ddi6dSsTJkxg1qxZABw8eJDJkydz+PBhLl++zPr167l16xYeHh6v5bMQ74cqVarg5OTE+vXr1W3r16+nZMmSVK5cWd3m7Oys/izm8Pb2VhPFKIrC+PHjKVmyJEZGRhQrVozBgwfrnO/r5ci+UY35qbMH7udXc39RT45NaM2Ij5uxefPm13qvQog3hyS4EULku8eT3uQkJalR2lZGFMVzyytjYw6z8o1RMtLo/WFzjAoZMGTIED799FMgu57lkCFDaNOmDWlpaTRo0ICtW7eqNTNfNGlSvXr12LJlC61atUJfX5+mTZuyZ88eZs2axf379ylVqhQzZsygZcuWr/QzEO+fXr16ER
wcrNYjXLJkCT179iQsLOy521i3bh0zZ84kJCSE8uXLc+PGDY4fP57rsRoUgvp3ISkpiTWrV1K2bFnOnDnz3GWJhBBvPwkWhRAFQl9PI0lsxEt7WibGx1P8r/51uc7PmY2NDT///PNT2+/QoQMdOnTIc/+TJQMaNGhAcnKy+j40NPSp7QvxNDoZfv9/vnXXrl0ZM2YMly5lZ5UODw8nJCTkhYLFy5cv4+DgQNOmTSlUqBAlS5akRo0auR77119/ERkZSXR0NG5ubgBayZyEEO8+CRaFEEK8dXKSJd1ITHlqxkZJliTeNrll+H1wKgFXKw12dna0bt2apUuXoigKrVu3pkiRIi/UfseOHZk1axZlypTB19eXVq1a4efnh4GB7q+EUVFRlChRQg0UhRDvH1mzKIQQ4q0jyZLEuyivDL+P0jKJuppI6KkEevXqxdKlS1m2bBm9evXSaUNPT48nq6Klp6erf3ZyciImJoZ58+ZhYmJC//79adCggdYxOUxMTF7RnQkh3lYSLAohhHgrSbIk8S55VoZfgAmbztCseQvS0tJIT0+nRYsWOsfY2dmRkPC/sjH379/XKdtiYmKCn58fc+bMISwsjIiICE6ePKnTVsWKFbl69Srnzp176fsSQrzdZBqqEEKIt5YkSxLvimdl+AVISEzhyOVEoqOjAXJNNNO4cWOWLl2Kn58f1tbWjBs3Tuu4pUuXkpmZSc2aNTE1NWXFihWYmJhQqlQpnbYaNmxIgwYN6NChA99//z0uLi6cPXsWjUaDr6/vv7xjIcTbQIJFIYQQbzVJliTeBc/K8Pv4cU/7eR8zZgzx8fG0adMGKysrJk6cqDWyaG1tzZQpUxg2bBiZmZlUqFCBTZs2Ubhw7m2uW7eOESNG4O/vz4MHD3BxcWHKlCm5HiuEePdolCcntr9B7t+/j5WVFYmJiVhaWhZ0d4QQQgghXouIuNv4LzzwzONWB9aShyPivSWxQf6TNYtCCCGEEAUsJ8NvXhOoNYCjZPgVQuQzCRaFEEIIIQqYZPgVQryJJFgUQgghhHgDSIZfIcSbRhLcCCGEEEK8ISTDrxDiTSLBohBCCCHEG0Qy/Aoh3hQyDVUIIYQQQgghhA4JFoUQQgghhBBC6JBgUQghhBBCCCGEDgkWhRBCCCGEEELokGBRCCGEEEIIIYQOCRaFEEIIIYQQQuiQYFEIIYQQQgghhA4JFoUQQgghhBBC6JBgUQghhBBCCCGEDgkWhRBCCCGEEELokGBRCCGEEEIIIYQOCRaFEEIIIYQQQuiQYFEIIYQQQgghhA4JFoUQQojX5MaNGwwaNIgyZcpgZGSEk5MTfn5+7NixAwBnZ2c0Go3Wq0SJEur5j+/X19enWLFi9O7dm7t37xbULQkhhHiPSLAohBBCvAYXL16katWq7Ny5k++++46TJ08SGhpKo0aNGDBggHrc119/TUJCgvo6duyYVjs5+y9fvszKlSvZs2cPgwcPzu/bEUII8R4yKOgOCCGEEO+i/v37o9FoiIyMxMzMTN1evnx5evXqpb63sLDAwcEhz3Ye31+8eHF69OjB6tWrX1/HhRBCiP8nI4tCCCHEK3bnzh1CQ0MZMGCAVqCYw9ra+qXavXbtGps2baJmzZr/sodCCCHEs0mwKIQQQrxi58+fR1EUypUr98xjR40ahbm5ufqaM2dOrvtNTEwoUaIEGo2G77///nV1XQghhFDJNFQhhBDiFcjMUoiMv8PNpBRuXbn33OeNHDmSgIAA9X2RIkVy3a8oCleuXOHLL7+kdevW7NmzB319/VfUeyGEEEKXBItCCCHEvxR6KoEJm86QkJgCQOajJNBo2Lj7EO3bt3/quUWKFMHFxeW59ru6ujJr1ixq167Nrl27aNq06au7CSH+X2ZmJhqNBj09mYAmxPtO/hUQQggh/oXQUwn0W3FUDRQB9E0sMHGuwvLFP/FbZJzOOffu3Xvp6+WMJj569Oil2xAF49dff6VChQqYmJhQuH
BhmjZtyoMHD8jKyuLrr7+mRIkSGBkZ4e3tTWhoqNa5V69exd/fH1tbW8zMzKhWrRoHDx5U92/atInq1atjbGxMkSJFtB5SpKamMmLECIoXL46ZmRk1a9YkLCxM3b906VKsra3ZuHEjnp6eGBkZcfny5df+eQgh3nwSLAohhBAvKTNLYcKmMyi57LNp3g+ULD7xa8Ivv/xKbGws0dHRzJkzh9q1az/3NZKSkrhx4wYJCQlERkYycuRI7OzsqFOnzqu7EfHaJSQk4O/vT69evYiOjiYsLIwPP/wQRVGYPXs2M2bMYPr06Zw4cYIWLVrwwQcfEBsbC0BycjINGzbk2rVrbNy4kePHj/PFF1+QlZUFwJYtW2jfvj2tWrXi2LFj7Nixgxo1aqjXHjhwIBEREYSEhHDixAk6duyIr6+v2j7Aw4cPmTp1KosWLeL06dMULVo0fz8gIcQbSaMoSm7/x70R7t+/j5WVFYmJiVhaWhZ0d4QQQggtEXG38V94IM/9Gcl3uB+xBrO/T3Dn1t/Y2dlRtWpVPv/8c3x8fHB2dmbo0KEMHTo01/OdnZ25dOmS+t7Ozo7q1avzzTff4O3t/YrvRrwOOWtZIyIPMfyTlsRdiKdMaWetY4oXL86AAQP48ssv1W01atSgevXq/Pjjj/z000+MGDGCixcvYmtrq3ONOnXqUKZMGVasWKGz7/Lly5QpU4bLly9TrFgxdXvTpk2pUaMGkydPZunSpfTs2ZOoqCgqVar06m5eiFdMYoP8J2sWhRBCiJd0MynlqfsNzG2xbdaP2R9709a7uM7+ixcvPvX8Z+0Xb7bH17IqWZkYl6qEm0d5ajdoTPeOH/DRRx+hr6/P9evXqVu3rta5devW5fjx4wBERUVRuXLlXAPFnP2BgYG57jt58iSZmZm4ublpbU9NTaVw4cLqe0NDQypWrPhvblcI8Q6SYFEIIYR4SUUtjF/pceLdkbOWNWf6lkZPn6KdJ5F2LZqT8ceY/N1M/vOf/7B9+/ZntmViYvLS+5OTk9HX1+fIkSM62XPNzc212tBoNM/sixDi/SJrFoUQQoiXVKO0LY5WxuT1K7YGcLQypkbp3EeExLspr7WsGo0GoxKe2NTvgn2PWRgaGrJjxw6KFStGeHi41rHh4eF4enoCULFiRaKiorhz506u16tYsSI7duzIdV/lypXJzMzk5s2buLi4aL0cHBz+9b0KId5tEiwKIYQQL0lfT0OQX/Yv9E8GjDnvg/w80deTEZv3SWT8Ha3suACp12NIjFhLakIs6fdvEndoFzdv3sLDw4ORI0cydepU1qxZQ0xMDKNHjyYqKoohQ4YA4O/vj4ODA+3atSM8PJwLFy6wbt06IiIiAAgKCmL16tUEBQURHR3NyZMnmTp1KgBubm506dKF7t27s379euLj44mMjOTbb79ly5Yt+fvBCCHeOjINVQghhPgXfL0cmd+1iladRQAHK2OC/Dzx9XIswN6JgpDbWlY9Q1NSrpzi/uHfyUp9iIFVUXoO+4qWLVvSokULEhMTGT58ODdv3sTT05ONGzfi6uoKZK8n/PPPPxk+fDitWrUiIyMDT09PfvzxRwB8fHz45ZdfmDhxIlOmTMHS0pIGDRqo1w4ODmbSpEkMHz6ca9euUaRIEWrVqkWbNm3y5wMRQry1JBuqEEII8QrkZL28mZRCUYvsqacyovh+elaW3ByrA2tRu2zhZx4nhMgmsUH+k2moQgghxCugr6ehdtnCtPUuTu2yhV86UMwpkJ5j/PjxzyyTcfHiRTQaDVFRUS91TfFqyVpWIcS7QoJFIYQQ4g02YsQIreQlAQEBtGvXTusYJycnEhIS8PLyyufeidzIWlYhxLtCgkUhhBDiDWZubq5VDy83+vr6ODg4YGAgqQjeFDlrWR2stMumOFgZM79rFVnLKoR4K0iwKIQQQrxCPj4+DBw4kIEDB2JlZUWRIkX46quvyEkRcPfuXbp3746NjQ2mpqa0bNmS2NjYPN
t7fBrq+PHjWbZsGb///jsajQaNRkNYWFiu01BPnz5NmzZtsLS0xMLCgvr16xMXFwdAWFgYNWrUwMzMDGtra+rWrculS5de22fyvvL1cmTfqMasDqzF7I+9WR1Yi32jGkugKIR4a8gjSCGEEOIVW7ZsGb179yYyMpLDhw/z6aefUrJkSQIDAwkICCA2NpaNGzdiaWnJqFGjaNWqFWfOnKFQoUJPbXfEiBFER0dz//59goODAbC1teX69etax127do0GDRrg4+PDzp07sbS0JDw8nIyMDDIyMmjXrh2BgYGsXr2atLQ0IiMjpSD7a5KzllUIId5GEiwKIYQQ/9LjmVDvP0rHycmJmTNnotFocHd35+TJk8ycORMfHx82btxIeHg4derUAWDlypU4OTnx22+/0bFjx6dex9zcHBMTE1JTU59aUP3HH3/EysqKkJAQNQB1c3MD4M6dOyQmJtKmTRvKli0LgIeHx6v4GIQQQrxjZBqqEEII8S+Enkqg3tSd+C88wJCQKM4k3Ocf05L8cfqGekzt2rWJjY3lzJkzGBgYULNmTXVf4cKFcXd3Jzo6+pX1KSoqivr16+c6Umlra0tAQAAtWrTAz8+P2bNnk5CQ8MquLYQQ4t0hwaIQQgjxkkJPJdBvxVESErWLsD9Ky6TfiqOEniqYIMzExOSp+4ODg4mIiKBOnTqsWbMGNzc3Dhx4dl1AIYQQ7xcJFoUQQoiXkJmlMGHTGZRc9qVePwfAhE1nyMxSOHDgAK6urnh6epKRkcHBgwfVY2/fvk1MTAyenp7PdV1DQ0MyMzOfekzFihXZu3cv6enpeR5TuXJlxowZw/79+/Hy8mLVqlXPdX0hhBDvDwkWhRBCiJcQGX9HZ0QxR0bSLW7vWMjlC+eZNGchc+fOZciQIbi6utK2bVsCAwPZt28fx48fp2vXrhQvXpy2bds+13WdnZ05ceIEMTEx/PPPP7kGhAMHDuT+/ft8/PHHHD58mNjYWJYvX05MTAzx8fGMGTOGiIgILl26xJ9//klsbKysWxRCCKFDgkUhhBDiJdxMyj1QBDAr3xglI42En4cxPegLhgwZwqeffgpkTwGtWrUqbdq0oXbt2iiKwtatW5+ZCTVHYGAg7u7uVKtWDTs7O8LDw3WOKVy4MDt37iQ5OZmGDRtStWpVFi5cSKFChTA1NeXs2bN06NABNzc3Pv30UwYMGMBnn332ch+EEEKId5ZGySn89Aa6f/8+VlZWJCYmYmlpWdDdEUIIIVQRcbfxX6i7zu/GqtEYFi2DbdPs4HB1YC0pnSCEEK+AxAb5T0YWhRBCiJdQo7QtjlbG5FWdUAM4WhlTo7RtfnZLCCGEeGUkWBRCCCFegr6ehiC/7KQ0eQWMQX6e6OtJsXshhBBvJ5mGKoQQQvwLoacSmLDpjFayG0crY4L8PPH1cizAngkhxLtFYoP8Z1DQHRBCCCHeZr5ejjTzdCAy/g43k1IoapE99VRGFIUQQrztJFgUQggh/iV9PY0ksRFCCPHOkTWLQgghhBBCCCF0SLAohBBCCCGEEEKHBItCCCGEEEIIIXRIsCiEEEIIIYQQQocEi0IIIYQQQgghdEiwKIQQQgghhBBChwSLQgghhBBCCCF0SLAohBBCCCGEEEKHBItCCCGEEEIIIXRIsCiEEEIIIYQQQocEi0IIIYQQQgghdEiwKIQQQgghhBBChwSLQgghhBBCCCF0SLAohBBCCCGEEEKHBItCCCGEEEIIIXRIsCiEEEIIIYQQQocEi0IIIYQQQgghdEiwKIQQQgghhBBChwSLQgghhBBCCCF0SLAohBBCCCGEEEKHBItCCCGEEEIIIXRIsCiEEEIIIYQQQocEi5qcvTwAAAhYSURBVEIIIYQQQgghdEiwKIQQQgghhBBChwSLQgghhBBCCCF0SLAohBBCCCGEEEKHBItCCCGEEEIIIXQYFHQHnkZRFADu379fwD0RQgghhBBCFKScmCAnRhCv3xsdLCYlJQ
Hg5ORUwD0RQgghhBBCvAmSkpKwsrIq6G68FzTKGxyaZ2Vlcf36dSwsLNBoNAXdnbfS/fv3cXJy4sqVK1haWhZ0d8QT5Pt5c8l382aT7+fNJd/Nm02+nzeXfDfPpigKSUlJFCtWDD09WU2XH97okUU9PT1KlChR0N14J1haWso/PG8w+X7eXPLdvNnk+3lzyXfzZpPv580l383TyYhi/pKQXAghhBBCCCGEDgkWhRBCCCGEEELokGDxHWdkZERQUBBGRkYF3RWRC/l+3lzy3bzZ5Pt5c8l382aT7+fNJd+NeBO90QluhBBCCCGEEEIUDBlZFEIIIYQQQgihQ4JFIYQQQgghhBA6JFgUQgghhBBCCKFDgkUhhBBCCCGEEDokWBRCCCGEEEIIoUOCxffMli1bqFmzJiYmJtjY2NCuXbuC7pJ4QmpqKt7e3mg0GqKiogq6O++9ixcv0rt3b0qXLo2JiQlly5YlKCiItLS0gu7ae+vHH3/E2dkZY2NjatasSWRkZEF3SQDffvst1atXx8LCgqJFi9KuXTtiYmIKulsiF1OmTEGj0TB06NCC7or4f9euXaNr164ULlwYExMTKlSowOHDhwu6W0JIsPg+WbduHd26daNnz54cP36c8PBwPvnkk4LulnjCF198QbFixQq6G+L/nT17lqysLBYsWMDp06eZOXMm//3vf/nyyy8LumvvpTVr1jBs2DCCgoI4evQolSpVokWLFty8ebOgu/be2717NwMGDODAgQNs376d9PR0mjdvzoMHDwq6a+Ixhw4dYsGCBVSsWLGguyL+3927d6lbty6FChVi27ZtnDlzhhkzZmBjY1PQXRNC6iy+LzIyMnB2dmbChAn07t27oLsj8rBt2zaGDRvGunXrKF++PMeOHcPb27uguyWe8N133zF//nwuXLhQ0F1579SsWZPq1avzww8/AJCVlYWTkxODBg1i9OjRBdw78bhbt25RtGhRdu/eTYMGDQq6OwJITk6mSpUqzJs3j0mTJuHt7c2sWbMKulvvvdGjRxMeHs7evXsLuitC6JCRxffE0aNHuXbtGnp6elSuXBlHR0datmzJqVOnCrpr4v/9/fffBAYGsnz5ckxNTQu6O+IpEhMTsbW1LehuvHfS0tI4cuQITZs2Vbfp6enRtGlTIiIiCrBnIjeJiYkA8nflDTJgwABat26t9XdIFLyNGzdSrVo1OnbsSNGiRalcuTILFy4s6G4JAUiw+N7IGQEZP348Y8eOZfPmzdjY2ODj48OdO3cKuHdCURQCAgLo27cv1apVK+juiKc4f/48c+fO5bPPPivorrx3/vnnHzIzM7G3t9fabm9vz40bNwqoVyI3WVlZDB06lLp16+Ll5VXQ3RFASEgIR48e5dtvvy3orognXLhwgfnz5+Pq6soff/xBv379GDx4MMuWLSvorgkhweLbbvTo0Wg0mqe+ctZcAfznP/+hQ4cOVK1aleDgYDQaDb/88ksB38W763m/n7lz55KUlMSYMWMKusvvjef9bh537do1fH196dixI4GBgQXUcyHefAMGDODUqVOEhIQUdFcEcOXKFYYMGcLKlSsxNjYu6O6IJ2RlZVGlShUmT55M5cqV+fTTTwkMDOS///1vQXdNCAwKugPi3xk+fDgBAQFPPaZMmTIkJCQA4OnpqW43MjKiTJkyXL58+XV28b32vN/Pzp07iYiIwMjISGtftWrV6NKlizxdfA2e97vJcf36dRo1akSdOnX46aefXnPvRG6KFCmCvr4+f//9t9b2v//+GwcHhwLqlXjSwIED2bx5M3v27KFEiRIF3R0BHDlyhJs3b1KlShV1W2ZmJnv27OGHH34gNTUVfX39Auzh+83R0VHr9zMADw8P1q1bV0A9EuJ/JFh8y9nZ2WFnZ/fM46pWrYqRkRExMTHUq1cPgPT0dC5evEipUqVedzffW8/7/cyZM4dJkyap769fv06LFi1Ys2YNNWvWfJ1dfG8973cD2SOKjR
o1Ukfk9fRkUkZBMDQ0pGrVquzYsUMt+5OVlcWOHTsYOHBgwXZOoCgKgwYNYsOGDYSFhVG6dOmC7pL4f02aNOHkyZNa23r27Em5cuUYNWqUBIoFrG7dujplZs6dOye/n4k3ggSL7wlLS0v69u1LUFAQTk5OlCpViu+++w6Ajh07FnDvRMmSJbXem5ubA1C2bFl5Ml/Arl27ho+PD6VKlWL69OncunVL3SejWflv2LBh9OjRg2rVqlGjRg1mzZrFgwcP6NmzZ0F37b03YMAAVq1axe+//46FhYW6jtTKygoTE5MC7t37zcLCQmftqJmZGYULF5Y1pW+Azz//nDp16jB58mQ6depEZGQkP/30k8xiEW8ECRbfI9999x0GBgZ069aNR48eUbNmTXbu3Cl1fIR4iu3bt3P+/HnOnz+vE7hL5aH817lzZ27dusW4ceO4ceMG3t7ehIaG6iS9Eflv/vz5APj4+GhtDw4OfuaUbyHeZ9WrV2fDhg2MGTOGr7/+mtKlSzNr1iy6dOlS0F0TQuosCiGEEEIIIYTQJQtvhBBCCCGEEELokGBRCCGEEEIIIYQOCRaFEEIIIYQQQuiQYFEIIYQQQgghhA4JFoUQQgghhBBC6JBgUQghhBBCCCGEDgkWhRBCCCGEEELokGBRCCGEEEIIIYQOCRaFEEIIIYQQQuiQYFEIIYQQQgghhA4JFoUQQgghhBBC6Pg/qtaMdPOm/NwAAAAASUVORK5CYII=", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAApMAAAI/CAYAAAAryFJoAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAgAElEQVR4nOzde1xP9x/A8depXKKpUXObKSS6fPtWIqKQ+5pFNdcRq99yHz9+2pjbbDMaZozdMLc0d8tcJhm5rAvdRObyNZIWKaKmy/n9kc66ukQKn+fj4bHv95zzOedzvrLe38/5fN5vSZZlBEEQBEEQBKE8tCq7A4IgCIIgCMLzSwSTgiAIgiAIQrmJYFIQBEEQBEEoNxFMCoIgCIIgCOUmgklBEARBEASh3EQwKQiCIAiCIJSbTmV3oDBDQ0PZ2Ni4srshCIIgCILwUJGRkddlWTaq7H5UtioVTBobGxMREVHZ3RAEQRAEQXgoSZIuVXYfqgLxmFsQBEEQBEEoNxFMCoIgCIIgCOUmgklBEARBEASh3EQwKQiCIAiCIJSbCCYFQRAEQRCEchPBpCAIgiAIglBuIpgUBEEQBEEQyk0Ek4IgCIIgCEK5iWBSEARBEARBKDcRTAqCIAiCIAjlJoJJQRAEQRAEodxEMCkIgiAIgiCUmwgmBUEQBEEQhHITwaQgCIIgCIJQbiKYFARBEARBEMpNBJOCIAiC8BxKS0vjm2++AeDq1at4eHgo+wYNGoRKpWLRokWV1T3hJSLJslzZfVC0adNGjoiIqOxuCIIgCEKVp9FocHV1JS4ursj2a9eu0bFjR86dO1dJPXt5SJIUKctym8ruR2UTI5OCIAiC8Bzy8/Pj/PnzqNVqPD09sbS0BKBHjx4kJiaiVqs5fPgwnTt3pmCg5vr16xgbG1dir4UXkQgmBUEQBOE5NG/ePJo3b05UVBQLFixQtu/cuVPZ3qlTp0rs4YuhcDBeFkmSfpAkyfwhx8ySJGny/ddzJEnqdv+1RpIkw0ftjyRJv0qSZPAYx9eQJGm/JElRkiQNeJS+Pi6dp3kyQRAEQRCEl40sy96PefyMJ7hWn8dsYnO/nfr++8DyXrssYmRSEARBEJ4T208m4jjvACZ+u3BffpRbWTkPbaOjo0NeXh4AWVlZFd3FKmXdunW0bdsWtVrN+++/z6VLlzA1NeX69evk5eXRqVMn9u3bh0ajoVWrVgw
ZMoTWrVvj4eHB3bt3S5xv1KhRtGnTBgsLC2bOnKlslyTpoCRJbe6/zpAk6VNJkqIlSTouSVL94ueRJGm1JEkexbbpSpK0W5Ikn/vvh0qSFHZ/RPFbSZK072/XSJJkKEmSsSRJZ+6f66wkSeslSeomSdIRSZL+lCSprSRJrwHrAPv752le0FdJknwlSVpQ6PpekiQtfdC1yyKCSUEQBEF4Dmw/mciHW2NJTMtEBv7OkkhKucn2k4kPbGdsbExkZCQAmzdvfgY9rRpOnz5NYGAgR44cISoqCm1tbX7//XemTp3KqFGj+PLLLzE3N6dHjx4AJCQkMHr0aE6fPk2dOnWUlfKFffrpp0RERBATE8Pvv/8OoFvKpWsDx2VZtgYOAT6P0F094BcgQJbl7yVJag0MABzvjyjmAkNKadcC+BJodf/PYKAjMBn4SJblvwFv4LAsy2pZls8XarsF6Ffo/QBg42NcWyGCSUEQBEF4DizYm0Bmdq7yXlu3DtUbt2Zw745MmTKlzHaTJ09m+fLl2NjYcP369QdeY/v27UiSxJkzZ0rd7+Xl9dCAtE+fPqSlpT3wmAdxc3PDwcHhkY4tnhKpsODgYCIjI7G3t0etVhMcHMyFCxfw9vbm1q1brFixAn9/f+X4OnXqcOzYMQCGDh1KaGhoiXP+/PPP2NraYmNjw6lTpwBqlnLpe0DQ/deRgPEj3MoOYJUsy2vuv3cB7IBwSZKi7r9vVkq7i7Isx8qynAecAoLl/DQ9sQ+7rizLKcAFSZIcJEmqR34weuQxrq0QcyYFQRAE4TlwNS2zxDajvlOQgE3z3lS2GRsbF0kX1KpVK2JiYpT3c+fOLfMaAQEBdOzYkYCAAGbPnl2ufv7666/lagf5uTMjIyPR09PjwoULNGv2wBiGRo0alQhut59MZMHeBM4Ex6HbsjOzFi3Azaaxsv/u3btcuXIFgIyMDF555RUAJEkqcp7i7y9evIi/vz/h4eG8+uqreHl58dNPP5U2KJct/5t3MZdHi7WOAL0kSdpwv60E/CTL8ocPafdPodd5hd7nPeJ1NwLvAGeAbbIsy1L+jT/KtRViZFIQBEEQngONDEp7olr29seVkZFBaGgoP/74Ixs3bgRAlmXGjh2LmZkZ3bp14++//wZgz549eHp6Km0PHjyIq6srkB/MFoyArlmzBpVKhbW1Ne+++y4AKSkpuLu7Y29vj729PUeOHFHOs3XrVt566y0GDhyo9AHyR0THjx9Phw4daNasmRJAajQaJSXS6tWradelF0Pc3+KPzweRk3GTpLBdDOjpiJmVLefPn+fSpUu4urqSnp5OdnY2tra2ytzI9PR0NBoNANOmTePo0aOoVCri4+MBuHXrFrVr10ZfX5/k5GR27979VD73+2YAN4Fl998HAx735zwiSVJdSZKaPs0L3rcNeBsYRH5gWa5ri2BSEARBEJ4DU3qaoVut6DoI3WraTOlp9lTOv2PHDnr16kXLli2pV68ekZGRbNu2jYSEBOLj41mzZg1Hjx4FoFu3bvzxxx/cuXMHgMDAQAYOHFjkfKdOnWLu3LkcOHCA6OhovvrqKwAmTJjAxIkTCQ8PZ8uWLXh7/7sQOiAggEGDBjFo0CACAgKKnC8pKYnQ0FCCgoLw8/Mr9R5iYmKp9/aHNBy+iNsngtA1bYcsaXHhwnk6d+6MRqPh1q1bXLp0ib/++gt9fX18fPKnNNarV4+jR4/SunVroqKiOHPmDDExMbRs2RIAa2trbGxsaNWqFYMHD8bR0fEpfOpFTAB0JUmaL8tyPDAd2CdJUgzwG9DwaV9QluWbwGmgqSzLYfe3Pfa1xWNuQRAEQXgOFDyqXbA3gatpmTQy0GVKT7Mij3CfREBAABMmTABg4MCBBAQEkJOTw6BBg9DW1qZRo0Z07doVyF8h3qtXL3755Rc8PDzYtWsX8+fPL3K+AwcO4OnpiaFhfgrFunXrArB//35ltA/yR/wyMjK4c+cOf/75Jx07dkS
SJKpVq0ZcXJwy8ujm5oaWlhbm5uYkJyeXeg86r1uiVaMWAFo1amPQcQg6rxiSEbMP95YSzs7OfPnll3Tu3Jm0tDT++ecf5TG3lpYWgwcPZvLkyfTq1QsfHx/c3NzYs2cPenp6QP7oZ2GSJN0AkGW5c8E2WZb1Cr3eDGy+/3pWoe1ehV4bFzrliELbAykljU+h468DlmWcU1OwT5blg8DBQvuUvt5/71rKNUq9dllEMCkIgvASy8nJQUdH/Cp4XrjZNH5qwSP8O7/wctLfJO7bT1hkFLVq6JCbm4skSfTr16/MtgMHDmTp0qXUrVuXNm3aKEHZw+Tl5XH8+HFq1iy6dmXVqlXcvHkTExMTID/IDAgI4NNPPwWgRo0ayrFllYLWq1Xokb8kIWlXA+DV2jXIycl/nO3l5cX27duxtrZm9erVHDx4sMR5du3axaFDh/jll1/49NNPiY2NFf9OHkA85hYEQahkd+7c4c0338Ta2hpLS0sCAwMJDg7GxsYGKysrRo4cyT//5M+rDw8Pp0OHDlhbW9O2bVtu375Nbm4ukydPxtLSEpVKxddffw1AZGQkzs7O2NnZ0bNnT5KSkoD8ih4ffPABbdq0UR49Ci+fwqmG7iQcoZZ5F+p5/8DibUe4fPkyJiYm1KtXj8DAQHJzc0lKSiIkJERp7+zszIkTJ/j+++9LPOIG6Nq1K5s2beLGjRsApKamAvnlHgt+RgGioqKA/JHRPXv2oNFo0Gg0REZGFpk3+Shs3jAodSpAb8t/n9Levn2bhg0bkp2dzfr164H8eZ6jR48G8oPdy5cv06VLF7744gvS09PJyMh4rH68bESYLQiCUMn27NlDo0aN2LVrF5C/EMDS0pLg4GBatmzJsGHDWL58OaNHj2bAgAEEBgZib2/PrVu30NXV5bvvvkOj0RAVFYWOjg6pqalkZ2czbtw4duzYgZGREYGBgUybNo2VK1cCcO/evYeWiBNebIVTDd05/Tv67TzIzM5lwd4E3Gwa4+7uzunTpzE1NcXc3Jw33niD9u3bK+21tbVxdXVl9erV/PTTTyXOb2FhwbRp03B2dkZbWxsbGxtWr17NkiVLGDNmDCqVipycHJycnPDz8+PSpUtFUgKZmJigr6/PH3/88cj31MxIj//0t2LB3gSuAA31dfmovxVp0deISMk/5pNPPqFdu3YYGRnRrl07bt++XeQcubm5DB06lPT0dGRZZvz48RgYPHL1wpeSVNZQcWVo06aNLP7nJgjCy+bs2bP06NGDAQMG4OrqSp06dRg3bhyHDh0C8vPlLVu2jNmzZ+Pr61tk9SuAu7s7vr6+dO/eXdkWFxenrHyF/F+QDRs2ZN++fXTu3JnZs2fj7Oz87G5SeCpmzJiBk5MT3bp14/Dhw/j6+lKtWjWOHTuGru7jreo28dtFaRGABFwslGpIKJskSZGyLLep7H5UNjEyKQiCUAkK5qoVLKT4ZHUQ0pUopk+frixyeBKyLGNhYaEkYS6udu3aT3yNqmbFihXUqlWLYcOGVXZXKsycOXOU1+vXr+fDDz9k6NCh5TpXIwNdEkvJXfm0Ug0JL48KnzMpSVIvSZISJEk6J0lS6Wv5BUEQXiLFy+JdunyFuXsvoGfRhSlTpnDs2DE0Gg3nzp0DYO3atTg7O2NmZkZSUhLh4eFA/tyvnJwcunfvzrfffktOTn6d5tTUVMzMzEhJSVGCyezs7IKKHS8sX1/fCg0kvb29i6xCrmiffPIJZmZmdOzYkUGDBuHv769UoPnhhx/4+eef+fjjjxkyZAhJSUk4OTmhVquxtLTk8OHDDz1/RacaEl4eFRpM3i8MvgzoDZgDgyRJMq/IawqCIFR1xcviZadouPjjBIa86czs2bOZO3cuq1atwtPTEysrK7S0tPD19aV69eoEBgYybtw4rK2t6d69O1lZWXh7e/PGG28oyaE3bNhA9erV2bx5M1OnTsXa2hq1Wq3kCKwKNBoNrVq1wsvLi5YtWzJkyBD
279+Po6MjpqamhIWFkZqaipubGyqVCgcHB2JiYsjLy8PY2LhIuT5TU1OSk5OZNWuWUh7v/Pnz9OrVCzs7Ozp16qSUB9y0aROWlpZYW1vj5OT0WH3+4YcfMDd/Nr/CCnIwRkdHs3v37hLzW729venbty8LFixg/fr1bNiwgZ49exIVFUV0dDRqtfqh13Czaczn/a1obKCLBDQ20OXz/lZPdbW48HKo6MfcbYFzsixfAJAkaSP5mdaf3Vc7QRCEKqZ4WTzdZnboNrNDAsILzVU7efJkibb29vYcP368xPaFCxeycOHCItvUarUy77Kw0lKhVIZz586xadMmVq5cib29PRs2bCA0NJSdO3fy2Wef0aRJE2xsbNi+fTsHDhxg2LBhREVF8fbbb7Nt2zZGjBjBH3/8QdOmTalfv36Rc//nP/9hxYoVmJqa8scffzB69GgOHDjAnDlz2Lt3L40bNy6zfrRGo1EC0RMnTmBhYcGaNWvo06cP/v7+tGnThj179vDRRx+Rm5uLoaEhwcHB/P7770qeRkmSOHTo0COnyynuyJEjvP3229SsWZOaNWvy1ltvPfB4e3t7Ro4cSXZ2Nm5ubo8UTMLTTzUkvJwq+jF3Y+ByofdX7m8TBEF4aVV0WbznhYmJiTLyamFhgYuLC5IkYWVlhUajITQ0VCnB17VrV27cuMGtW7eUFe0AGzduZMCAAUXOm5GRwdGjR/H09EStVvP+++8raZEcHR3x8vLi+++/Jzc3l7IkJCQwevRoTp8+TZ06dfjmm2+UfSkpKfj4+Cgjh5s2bQLA39+fZcuWERUVxeHDhx97QQzkT4FwnHeAOb/EszL0IttPJj5SOycnJw4dOkTjxo3x8vJizZo1j31tQSivSs8zKUnSfyRJipAkKSIlJaWyuyMIglDhXta5agWBkonfLtyXH+Uf+d/PQEtLS0lKraWlpcz/LE379u05d+4cKSkpbN++nf79+xfZn5eXh4GBAVFRUcqf06dPA/mLdObOncvly5exs7NTciAW16RJE6Vc3tChQwkNDVX2HT9+HCcnJyW5dkFlF0dHRyZNmsSSJUtIS0t77CTXhefS1ni9NX+fOsrUnyMJOHKWoKCgB7a9dOkS9evXx8fHB29vb06cOPFY1xaEJ1HRwWQi0KTQ+9fvb1PIsvydLMttZFluY2RkVMHdEQRBqHwv41y14ouOkm9lkXwr64Ejb506dVKSSh88eBBDQ0Pq1KmjVGaZNGkSrVu3pl69ekXa1alTBxMTE2XEUJZloqOjgfy5lO3atWPOnDkYGRlx+fJlpX+FA92s7Lwi55Qk6aH36Ofnxw8//EBmZiaOjo7KPM1HVXgubY2GLdFt0ZYL347i/aHuWFlZoa+vX2bbgwcPKrWjAwMDlcftgvAsVPScyXDAVJIkE/KDyIHA4Aq+piAIQpX3rOeqSZLEkCFDWLduHZBfRrFhw4a0a9fuoaNepfH29mbSpEmPvCCl+KIjyA/yChJkl2bWrFmMHDkSlUpFrVq1iiTGHjBgAPb29iVqJRdYv349o0aNYu7cuWRnZzNw4ECsra2ZMmUKf/75J7Is4+LigrW1tRLoFvQv+VYWKdcSmbd6J35efdmwYQMdO3bkl19+AcDBwYHRo0dz8eJFTExMSE1NpW7dupw/fx4rKyusrKwIDw/nzJkztGrV6pE+Hyg5l7ZO2/4YdByCnJ3Fpd8/x87ODh8fH2V/4XsfPnw4w4cPf+RrCcLTVOFJyyVJ6gMsBrSBlbIsf1rWsSJpuSAIQsXQ09OjRYsWSnLr3bt38+GHH/L666+XK5h8XFU5QbbjvANF8i3mpCeT/PNMDN4wwzArEXNzc9auXVtkAc7u3bv56KOPyMvL47XXXuO3335j3LhxhISEKHNAV69eXaSe9OP2I2XnArJv/IWOnMPHE0fx4YcfPtX7fhns3LmT+Ph4/PwqJjOhSFqeT1TAEQRBeAno6ekxfvx4bG1t8fDwYNiwYVhYWHD48GG
CgoKYNWsWenp6TJ48GQBLS0uCgoIwMjLinXfe4cqVK+Tm5vLxxx8zYMAAOnfu/MCVzcUVD5QKNDbQ5YjfkydpfxLFA92c9GT+3jybxu9980wD3eIjpJA/l/ZFnwJRUXJych573urjEsFkPlEBRxAE4SUxcOBA5syZg6urKzExMYwcOfKhya1LqxteWMHK5kOHDimPfEszpadZqYFSVVh0VFUqwRQEjIUrI03paSYCyQdYs2YN/v7+SJKESqVCW1ubmjVrcvLkSRwdHVGpVERERLB06VK8vLyoU6cOERERXLt2jfnz5+Ph4UFeXh5jx47lwIEDNGnShGrVqjFy5Eg8PDyIjIxk0qRJZGRkYGhoyOrVq2nYsGFl33aVI4JJQRCEl4RKpUKj0RAQEECfPn0eqY2VlRX//e9/mTp1Kq6urnTq1KnI/rJWNhdXlQOl4oGujn59mvt+WymBrsj7+OhOnTrF3LlzOXr0KIaGhqSmpjJp0iSuXLnC0aNH0dbWLjGnNikpidDQUM6cOUPfvn3x8PBg69ataDQa4uPj+fvvv2ndurWSs3PcuHHs2LEDIyMjAgMDmTZtGitXrqycG67CRDApCILwgipc/zszO5ftJxPp27cvkydP5uDBg0XS4ujo6JCX9+8K5qysLABatmzJiRMn+PXXX5k+fTouLi7MmDGjXP2pqoFSVQ50hbIdOHAAT09PDA0NgX+/yHh6eqKtrV1qGzc3N7S0tDA3Nyc5ORmA0NBQPD090dLSokGDBnTp0gXIzzUaFxdH9+7dAcjNzRWjkmUQwaQgCMILqPj8O1mGD7fG8l/H3sycaYCVlVWRSjjGxsbKQpwTJ05w8eJFAK5evUrdunUZOnQoBgYG/PDDD0WuU9bK5udNVQ10hZIKviSdCT6Fbs5t7E8mFvm7q127dpltCy+IetiaEVmWsbCwUOrbC2Wr9KTlgiAIwtNXWiqezOxcVkXdZvz48SWOd3d3JzU1FQsLC5YuXUrLli0BiI2NpW3btqjVambPns306dOLtDMyMuK7776jf//+WFtbl6hGIwhPU5HE7m+oSI4+yP/WHWH7ycQy5+s+jKOjI1u2bCEvL4/k5GTlS5aZmRkpKSlKMJmdnc2pU6ee1q28UMTIpCAIwguoeM7CNyZtLrG9c+fOdO7cGQBdXV327dtX4jzGxsb07NmzxPbCo5q9e/emd+/eT6HXVZebmxuXL18mKyuLCRMm8Oqrr3Ls2DEWLlzIV199xVdffcWFCxe4cOEC7777LosXL+bzzz9n69at7Nixg4EDB5Kenk5eXh7m5uZcuHChsm/puVT4S1J1o6botx+AZs0UhmzQwbNnp4e0Lp27uzvBwcGYm5vTpEkTbG1t0dfXp3r16mzevJnx48eTnp5OTk4OH3zwARYWFk/zll4IIpgUBEF4AVWVFcovipUrV1K3bl0yMzOxt7dn7969zJ8/H4DDhw9Tr149EhMTOXz4ME5OTtjY2BAVFaXst7S0JDw8nJycHNq1a1eZt/JcK/4lSc/KBT0rFyRgdSlpnLy8vPDy8gIosRgnIyMDyC/f6e/vj56eHjdu3KBt27ZYWVkBoFarOXTo0FO/jxeNCCYFQRBeQFU5Fc/zaMmSJWzbtg2Ay5cvc/nyZTIyMrh9+zaXL19m8ODBHDp0iMOHD9O/f390dHRo3rw5p0+fJiwsjEmTJnHo0CFyc3NLrIgXHl1FfUlydXUlLS2Ne/fu8fHHH9OgQYMnOt/LRgSTgiAILyCxQvnJFF4JXzs1gdzwX4k8doxatWrRuXNnsrKy6NChA6tWrcLMzIxOnTqxcuVKjh07xpdffgmAk5MTu3fvplq1anTr1g0vLy9yc3NZsGBBJd/d86uiviQVnrYhPD4RTAqCILygxArl8im+Ev7vGze5e0diX8JNWun+xfHjxwHo1KkTM2bMYMaMGdjY2BASEoKuri76+vrK/mHDhjFs2DCMjIy4ceMGycnJWFpaVtq9Pe/El6S
qSQSTgiAIglBI8ZXwuiZ23D65m8E9O9Cjgw0ODg5AfrB4+fJlnJyc0NbWpkmTJrRq1Upp165dO5KTk3FycgLyk8Zfu3YNSZKe7Q29YMSXpKpH1OYWBEEQhEKK1+ouIMEzrdUtVH2iNnc+kWdSEARBEAopazGHWAkvCKUTwaQgCIIgFDKlpxm61YqW46tqK+F37tzJvHnzXtjrCc8X8ZhbEARBeCaWLFnC8uXLsbW1Zf369ZXdnQcqvJq7qi3yyMnJQUdHLHmoCsRj7nwimBQEQRCeiVatWrF//35ef/31yu5KlbdmzRr8/f2RJAmVSoW2tjY1a9bk5MmTODo6olKpiIiIYOnSpXh5eVGnTh0iIiK4du0a8+fPx8PDg7y8PMaOHcuBAwdo0qQJ1apVY+TIkXh4eBAZGcmkSZPIyMjA0NCQ1atX07BhQzp37oy1tTW///47OTk5rFy5krZt27J69WrlesnJyfj6+ipVfJYvX461tTXvvPMOV65cITc3l48//vilKK0pgsl84jG3IAiCUOEKgo/evXujr6+Pv7+/ss/S0hKNRoNGo6F169b4+PhgYWFBjx49yMzMT1DduXNnpk6dStu2bWnZsiWHDx8G8nM5FlSaAejYsSPR0dHP9uaeslOnTjF37lwOHDhAdHQ0X331FQBXrlzh6NGjLFy4sESbpKQkQkNDCQoKws/PD4CtW7ei0WiIj49n7dq1RWpMjxs3js2bNxMZGcnIkSOZNm2acq67d+8SFRXFN998w8iRI0tca/z48Tg7OxMdHc2JEyewsLBgz549NGrUiOjoaOLi4ujVq1dFfDRCFSWCSUEQBKHCrVixgkaNGhESEsLEiRPLPO7PP/9kzJgxnDp1CgMDA7Zs2aLsy8nJISwsjMWLFzN79mwA3nvvPaVM3tmzZ8nKysLa2rpC76WiHThwAE9PTwwNDQGoW7cuAJ6enmhra5faxs3NDS0tLczNzUlOTgYgNDQUT09PtLS0aNCgAV26dAEgISGBuLg4unfvjlqtZu7cuVy5ckU516BBg4D8QP3WrVukpaWV6N+oUaMA0NbWRl9fHysrK3777TemTp3K4cOHlVybwstBBJOCIAhClWFiYoJarQbAzs4OjUaj7Ovfv3+J7Z6engQFBZGdnc3KlSuVOszPm+0nE3GcdwATv10s+u0sCddulzimdu3aZbavUaOG8vph09dkWcbCwoKoqCiioqKIjY1l3759yv7ieTAfJS9my5YtOXHiBFZWVkyfPp05c+Y8tM2LTJKkHyRJMq/sfjwrIpgUBEEQKkzhIOlaeha/xiSho6NDXl6eckxWVpbyunBQpK2tTU5OTol9hbfXqlWL7t27s2PHDn7++WeGDBlS0bf01BVU3ElMy0QGsoxas3P7VtaExAGQmpparvM6OjqyZcsW8vLySE5OVkoGmpmZkZKSUuSx96lTp5R2gYGBQP7Ipr6+folRRhcXF5YvXw5Abm4u6enpXL16lVq1ajF06FCmTJnCiRMnytXnB8nNzX34QQ9Q+Geposmy7C3Lcvwzu2AlE8GkIAhCFZecnMzgwYNp1qwZdnZ2tG/fnm3btlV2tx6qeJCUkyfzya54rlNHCTZOnDjBxYsXn+g63t7ejB8/Hnt7e1599dWn0PNnq3jFnepGTanj8A6+g97C2tqaSZMmleu87u7uvP7665ibmzN06FBsbW3R19enevXqbN68malTp2JtbY1arebo0aNKu5o1a2JjY4Ovry8//vhjifN+9dVXhISEYGVlhZ2dHfHx8cTGxtK2bVvUajWzZ89m+vTpj9VXjUZDq1atGDJkCK1bt8bDw4O7d+9ibGzM1KlTsbW1ZdOmTURFReHg4IBKpaJfv37cvHkTgPDwcPaXFr8AACAASURBVFQqFWq1milTpiglK1evXk3fvn3p2rUrLi4uZGRk4OLigq2tLVZWVuzYsaPI9b28vGjZsiVDhgxh//79ODo6YmpqSlhYGACzZs1i+PD
hdOrUiaZNmwIYSJI0X5KkWEmS9kiSVA1AkqSDkiS1uf86Q5KkTyVJipYk6bgkSfXvb29+/32sJElzJUnKeKwPrQoRuQUEQRCqMFmWcXNzY/jw4WzYsAGAS5cusXPnzkru2cMVD5IAsrJzOS6bUic1CAsLC9q1a0fLli2f6Dp2dnbUqVOHESNGPNF5KsvVtMwS2/SsXHjFyoXoMirueHl5KY/0C+aMFsjIyI9JtLS08Pf3R09Pjxs3btC2bVusrKwAUKvVHDp0qNRzDx06lMWLF5d5vfr16ytBWGE9e/Ys8x4fRUJCAj/++COOjo6MHDmSb775BoB69eopXz5UKhVff/01zs7OzJgxg9mzZ7N48WJGjBjB999/T/v27ZUFSAVOnDhBTEwMdevWJScnh23btlGnTh2uX7+Og4MDffv2BeDcuXNs2rSJlStXYm9vz4YNGwgNDWXnzp189tlnbN++HYDz588TEhJCfHw8arXaBAiRZfl/kiRtA94Ethe7tdrAcVmWp0mSNB/wAeYCXwFfybIcIEmS7xN9eJVMBJOCIAhV2IEDB6hevTq+vv/+rmnatCnjxo1Do9Hw7rvvcufOHQCWLl1Khw4dOHjwIDNnzsTAwIDY2FjeeecdrKys+Oqrr8jMzGT79u00b96clJQUfH19+euvvwBYvHgxjo6O/P7770yYMAHIny936NAhXnnllcfue/Eg6fVRKwFIvpPH8UJz9AqLi4tTXk+ePFl5XfCIFsDQ0LDIXMqrV6+Sl5dHjx49HruPVUEjA10SSwkon0bFHVdXV9LS0rh37x4ff/wxDRo0eOJzVpQmTZrg6OgI5Ae0S5YsAVBSDKWnp5OWloazszMAw4cPx9PTk7S0NG7fvk379u0BGDx4MEFBQcp5u3fvrixikmWZjz76iEOHDqGlpUViYqKyYMnExEQJti0sLHBxcUGSJKysrIr8vPXu3Ztq1aoVHCsBe+7vigWMS7m1e0BBhyKB7vdftwfc7r/eAPjznBKPuQVBEKqwU6dOYWtrW+q+1157jd9++40TJ04QGBjI+PHjlX3R0dGsWLGC06dPs3btWs6ePUtYWBje3t58/fXXAEyYMIGJEycSHh7Oli1b8Pb2BsDf359ly5YRFRXF4cOH0dUtX1DzLMoSrlmzhnbt2vHpp5+ipfVsfqVJksTQoUOV9zk5ORgZGeHq6lqu81ndDuPe6ZAi24pX3Jk1axaNGzdGrVZjampK//79iY9/+JQ8Ly8vfv31V+Lj4x+4OGnGjBns37+fgwcPMnnyZJ5FzufC82ndlx8lKzuvyP6ChT8PWnj0KAq3X79+PSkpKURGRhIVFUX9+vWVObuF5+tqaWkp77W0tEqdu3v/502W/13xlEfpg3TZhY7JLeOY55oIJgVBEJ4jY8aMwdraGnt7e7Kzs/Hx8cHKygpPT88iwYW9vT0NGzakRo0aNG/eXBm1KzzKsn//fsaOHYtaraZv377cunWLjIwMHB0dmTRpEkuWLCEtLa3c1VaeRVnCYcOGcfnyZTw9PZ/aOR+mdu3axMXFKTkwf/vtNxo3Ln91nBWf+vHNrIk0NtBFAhob6PJ5f6sSFXcmTpxIVFQUf/75JwMGDKBr166kpKQ88NyrV6/m6tWrD+3DnDlz6NatW7nv4XEVn0+bfCuLlGuJzFudP31jw4YNdOzYsUgbfX19Xn31VSXH6Nq1a3F2dsbAwIBXXnmFP/74A4CNGzeWed309HRee+01qlWrRkhICJcuXaqYG3w0xwH3+68HVmZHnpQIJgVBEKqgglGbz47eZuWOA2w/mQjAsmXLCA4OJiUlhUWLFlG/fn2io6OJiIjg3r17SvtHGWXJy8vj+PHjSoqYxMRE9PT08PPz44cffiAzMxNHR0fOnDlTrntws2nM5/2tHhokPY/69OnDrl27AAgICFByM0L+6ms3NzdUKhUODg7ExMSQl5eHsbFxkZyNpqamJCcnM2vWLM4FB3DEryv7fVrxysH
5fOLdl06dOpX52Q8YMICGDRuiUqlQqVSYmZlhY2ODnZ0dPXv2JCkpic2bNxMREcGQIUNQq9VkZmYyZ84c7O3tsbS05D//+Y+SRsjLy4vNmzcXuUZubi5eXl5YWlpiZWXFokWLHvq5aDQaZfELQFhYGE5OTkr/vL29uXv3Lh8uWsW1w0WDPp26r/PlV0to3bo1N2/eVHJZFjA2Nmbx4sVMmTIFlUpFVFQUM2bMAODHH3/Ex8cHtVrNnTt3ysxzOWTIECIiIrCysmLNmjW0atXqofdUgT4AJkmSFAO0ANIrszNPRJblKvPHzs5OFgThX87OznJ4eLgsy7Lcu3dv+ebNm/LNmzflZcuWKcckJibK7u7uldVFoQJsO3FFbjV9t9x0apD8xv9+kas3bCnX7z1G3nbiiizLsnzp0iW5adOm8gcffCD7+/vLsizLK1eulLn/yC0kJER+8803lfMV/jkqvG/QoEHy/PnzleNOnjwpy7Isnzt3Ttnm7u4ub9u2rQLv9vlTu3ZtOTo6WnZ3d5czMzNla2vrIp/r2LFj5VmzZsmyLMvBwcGytbW1LMuyPH78eHnlypWyLMvy8ePHZRcXF1mWZXnmzJnyggULZFmW5a5du8pnz55VjunSpUuJY2RZlo8ePSo3bdpU9vHxke/duye3adNGjomJkWVZljdu3CiPGDFCluWif/eyLMs3btxQXg8dOlTeuXOnLMuyPHz4cHnTpk1F2kRERMjdunWTZVmWc3Jy5Js3bz70s7l48aJsYWEhy7IsX7t2TX7jjTfko0ePKvs3bdokX7t2TTaeGiQ3LfSnse+PcjXDN+SmU3aUee6mTZvKKSkppe67ffu28vrzzz+Xx48f/9C+Pg1AhFzOmAeoxb9lrQcCO8p7rsr+88I9txeEF9Wvv/4K5H/z/+abbxg9ejQAjRo1KjGiIDzfCq+CliQJo/7TuRn8PQNd2qBq0YTatWvzxRdfYGtri7u7O2vWrKFXr16PPbdsyZIljBkzBpVKRU5ODk5OTqxYsYLFixcTEhKClpYWFhYW9O7duyJuE4C0tDQ2bNig/Dw/Ci8vL1xdXfHw8Hjs63l7ezNp0iTMzc0xNjYmIiJCqTTzOFQqFRqNhoCAAPr06VNkX2hoqFK5p2vXrty4cYNbt24xYMAA5syZw4gRI9i4cWOR2tUbN26kQYMGHD16tMgj+3/++afU6yclJVG7dm20tbVJSEggISGBd999l1u3bpGYmEiNGjXQ09MjNjYWyJ9zqaenh4mJCfPnzyc6Ohp9fX0sLCz48ccfOXz4MMHBwUVyWjo7O6Ojo4OhoSF+fn44OzszefLkEvW8C0oyAkUWQS1btozhw4crC2MA5e9M5/zvJF+Ip273UVzftQg5N5uctGvcO7qGjIyujBs3joiICCRJYubMmbi7u1PYunXrWLJkCffu3aNdu3Z07tyZL774gpycHJo2bVpihXsVZQcslfInh6YBJWtXPidEMCkIz5BGo6FXr17Y2dkpNW3XrFnDsWPHmDx5Mjk5Odjb27N8+fIijykB5Refn58f58+fR61W0717d8aMGYOrqytxcXHk5uYydepU9uzZg5aWFj4+PowbNw4/Pz927tyJjo4OPXr0KFIXWah6iq+C1tGri9HbU5GAsGKpYmJiYpTXX3zxBZBfx7pz587K9sIroQvvMzQ0VBJUF1awQOdZSEtLK/LlqKL98MMP5Wq3/WQiC/YmcDUtk8zsXLafTKRv375MnjyZ4ODgEiUHS9O+fXvOnTtHSkoK27dvL5GLMS8vDwMDgyK1xsvSo0cPvL292bJlC6mpqTRp0oTjx49jampKXFwcLVq0KBKsQn5y8tGjRxMREUHv3r3p1q0bWVlZrFy5kkmTJtG9e3c+//xz5RHxnTt3+Omnn6hduzY//fQT8+fP59SpUxgZGREYGMi0adNYuXIlI0aMYOnSpTg5OTFlyhTlenFxcQwfPrzU/ve2bMhazb+P8OV7mbT831b
meaj55JNP0NfXVwLhglySBU6fPk1gYCBHjhyhWrVqjB49muzs7Ef63KoSWZYPA8937c/7xJxJQXjGEhISGD16NKdPn6ZOnTosXLgQLy8vAgMDiY2NJScnR6kuUZp58+bRvHlzoqKiWLBgQZF93333HRqNhqioKGJiYhgyZAg3btxg27ZtnDp1ipiYmMdOJvyye9jK3dWrVyNJEvv371eO2b59O5IkFRkxvn79OtWqVWPFihUPveazWAVdVRT+cjRlyhQl4bSVlZUS6MqyzNixYzEzM6Nbt278/fffSvvS5gCeP3++yAr4P//8U3nfuXPnUlcqr1u3Tkm6/f777yvVVkaNGkULc2sG9OjAqV9+IH8uAXh2seVo7Dlq1arFmTNn+OCDDzh9+jRt27ZFo9Hw6aefAhAcHMw///yDi4sL1tbWNGvWjEmTJtGqVStmzpyJmZkZa9as4c6dO9SqVQsTExM2bdqk3Hd0dHSpn9vevXupVq0a3333HaampiQkJODn54eJiQnGxsbEx8czdOhQdHR0uH07vzRjwVxZQ0ND8vLylKcdS5YsYefOnUybNo3Lly8rC4u0tbV56623cHd3Z8SIEdy4caNEPe+0tDTS0tJwcnIC4N13332kv3fbpq/SvlldGt//mW5s04V5HmrcbBqzf/9+xowZoxxbPBF9cHAwkZGR2Nvbo1arCQ4O5sKFC490XaFiiGBSEJ6x4rnUgoODMTExURI3Dx8+vMxkwg+zf/9+3n//fWX1bd26ddHX16dmzZq89957bN26lVq1aj2dG3lJPMrKXSsrqyIrSAMCArC2LjrgsGnTJhwcHAgICHjoNZ/FKuiqovCXIwcHB6KiooiOjmb//v1MmTKFpKQktm3bRkJCAvHx8axZs6ZItZaxY8fSsWNHvL29yczMJCgoiNGjR3Pt2jVlpMrV1bUgwTRHjhyhb9++qNVqkpOT+euvv6hRowZjxowp8kh5/fr1AHz66afUH7aIBiO+JutyLPf+zq/WI8twJqMmly9fZuDAgfe3yYSFhbFixQp27tyJSqXCx8eHd955h/DwcMLDw7ly5Qrr1q2jRYsWyj3169dPWWG/fv16fvzxR6ytrbGwsCiSHPzz+f7UbtSCanUb8Z8Zi5j+zUbc3NyYO3cun3/+OWvXruXEiRNFKtq8/vrr+Pr68u233ypPKywtLbl48aLyqH7//v306dMHf39/bGxslFKX1atXx8XFBbVajZ+fHy1btiyznndpLCwsiIyMLHN/MyM9jvh1xcPudT4fYP/IC7NkWWb48OFKXxISEpg1a9YjtRUqhggmBaGCPSyXmoGBQYVeX0dHh7CwMDw8PAgKCqJXr14Ver0X0YNW7gJ06tSJsLAwsrOzycjI4Ny5c6jV6iLHBAQE8OWXX5KYmMiVK1ceeL0XeRX0g4SGhjJo0CC0tbWpX78+zs7OhIeHc+jQIWV7o0aN6Nq1q9ImJCSEoKAgZs6cyYEDB4iLi+P69evUrl2bVatWkZuby6VLl9ixYwdRUVHUq1ePCRMmKDkGIf9LV+3atZW8hgcPHlRGun7++WciFv+HpNUTyL7+F9nX/+KNSfkjzrnG/84FNDAwYO3atUD+6Ke+vj4xMTHY2tqyb98+1Go17dq1IzMzk71796Ktra3c05dffqnMSzUxMWHPnj1ER0cTHx+vrFZWv+3Da++vxmjYVzT+z/fU7PAuiw9cUFb5p6en069fP+rWrcvOnTvx8fEhICCAhg0bkpCQwIIFC4iNjWXu3Lls2rSJe/fu4e/vT79+/Xj11VdZt24dlpaWHD9+nMWLF9OmTRu0tLQ4ceKEEjxmZ2eXqOdtYGCAgYEBoaGhwL9BOOQH+j/99JOSsgdg69atSpLwsnTv3p1ly5Yp74s/5nZxcWHz5s3KCHVqamplp/h56YlgUhAq0KPkUmvTpg0ajYZz584B/+ZOK8srr7yiPLYqrnv37nz77bfK46zU1FQyMjJIT0+nT58+LFq
0qMzHZkLZBg4cyMaNG8nKyiImJoZ27doV2S9JEt26dWPv3r3s2LFDKc9W4PLlyyQlJdG2bVveeeedUucpFudm05gjfl25OO9Njvh1feECyYIvWR2/OMCF63eUoOhxZGVlMXr0aDZs2ECdOnXw8fHhr7/+wtLSkjfeeINdu3axbds28vLy6NKlywPPVdpI18WLF/H390f9/pc0GrkU3eb2yLnZSpuGhkW/CBbMc9bW1lb+DcqyzNdff62c++LFi+Wq1FO8NGVedhaJ2/0Z3KsDKpWK+Ph45s2bx3fffcebb76Jra0tr732mnK8u7s7qampWFhYsHTpUuVJSK9evcjJyaF169b4+fnh4OBQ6vUfVM971apVjBkzBrVaraQagvyyixs3bmTy5MmYmZnRunVr9u7d+9BqStOnT+fmzZtYWlpibW1NSEjRhO7m5ubMnTuXHj16oFKp6N69O0lJSY/3gQpPlViAIwgVqLTaxAW51H76Yirm5uYsWbIEBwcHPD09lQU4bm5uDBw4kD/++IOhQ4fSrFkzcnJyeP3112nVqhW3bt1CV1eXUaNG0ahRI86cOYNarebevXtkZ2djZWVF9erV8fHxwd3dnbfffpusrCxkWWbhwoWV9Gk8vx60crfAwIEDWbJkCenp6Xz55Zd89tlnyr7AwEDeeecd5biRI0fy3//+95n0vSoq+JKVmZ2LVF2Xe5l3+HBrLH0bmxMYGMjw4cNJTU3l0KFDLFiwgJycHL799luGDx/O33//TUhICIMHD1Yql1haWqKlpUVAQACmpqa4urqSmJhIrVq18PX1xdTUlOrVq5fZn9TUVBYuXMjOnTvp3Lkzn3zyCbdv3+bWrVvUrl2bqX1tmboulMwLkdR8I7/cniTB2C4tHnqvPXv2ZPny5XTt2pVq1apx9uxZGjdujJOTU6n3VJbii7JqNGhBg3f9kYCYQouyevXqpeSmPHjwoLLYTldXt8zH0rt37y51e0GN7wJl1fO2s7Mr8iV1/vz5yuv27dsrScYLe1BtcT09PX766acSbQqXNBwwYECJBUZC5RHBpCBUoOK/AAAkLS30ek7kdKFfAC4uLpw8eRLIH8no0KEDw4cPV+bhRUdHc+vWLUaNGlVixeLq1avx9fVl6dKlQH5d2u7duzNixAjlmLCwsKd+by+yB63cPXjwIDdu3CjRpm3btsTGxlKrVi1l1KdAQEAA165dUx4BXr16lT///BNTU9Nncj9VTeEvWdq6dajR2JzzK95nXWsHBrdTYW1tjSRJzJ8/nwYNGtCvXz8OHDiAubk5b7zxhpJqxsDAQJkDePfuXUxMTLh8+TLt27dXphPk5OTw5ptvPqg7mJqa8vHHH/P5559z+PBh5TGrg4MDNjY2+A1yQdfgNQxM8pNxNzbQ5W6t6vRRNXzovXp7e6PRaLC1tUWWZYyMjNi+fXuZ91SWiqzfLQhPSgSTglCByvMLICQkhGrVquHr66tss7a2LvKtvCw5OTncuXOnxOpH4dEVHjWD/IUWH26N5b+OvZk50wArK6siqXYKmzdvHjVr1iyy7ezZs2RkZJCY+O9j3JkzZxIQEKDMh3vZFP+SZdQ3P52MBCyY92aJLAWSJClflgrbfjKREL2u5Hm255XT+9CrdZtbt6KwtLSkSZMmTJgwgfr16xcpy9erVy+aN28O5I90Ffy7Kmukq8x8hX6Xi7wt/DNhaGionFdLS4vPPvusyEh1gdLuqSxTepoV+bmEhy/KKp4iShAqipgzKQgVqPiqXB39+jT3/faBvwDi4uKws7MrdV9BChW1Wl0kdUZgYCBqtZrGjRuTmprKW2+99fRu4iVT2tSEzOxcVkXdZvz48Q9s27t37xJz8wICAujXr1+Rbe7u7o+0qvtF9TRSHxWfj5xVtwX79uwmt1p+Iu/33nuPK1eucPPmTTp06PCUel55XtZFWcLzQYxMCkIFKvgffcEj00YGukzpaVbuXwAFKVSKGzBgAEuXLkWWZca
MGcOCBQvw8/N7or6/rIqPmhWs3C28vfCIT+G5X4UVjGiVVqVFpVJx+vTpp9Ph51B5RtmKKx70VzNqSs7ddFJrNwVg27ZteHl5cezYsXJVuKmK3Gwai+BRqJJEMCkIFexRfgEUnqNX6/o9OHGsXNeSJIm33nqLr7/+WgST5STmplW8p/Elq3jQL2lp88bETUiFtpX2iLr4NmNjY+Li4h75uoIglCSCSUGoZMXn6GXUa8XfyWmMnv4F38ydCuSXzEtPT3+k84WGhipzwoTH9zRGzYSHe9JRNhH0C0LVIeZMCkIlK/64TpIk6rl9xM87d9O8eXMsLCz48MMPadCgQZnnKJgzqVKpOHnyJB9//PGz6PoLScxNez68TFWCBKGqkwonGK1sbdq0kUurmSoILzITv12U9q9QAi7Oe3BKE0F4mRWeHvKk85EFoTwkSYqUZblNZfejsomRSUGoZE9jZasgvIwqskrQ1atXS108BfkLsMTAhyD8SwSTglDJxOM6Qah6GjVqxObNmyu7G4LwXBDBpCBUMjFHTxAql5+fH8uWLVPez5o1C39/fywt8yveZGZmMnDgQFq3bk2/fv3IzPx34c++ffto3749tra2eHp6KiUIg4ODsbGxwcrKipEjR/LPP/8o1zI3N0elUjF58uRneJeCUHHEnElBEAThpXby5Ek++OADfv/9dwDMzc359ttvGTVqFHFxcSxcuJC4uDhWrlxJTEwMtra2HD9+HGNjY/r378/u3bupXbs2X3zxBf/88w//+9//MDU1JTg4mJYtWzJs2DBsbW1599136dChA2fOnEGSJNLS0jAwMKjkuxeehJgzmU+MTAqCIAgvNRsbG/7++2+uXr1KdHQ0r776Kk2aNFH2Hzp0iKFDhwL5CedVKhUAx48fJz4+HkdHR9RqNT/99BOXLl0iISEBExMTpUb78OHDOXToEPr6+tSsWZP33nuPrVu3UqtWrWd/s89Inz59SEtLq+xuCM+IyDMpCIIgvJQKrwbPadSGGYt/4DWdrFJrdJdGlmW6d+9eojRmdHR0qcfr6OgQFhZGcHAwmzdvZunSpRw4cOCJ76Mq+vXXXyu7C8IzJEYmBUGoEBqNRplzVtiMGTPYv3//A9t6eXk9tcUPenp6T+U8xYkVvc+34rW9c43bsyFgI6vXb8TT07PIsU5OTmzYsAGAuLg4YmJiAHBwcODIkSOcO3cOgDt37nD27FnMzMzQaDTK9rVr1+Ls7ExGRgbp6en06dOHRYsWlRl0PmuffPIJZmZmdOzYkUGDBuHv78/333+Pvb091tbWuLu7c/fuXSD/3+aoUaNwcHCgWbNmHDx4kJEjR9K6desiZUWNjY25fv06Go2G1q1b4+Pjg4WFBT169FDmnIaHh6NSqVCr1UyZMqXU/18IzwcRTAqC8EzNmTOHbt26VXY3XnilBdErVqxgzZo1AJw5cwa1Wo2NjQ3nz58v8zzGxsZYWVmhUqno0aMH165de+Q+9OvXD7VaTYsWLdDX10etVqNWqzl69Cje3t7Ex8c//o09JcWLBVQ3akpO1l3u6NShYcOGRY4dNWoUGRkZtG7dmhkzZmBnZweAkZERq1evZtCgQahUKtq3b8+ZM2eoWbMmq1atwtPTEysrK7S0tPD19eX27du4urqiUqno2LEjCxcufKb3XJrw8HC2bNlCdHQ0u3fvVr4g9e/fn/DwcKKjo2ndujU//vij0ubmzZscO3aMRYsW0bdvXyZOnMipU6eIjY0lKiqqxDX+/PNPxowZw6lTpzAwMGDLli0AjBgxgm+//ZaoqCi0tbVLtBOeH+IxtyAIFSY3NxcfHx+OHj1K48aN2bFjB6NGjcLV1RUPDw+MjY1555132L17N7q6umzYsIEWLVoA+fPUFi5cyLVr15g/fz4eHh7Issz//vc/du/ejSRJTJ8+nQEDBnDw4EFmzJjBK6+8wrlz5+jSpQvffPMNWlr535enTZtGUFAQurq67Nixg/r165O
SkoKvry9//fUXAIsXL8bR0ZGwsDAmTJhAVlYWurq6rFq1CjMzMzIzMxkxYgTR0dG0atVKGV3Jzc3lvffeIyIiAkmSGDlyJBMnTqycD/whfH19ldfbt2/Hw8OD6dOnP7RdSEgIhoaGfPTRR3z22WcsWbKkyP7c3NxSg4Ft27YBcPDgQfz9/QkKClL2dejQoby38VQUr+0N0Oi9ZUpt78I1u3V1ddm4cWOp5+natSvh4eEltru4uHDy5Mki2xo2bEhYWNiTdfwpO3LkCG+//TY1a9akZs2avPXWW0D+COz06dNJS0sjIyODnj17Km3eeustJEnCysqK+vXrY2VlBYCFhQUajQa1Wl3kGiYmJso2Ozs7NBoNaWlp3L59m/bt2wMwePDgIj8fwvNFjEwKglBhyhqRKExfX5/Y2FjGjh3LBx98oGxPSkoiNDSUoKAg/Pz8ANi6dStRUVFER0ezf/9+pkyZQlJSEgBhYWF8/fXXxMfHc/78ebZu3QrkP3p0cHAgOjoaJycnvv/+ewAmTJjAxIkTlZEZb29vAFq1asXhw4c5efIkc+bM4aOPPgJg+fLl1KpVi9OnTzN79mwiIyMBiIqKIjExkbi4OGJjYxkxYkQFfZpPriDlza+//srixYtZvnw5Xbp0AWDdunW0bdsWtVrN+++/T25ubon2Tk5OyqNbPT09/vvf/2Jtbc2xY8fKTIVTlsLTBPT09Jg4cSIWFha4uLiQkpICwJIlS5Q0OgMHDnyaH8VLXSxg+8lEHOcdwMRvF1/t/5MzSbdKHOPl5cXSpUuJjY1l5syZZGVlKftq1KgBgJaWY8P4awAAIABJREFUlvK64H1OTk6JcxU+Rltbu9RjhOebCCYFQagwpY1IFDdo0CDlv8eOHVO2u7m5oaWlhbm5OcnJyQCEhoYyaNAgtLW1qV+/Ps7OzsqoUNu2bWnWrBna2toMGjSI0NBQAKpXr46rq2uJPuzfv5+xY8eiVqvp27cvt27dUua0eXp6YmlpqTy+g7JX9DZr1owLFy4wbtw49uzZQ506dZ7mR1gh+vTpg6+vLxMnTiQkJITTp08TGBjIkSNHlEeO69evL9EuKChIGYW6c+cO7dq1Izo6mjZt2uDl5UVgYCCxsbHk5OSwfPnyR+7PnTt3aNOmDadOncLZ2ZnZs2cDMG/ePE6ePElMTAwrVqx4Ojd/38taLKD4XNGsei3Y8csv/Hz8PBkZGcro4O3bt2nYsCHZ2dml/iw8KQMDA1555RX++OMPgDJHfoXng3jMLQjCU1N4dWxdOZ1/5H9/WWtraxdJ9lxAkqRSXxcezXiUfLiF2xZ+/3/27jyu5nx/4PjrqFRE1rHEKHNVtJ1KKSllCZMxJQ0mSwwzGKPLnUxms84dMxozw1hmsVyDJvsYxr6l8KM4KZTIUZMtW0Sl5fP7o9v3FmUslfB5Ph491Pd8l8/5dnTe57O833p6esr3JXtFCgsLOXToEAYGBqWOGzt2LF5eXqxfvx6tVounp+dDr1u/fn3i4uLYtm0bCxcuZNWqVSxevPhv21ud7Nq1i9jYWJycnICiJN2vvPKK8riXlxc6OjrY2toyY8YMoOhe+vv7A5SZCmfevHmlepofpkaNGsoK6kGDBtG3b1+gKGgPDAzE19cXX1/finmy/1VcFOBlq+19/1xR/WbmGLzmzNDenWnf1gwbGxuMjY2ZPn06HTp0oHHjxnTo0IHbt29XeFsWLVrEyJEjqVGjBp07d8bY2LjCryFVDRlMSpJUIYp7PIrfqC7fyiHjVg4bjqU/9A06IiKC0NBQIiIilPlT5XF3d+fHH39k6NChXL9+ncjISGbNmkViYiKHDx/m3LlztGrVioiICN59992Hnsvb25u5c+cSEhICFA1Xq9VqMjMzMTEpau/SpUuV/YtX9Hbp0qXUit6rV69Ss2ZN/P39sbCwUHovn4WSwXx2XsHf3vtiQgiGDh3Kl19+WebjxXMmSzIwMKi0RRPFwf/mzZuJjIz
kjz/+4IsvviA+Ph5d3Yp72/K1N6lWwaOOjg42Njbk5eWhq6vLkCFDGD9+vDL3tywajYYLFy7w+uuvl9ru6+vLpUuXOHTokLJt9uzZHAn7DmrooFOrLg17/RNd41eo69wXnU6BbPvcCw8PDxwdHXFwcGD06NEPXK/k/4mS80rvf6x4BKBRo0al9ilZ9cfKykr5fzRz5kzat3/pc38/t+QwtyRJFeL+Hg8oClJmbUt66HE3btzA1taW77//nm+//fah+/r5+WFra4udnR1dunTh66+/pmnTpgA4OTkxduxY2rZti5mZGX5+fg8915w5c4iJicHW1pZ27dopw6gTJ05k0qRJ2Nvbl5rbVd6K3vT0dDw9PVGr1QwaNKjcgKyy3T98KQRMWhfPhmPpf3ts165dWbNmDVeuXAHg+vXrnD9//pGvXV4qnEdVWFiopIJauXIlnTp1orCwkLS0NLy8vPjqq6/IzMxUShW+qAwNDdFoNJw4cYIdO3awZcsWZci/PBqN5oGcjjdv3iQ2NpbMzExSUlKU7fb29jiMW0jz4T9Qy6ITN/YuAeDa1h+4siwYBwcH/P39cXBwqPgnV4bNmzejVquxtrZm//79j7QYTKqeZDlFSZIqhFnoZsr6a6ICzs30KfMYU1NTYmJiHuj1elxlrRZ+2bjN3E16iRXK5796Ax2jBujUUNG4jj4TJkzg1q1bGBkZ8eGHHzJlyhTleyjqIf7yyy8pLCxET0+PefPm4eLiUu7vyMjIqFRwt2vXLj788EPy8/NxcnJiwYIFylSFsn4/np6ehIWF0b59e4yMjHj33XfZvn07r7zyChEREdSrVw8vLy8yMzMRQjBo0CBlIdaL6v57mpKSgpOTE1evXiU3N5fRo0cTExODrq4us2fPxs3NjX/84x9kZ2djYmLCpEmT6N+/P4sXLyYmJoYmTZqgp6enLCKD/33oyPzrNNd3LKTpoFkY6unwZV+batVL+7yQ5RSLyGBSkqQKcX8wU8ykniHRoV3KPEYGkxXnSYL56uL+IOplVdZ9qFevHklJSSxfvpwTJ06wePFiEhMT8fb25vTp0/z222/ExMTwww8/KMd0796dzz//nCZNmuDv7098fHypc244ls7oMe+To1eHdq8PeynmilYWGUwWkcPckvSEyqvw8riKK0U8755kdaxWq33qQBKKerle5kASXu5UNy+DqKgoZT6upaUlrVq14vTp0w/sd/nyZZKTk+nUqRPm5ubo6emVmrMIkHViD63EJS7t+IXo0C4ykJSemgwmJUmqEL72JnzZ1waTeoaoKOqRlENnVed5TnXzMvdKlsz5WLxoqlhKSgo6OjqlVtb/nVWrVnHjxg3MzMwwNTVFq9WWqh2+c+dOvvjiCzZu3FgqY4IkPQ0ZTErSU8jPzycwMJC2bdvSr18/7t69W27y5r9L6pydnU2vXr34+eefuXPnDj4+PtjZ2WFtbU1ERMSzeHqPzdfehOjQLpyb6SN7PKqYDOafPw9bNFVcoWns2LGoVCrc3d2VfI+nT58mNTUVCwsL6tSpUyptT3h4OFu3bkWr1aLVaomNjVVyOB47doz33nuPjRs3PlaAKkl/p9KCSZVKNUulUiWqVKrjKpVqvUqlqldZ15KkZyUpKYkxY8Zw6tQp6taty+zZs8tM3pyTk/PQpM5ZWVm88cYbDBw4kJEjR7J161aaN29OXFwcCQkJ9OzZ8xk+S+l5IYP558v9GRBE/j3O/jSGt3u60a1bN7y9vZk8eTIAY8aMobCwEBsbG/r378/SpUvR19fHy8uLkydPolar+eqrrzh//jwuLi7KOc3MzDA2Nub//u//CAkJISsri4CAACVZvyRVhEpbgKNSqbyB3UKIfJVK9RWAEOKjhx0jF+BIzxOtVouHh4dS23n37t1Mnz6dgoICIiMjgaLeyHnz5jF58mQ++OCDB7avW7cOU1NTjI2NmThxIoGBgUBRz4O3tzf9+/end+/euLu7P5snKUlSpXm
eF01JReQCnCKV1jMphNguhChO0nYIaFFZ15KkqlJyfpP/ggPk5BWWerxevSfrgHdzc2Pr1q1KpRdzc3OOHj2KjY0Nn376KdOmTWPDhg2oVCoSExMfei4jI6MHtmm1WgwNDVGr1bRr144hQ4aQl5f3WG0MCgpScgGWtHfvXqVc4caNG5k5cyYAGzZs4OTJk491DenFpVKpSiV0z8/Pp3Hjxsprp7JU1EK5yiAXTUkviqqaMzkc2FJF15KkSnH//KbLt3LIuJTOzKUbgaJky+3bty8zefPfJXWeNm0a9evX5/333wfgwoUL1KpVi0GDBhESEsLRo0cJDw+nU6dOpSbTP47XXnsNjUZDfHw8f/31F6tWrXqKu1G2Pn36KLkAZTAplVS7dm0SEhKUkpo7duxQKg09qpJJ5F8Ez/OiKUkq6amCSZVKtVOlUiWU8fVmiX0+AfKBMivFq1Sqd1UqVYxKpYrJyMh4muZIUqUqq8KLboMWfPP9HNq2bcuNGzcYP348S5YsISAgABsbG2rUqMGoUaMwMDAoc3tJ33//PdnZ2UycOJH4+HicnZ1Rq9VMnTqVCRMmEBUVxaJFi5TJ9BcvXsTDw6NUBYmSrl69iqurK5s3by61XUdHB2dnZ9LTi1aNFhQUEBISgpOTE7a2tvz4449AUfWasWPHYmFhQbdu3ZTqKABbt27F0tISBwcH1q1bp2xfunQpY8eO5cCBA2zcuJGQkBDUajVnz559yrsvvQhef/115fUYHh7OwIEDlccOHz6Mq6sr9vb2dOzYkaSkospJS5cupU+fPnTp0oWuXbsyYMCAUq/p4h7z8l7H1ZlcNCW9MIQQlfYFBAEHgVqPsr+jo6OQpOrK9KNNolUZX6Yfbar0ay9fvlwMHz5cCCGEq6uriImJEWFhYWLGjBlCCCHy8/PFrVu3hBBC1K5dW1y6dEk4OzuL7du3CyGEOHfunLCyshJCCJGdnS08PT1FXFycEEKIH3/8UUyfPl0IIUROTo5wdHQUKSkpYu3ataJbt24iPz9fpKenC2NjY7F69WqRnZ0tWrRoIU6fPi0KCwtFQECA8PHxEUIIsWTJEvH+++8LIYQYOnSoWL16daXfG+n5ULt2bREXFyf8/f1Fdna2sLOzE3v27FFeO5mZmSIvL08IIcSOHTtE3759hRBFrykTExNx7do1IYQQ69atE0OGDBFCCJGbmytatGgh7t69W+7ruORrX5IqGhAjKjGOel6+dCsrSFWpVD2BiUBnIcTdyrqOJFWV5vUMy6zwUhXzm8LDwwkODgZgwIABhIeH06dPH4YPH05eXh6+vr6o1WoA8vLy6Nq1K/PmzSs1lH727FnUajXnzp3Dx8cHW1tbALZv387x48eV+ZCZmZkkJycTGRnJwIED0dHRoXnz5nTpUlTFJjExETMzM9q0aQPAoEGD+Omnnyr9HkjPP1tbWyXv4euvv17qsczMTIYOHUpycjIqlarUnN7u3bvToEEDAHr16kVwcDC5ubls3boVDw8PDA0Ny30dm5ubV90TlKSXVKUFk8APgD6wQ6VSARwSQox6+CGSVH2F9LBg0rr4UkPdlTm/acOxdGZtSyLt4hXSt+/kcKyGWvq6FBQUoFKpmDVrFpGRkWzevJmgoCAmTJjAkCFD0NXVxdHRkW3btpUKJovnTF69ehU3Nzc2btxInz59EEIwd+5cevToUer6f/75Z6U8L+nlUfwavnAzW0nI3adPHz788EP27t3LtWvXlH0/++wzvLy8WL9+PVqtFk9PT+Wx2rVrK98bGBjg6enJtm3biIiIYMCAAQDlvo61Wm2lPkdJkip3Nfc/hBAthRDq/37JQFJ6rlXl/KaSi33uJEVTq50XDUf8wnfro0lLS8PMzIzIyEiaNGnCyJEjGTFiBEePHgWKVs0W1+/96quvHjh3o0aNmDlzJl9++SUAPXr0YMGCBUpP0OnTp7lz5w4eHh5ERERQUFDAxYsX2bNnD1BUyk2r1SrzIMt
bEHR/MmXp5VJeQu7mzr2YPHkyNjY2pfbPzMxUFuQsXbr0oefu378/S5YsYf/+/UoO1vJex5IkVb7K7JmUpBeOr71JlUyOL7nY586pfRh36Ed2XgGztiXha2+Cv78/QUFB1K5dGz09PYyMjFi2bJlyvI6OjjIUXqdOnQeGFH19fZkyZQr79+9nxIgRaLVaHBwcEELQuHFjNmzYgJ+fH7t376Zdu3a8+uqruLq6AkU9Qz/99BM+Pj7UqlULd3f3MoPGAQMGMHLkSObMmcOaNWt47bXXKvGOSdVNWQvWsvMKWKK5TXTouAf2nzhxIkOHDmXGjBn4+Dw8x6K3tzeDBw/mzTffpGbNmgDlvo4lSap8lZa0/EnIpOWSVKQykxnPmTOHBQsW4ODgoJRne1T//ve/+fjjj5/q+vfbv38/o0aNQk9Pj4MHD2Jo+OhzULVaLQcOHODtt98Ginq0YmJi+OGHHx75HKampsTExNCoUSOMjIxe6jrRFamqE3JrtVp69+5NQkLCE59j7969hIWFsWnTpgpsmfQik0nLi8ja3JJUDVVmMuP58+ezY8eOxw4koSiYrGgrVqxg0qRJaDSaxwokoSiAWLlyZYW3SXp6MiG3JL08ZDApSdVQZSUzHjVqFCkpKfTq1Yuvvvqq3Lx+ffv2pWfPnrRp04aJEycCEBoaSnZ2Nmq1Win76Ovri6OjI1ZWVsqK7oKCAoKCgrC2tsbGxoZvv/2Ws2fP4uDgoLQjOTkZBwcHfvnlF1atWsVnn31GYGAgQghCQkKUYyMiIgDK3R4aGsr+/ftRq9V8++23AKSlpeHp6UmbNm2YOnWqcs2y2ipVnmeRkLugoICRI0diZWWFt7c32dnZaDQaXFxcsLW1xc/Pjxs3bgBw5swZunXrhp2dHQ4ODg/kQj1y5Aj29vacPXuWO3fuMHz4cJydnbG3t+f3338HwMPDA41GoxzTqVMn4uLiKu35SVK19axzE5X8knkmJel/1h/9S3T8cpcw/WiT6PjlLrH+6F8Vct5WrVqJjIyMh+b1MzMzEzdv3hTZ2dni1VdfFampqUKIolyBJRXn/rt7966wsrISV69eFTExMaJbt27KPjdu3BBCCOHp6SmOHTsmhBBi0qRJYs6cOUKI0vko16xZo+S2vHTpkmjZsqW4cOFCudtL5iksbnvTpk3F1atXlTYdOXKk3LaWvB9lPT/p6VTWa7gs586dEzo6OsprLCAgQPz666/CxsZG7N27VwghxGeffSaCg4OFEEI4OzuLdevWCSGKcq/euXNHeT1FR0cLBwcHcf78eSFE0ev1119/FUIUvZ7btGkjsrKyxNKlS5XzJSUlCfke9vJB5pms3DyTkiQ9ncpe7POwvH5du3bF2NgYgHbt2nH+/Hlatmz5wDnmzJnD+vXrgaIeweTkZCwsLEhJSeGDDz7Ax8cHb29voGiBxJIlS5g9ezYREREcPnz4gfNFRUUpuS2bNGlC586dOXLkSLnb69at+8A5unfvTsOGDQHo27cvUVFRtG/fvsy2Fu8nVY6qWrBWzMzMTMm36ujoyNmzZ7l586aSImvo0KEEBARw+/Zt0tPT8fPzA4oWlRU7deoU7777Ltu3b6d58+ZAUS7WjRs3EhYWBkBOTg6pqakEBAQwffp0Zs2axeLFiwkKCqqy5ypJ1Ykc5pakl8CGY+m4zdyNWehmLmXm8Ofxi0pev4SEBP744w9ycnKU/fX19ZXvdXR0yqyJvHfvXnbu3MnBgweJi4vD3t6enJwc6tevT1xcHJ6enixcuJARI0YA4O/vz5YtW9i0aROOjo6VFsj9N69tqZ/La6tUfc2ZU1SmNDAwkNzcXLp164ZarSYiIgJTU1OuXr1a6nXtv+AAueJ/w+o6OjrcvHnzodfQaDQP5FNt1qwZBgYGHDt2TNkmhGDt2rVoNBo0Gg2pqam0bduWWrVq0b17d37//XdWrVqlTP+QpJeNDCYl6QV3f76//ELB9M0nSUy99Mh5/Yr
p6ekpPZiZmZnUr1+fWrVqkZiYyKFDh4CimuCFhYX4+/szY8YMJf+lgYEBPXr0YPTo0QwbNqzM87u7uyu5LTMyMoiMjMTZ2bnc7WXlstyxYwfXr18nOzubDRs24ObmVm5bpeqr5EKx4sBOo9HQv39/AP48frHU6/ryrRwu38phw7F05RzGxsbUr19fqVv/66+/0rlzZ+rUqUOLFi1YvHgxf/75J7m5udy9W1SorV69emzevJlJkyaxd+9eoCiH5dy5cxH/zX5SMtAcMWIE48aNw8nJifr161f2bZGkakkOc0vSC66sfH85eQXctfBh0qRJj5TXr9i7776Lra0tDg4OLF68mIULF9K2bVssLCxwcXEBID09nWHDhlFYWAigJEcHCAwMZP369crQ9/38/Pw4ePAgdnZ2qFQqvv76a5o2bVru9oYNG6Kjo4OdnR1BQUHUr18fZ2dn/P39+euvvxg0aBDt27fHxsamzLZK1cPs2bNZvHgxUBScJSYmKgvFBg0axM8//0xGRgZqtZq1a9cCMGnqDDJO/h8qvZo0eiMEVQ0dCgvyeWfwQL4wzObSpUt0796d//znPwwaNIiUlBRq1qyJubk5SUlJLFq0CEdHR/Lz81m8eDEzZ85USow2adKETZs20atXLxYvXsxnn33GP//5T2xtbSksLMTMzExJH+To6EjdunXL/YBUlZ42PdKsWbPIyckhLi6Oli1bEhcXx+7du9m9ezeLFi2id+/e/Pvf/0YIgY+PD1999RWrV6/m4MGDzJ49m++//57vv/+elJQUUlJSGDx4MNHR0RX8LKXqSOaZlKQXXFXn+3uYsLAwMjMzmT59epVeV6q+YmNjCQoK4tChQwgh6NChA8uXL8fPz0/J/3l//kdTU1MyX/XAuGN/shJ2cTcxilf6TSZj4yzqOrzOxeUTSU1NpUePHpw6dYpbt25Rq1YtdHV12blzJwsWLGDt2rVPlJf0fhcuXMDT05PExERq1Hi2g31PG0weOnSIb775htWrV+Pu7k5ubi7R0dFKSrBFixYRGxtL/fr18fb2Zty4cbi4uPDGG29w5MgR+vXrx/nz59mwYQM7d+4kMTGx1IfJF5HMM1lEDnNLlWLDhg2oVCoSExMfup+RkdED2/z8/EpVrrCwsGDGjBnKz/7+/qxbt67iGvuCqy75/vz8/Fi2bBnBwcFVel2peouKisLPz4/atWtjZGRE3759lWHph2ntUtS7XbttZ3LTi/7O5JzXcGv3T6jVavr06cOtW7fIysoiMzOTgIAArK2tGT9+PCdOnKiQti9btowOHTrwxRdfPPNAslh+fj6BgYG0bduWfv36cffuXWWOKUBMTIxS93zfvn2o1WrUajX29vaYm5vzf//3f7Rt2xZ9fX2MjY3p1q0bc+bM4YcffqBOnTo0btwYXV1d1Go1o0eP5vXXXycpKYmLFy+SlpZG3bp1cXZ2Zvz48Uq6sdWrV2NtbY2dnR0eHh7P6tZIlah6vPqlF054eDidOnUqt27zw7i5uXHgwAEArl27Ru3atTl48KDy+MGDB+nYsWOFtfVF9yzy/ZVl/fr1HD9+nEaNGlXpdaXqqXjxzLQ/TrAk+lypuY6P4l2P1/73uv7voiuVECxZt01ZKJOeno6RkdFDF5s9jSFDhpCWlkZAQECFnK8iJCUlMWbMGE6dOkXdunWZP39+ufuGhYUxb948pizZjL7vDBxmRnFTVZdLGdfp2LEj5ubmxMfHU7t2bebPn49WqyUtLY2rV6+yadMm+vXrx9GjR7GwsOCdd97B1NSUM2fO0KdPH5o1a6bkfp02bRrbtm0jLi6OjRs3VtWtkKqQDCalCpeVlUVUVBSLFi3it99+A+DixYt4eHigVquxtrZ+oOfh6tWruLq6snnzZjp27KgEkwcOHOCNN94gIyMDIQTnzp3D0NCQpk2bkpOTw7Bhw7CxscHe3p49e/YARYtJfH196d69O6ampvzwww/Mnj0be3t7XFxcuH7
9OgBnz56lZ8+eODo64u7urvSiBgUFMW7cODp27Ejr1q1Zs2ZNVd26SuFrb8KXfW0wqWeICjCpZ8iXfW2qNGWLJJVUclFYzRZWXI6P4qOII/x2IJn169fj7u7+t+e4kbCPL/vaoJ96CP3mFpjUM6RjZy/O7//fqEVxQvHMzMwyF5uVtYDredeyZUvc3NwAGDRoEFFRUeXu6+bmxrBRY3kvdAZ/Xb4KNXSg8WvcvHENnebtMDc35969ezg6OtKpUycKCwuJi4sjOjoarVbLpk2bUKvVpKenExkZSZcuXTA2NiY8PJzs7GyaNWumXCcoKIiff/6ZgoKCctsjPb9kMClVuN9//52ePXtibm5Ow4YNiY2NZeXKlfTo0QONRkNcXJySCw7g8uXL+Pj4MG3aNHx8fHB0dCQhIYF79+5x4MABXF1dsbCw4NSpUxw4cEDplZw3bx4qlYr4+HjCw8MZOnSo0uOQkJDAunXrOHLkCJ988gm1atXi2LFjuLq6smzZMqBoMcncuXOJjY0lLCyMMWPGKG26ePEiUVFRbNq0idDQ0Cq8e5XD196E6NAunJvpQ3RoFxlISs9UyUVh+k3/gZF1V84tCmZ4X29GjBiBvb39357jxo0bfD60F8YpOzm+dSXRoV1Y9+svxMTEYGtrS7t27Vi4cCEAEydOZNKkSdjb25dKc+Xl5cXJkyeVlEPPo/vTI+XkFZZ6XKVSoaurqyyIK9krGxoaSq2u73MvN4dLK0LIu5ZGzWZtoLCALVfqYGxsjK6uLu7u7jRr1gxLS0vGjRvHuHHjeO211zh37hwajYb9+/dz584dvLy8OHLkCK1bt8bAwICePXsCsHDhQmbMmEFaWhqOjo5cu3at6m6QVCXkam6pwoWHhyvz4gYMGEB4eDh9+vRh+PDh5OXl4evrqwSTeXl5dO3alXnz5imJhfX19bGysuLo0aMcOnSIiRMnkpKSwoEDBzh27JjyqTsqKooPPvgAAEtLS1q1asXp06eBojeJOnXqUKdO0R/EN954AwAbGxuOHz9OVlYWBw4cKDU8lZubq3zv6+tLjRo1aNeuHZcvX67kOyZJL5cLN7NL/VzX2Y+6zn6ogH/+s2hRmFarVR739PRU5vmVfOyrr74qdZ5GjRqVGRS6uroqfxsAZQ52gwYNOHLkyFM8k2eruIe3ODC/fCuHjEvpzFy6kdCgPqxcuZJOnTpx+/ZtYmNj6dWrl7IaHopGZzINmmHs0o97F0+Td+0vDFq0Q6/Rq1wuypTEoEGDmDBhAgAmJiZ8++23WFlZ4ejoyJkzZ/jHP/5B06ZNSUpKonnz5mRmZnL06FEyMzNp3bq1cp0OHTrQoUMHtmzZQlpamiwY8IKRwaRUITYcS2fWtiTSLl4hfftODsdqqKWvS0FBASqVilmzZhEZGcnmzZsJCgpiwoQJDBkyBF1dXRwdHdm2bZsSTELRsEhkZCS3b9+mfv36uLi48MMPP3Ds2DHee++9v21PyaTbNWrUUH6uUaMG+fn5FBYWUq9evVJ1dcs7vjplPJCkF0Hzeoak3xdQFm+XHl1Zab90G7Tgm+/n8J+vPqJdu3aMHj0aZ2dn3nnnHT777LNSQfl3333H5dWbyC8EvUavYti6PQV3iqYBPex30bhxY5YuXcrAgQOVD+EzZsygTp06vPnmm+Tk5CCEYPbs2QCEhISQnJyMEIKuXbtiZ2dXwXdCetZkMCk9tZKfju8kRVOrnRcN3whW5uV17tyZyMhIPv/8c7755hsY8/RyAAAgAElEQVRGjBjB0aNHGTJkCCqVis6dO/PFF19gbGzMRx99BEDHjh3517/+pfzhs7W15dChQ1y+fBlra2ugKMH1ihUr6NKlC6dPnyY1NRULCwslSfbD1K1bFzMzM1avXk1AQABCCI4fPy7/yD1nEhMTGTBgACqVijVr1mBnZ0dWVtazbpb0N0J6WJTqUYNnsyjseXd/D6+ucRNMRi5EBZwqkfbL3d29VM9ssblz59J
1eGip34WucRNeG/UjIT0s8LXvUqpEZHFqJoAuXbqU2atbVplUmX3jxSfnTEpPreSn4zun9lHL3JXsvAJmbStKC+Hv78/QoUOJiYkhMDCQiIiIUulhatSogbe3N7t371ZWHnbs2JGUlBRcXV0B0NXV5ZVXXqF9+/ZKCo4xY8ZQWFiIjY0N/fv3Z+nSpaV6FP/OihUrWLRoEXZ2dlhZWfH7779XyP2Qqs6GDRvo168fx44d47XXXnvWzZEekVwUVjEqIu2X/F1IFUH2TEp/a9asWejr6zNu3DjGjx//QFWE07dNyDy4ChAYvuaEYWtHAA5+7sO/Msawc+dOli9fzqeffkpYWBjt27dnyZIl9OjRg3bt2hEdHY2+vj7btm0jKCiIxMREYmJiMDMzo169eko7fHx8WLVqFba2tvj5+TF16lTatWuHvb290rbp06eze/duXn31VQIDA1m2bJky30qlUjF8+HAlQbGZmRlbt2594PneX1pQ9nQ9e1qtll69etGpUycOHDiAiYkJwcHBfPfdd+jo6LBr1y5lNb/0fPC1N5EBy1OqqB5e+buQnpbsmZT+lru7u5LKJyYmhqysLPLy8ti/fz/m5ubcivwPTQb+m2bD5pJ7MZm7p4tyQoq8HDp06EBcXBydOnVSznfx4kUmT55MdHQ0UVFRnDx5stT1ylpJvX37dpKTkzl8+DAajYbY2FgiIyMf2jYPDw8l11xCQgLx8fHVouSZ9GSSk5N5//33OXHiBPXq1ePGjRuMGjWK8ePHy0BSeinJXkWpupA9k9ID7u8FatasGWfPnkWj0XDy5El0dHRo3749tWrV4q233qLGvSxq163P3axb5KbGc8ewLg2tioJHW1tb9u3bR3BwMGfOnCEwMJApU6bg6elJ48aNAejfv3+p+TxlraTevn0727dvV1KGZGVlkZyczJAhQ4iNjeXWrVvo6+vj4OBATEwM+/fvZ86cOTRr1oyUlBQ++OADfHx8yq0JLVV/ZmZmShYAR0fHUqt9JellJXsVpepA9kxKZTqdnMwRQyfuvvE1miv55NfQ46233mLgwIHMnz+fjh07otFoaN26NQ3q12OMWp/aN86gU7cxOjk3mNbbHJVKhaWlpVJloX379vz888/UrFnzodcuayW1EIJJkyYplS3OnDnDO++8g56eHmZmZixdupSOHTvi7u7Onj17OHPmDG3btqV+/frExcXh6enJwoULGTFiRKXeN6ni3J8/L1f8r4qPjo5OqXyBle3mzZsPrSTyMEFBQRWW+N7T05OYmJgKOZckSVJFkcGk9IDtJy6hU7cJN2uZIICCBmZcKzAkOTmZLVu2MHnyZBYtWoSuri7Ozs7cuXOH3HOx+DS+QbtWTfhHg5o0y7ugLJRxc3NjwoQJpKenc/v2bTp27Mi+ffu4du0aeXl5rF69+m/b1KNHDxYvXqzMX0xPT+fKlStA0TB8WFgYHh4euLu7s3DhQuzt7VGpVFy9epXCwkL8/f2ZMWPGI630lp69khVSBEX58y7fynnsknsV5WmCSUmSpBedDCYloHQv0IxNp0CnxAwIVQ3QNwLg+PHjnDhxAlNTU6ZOnUqzZs0IDg7miy++YMGCBXTv3h19fX327t2Ljk5RT1JoaCi//PILBQUFvPPOO2RmZjJlyhRcXV1xc3Ojbdu2f9s+b29v3n77bVxdXbGxsaFfv35KGTR3d3cuXryIq6srTZo0wcDAQCnHlp6ejqenJ2q1mkGDBvHll19W8J2TKkNZ+fOEEEqGgKoWGhrK2bNnUavVhISEMGvWLJycnLC1tWXy5MnKfsuWLcPW1hY7OzsGDx6sbI+MjHygPOfevXvx9PSkX79+WFpaEhgYqPTE79q1C3t7e2xsbBg+fHiphPrFwsPDsbGxwdraWkmpBbBo0SLMzc1xdnZm5MiRjB07ltu3b2NmZkZeXh4At27dKvWzJEnSUxFCVJsvR0dHIVW99Uf/EpafbhGtPtokWn20SZiMWiT
0Gr2q/FzPc7gwdhso9JtbilWrVgkhhCgsLBQajUYIIUROTo5o1aqV8PLyEkIIMWrUKNGiRQvl8TNnzijX8vf3F+vXr6/iZyg9b0z/+9q7/8v0o03PpD3nzp0TVlZWQgghtm3bJkaOHCkKCwtFQUGB8PHxEfv27RMJCQmiTZs2IiMjQwghxLVr14QQQgwdOlT069dPFBQUiBMnTojXXntNCCHEnj17RN26dUVaWpooKCgQLi4uYv/+/SI7O1u0aNFCJCUlCSGEGDx4sPj222+FEEJ07txZHDlyRKSnp4uWLVuKK1euiLy8POHl5SXWr18v0tPTRatWrcS1a9fEvXv3RKdOncT7778vhBAiKChI+b/3448/igkTJlTdDZSkFxQQI6pB/PSsv2TPpFRmL1BZrAI/LTMvo76+Pi1btsTFxQUo6im8ffs2NjY2QFGVBWtra2xtbdHT06NXr16V92SkF0JF5M+rLCUXgzk4OJCYmEhycjK7d+8mICCARo0aAUWl+oqVV57T2dmZFi1aUKNGDdRqNVqtlqSkJMzMzDA3Nwdg6NChREZGlmrDkSNHlEVsurq6BAYGEhkZyeHDh+ncuTMNGjRAT0+vVLnQESNGsGTJEgCWLFkiMxtIklRh5GpuqcwqCs3f+d/8MOMOfTHU0+Gzvjb4fh1Y5jmK0/MAvP3227z99tvKz3Pnzq3gFksvuupSIaW4TOj581quX73DhmPpymKw+8t6Pux1Xl55zpLbK3tRkZubG1qtlr1791JQUKBUkpIkSXpasmdSKre3R0elkrnLpGeiOuTPK7kISFXTkHvZd5i0Lp46rzmWuRisS5curF69mmvXrgFw/fr1J7quhYUFWq2WM2fOAPDrr7+WqlsPRT2a+/bt4+rVqxQUFBAeHk7nzp1xcnJi37593Lhxg/z8fNauXVvquCFDhvD222/LXklJkiqU7JmUyu0FkgGk9Cw96/x5Jad/6BjWRd+kHWcXvsfPbV2Y+N/FYABGRkYsX74cKysrPvnkEzp37oyOjg729vYPVFN6FAYGBixZsoSAgADy8/NxcnJi1KhRpfZp1qwZM2fOxMvLCyEEPj4+vPnmmwB8/PHHODs706BBAywtLTE2NlaOCwwM5NNPP2XgwIFPeFckSZIepCo55PKstW/fXsgcas9G8XDehZvZNK9nSEgPCxlISi81s9DNlPXXUQWcm+lT1c15ZFlZWRgZGZGfn4+fnx/Dhw/Hz88PgDVr1vD777/z66+/PuNWPhsbNmzAz8+PU6dOYWlp+cjH7d27l7CwMDZt2vTIx8TExLBs2TLmzJnzwGOmpqbExMQo82ul55dKpYoVQrR/1u141mTPpAQ8+14gSapumtczJP2++cTF26uzKVOmsHPnTnJycvD29sbX1xeADz74gC1btvDnn38+4xY+O+Hh4XTq1Inw8HCmTp361OfLz89HV7fst9H27dvTvv1LH2NILwk5Z1KSJKkMIT0sMNTTKbXtWSwCelxhYWFoNBoSExOZM2cOKpUKKFogdObMGWWV+MsmKyuLqKgoFi1axG+//QY8PNfn1q1bsbS0xMHBgXXr1innmTJlCoMHD8bNzY3BgweTk5PDsGHDsLGxwd7eXqkTv3fvXnr37g3AtWvX8Pb2xsrKihEjRlCdRgQlqSLIYFKSJKkM1WERkFRxfv/9d3r27Im5uTkNGzYkNjYWgGPHjvHdd99x8uRJUlJSiI6OJicnh5EjR/LHH38QGxvLpUuXSp3r5MmT7Ny5k/DwcObNm4dKpSI+Pp7w8HCGDh1KTk5Oqf2nTp1Kp06dOHHiBH5+fqSmplbZ85akqiCHuSVJksohp3+8OMLDwwkODgZgwIABhIeH07t3byXXJ6Dk+jQyMsLMzIw2bdoAMGjQIH766SflXH369MHQsGi6Q1RUFB988AEAlpaWtGrVitOnT5e6dmRkpNK76ePjQ/369Sv3yUpSFZPBpCRJkvRCKl5YmHbxCunbd3I4VkM
tfV0KCgpQqVT4+Pg8Ua7P2rVrV2azJem5I4e5JUmSpBdOyTyhd5KiqdXOi4YjfuG79dGkpaVhZmZWqthCSZaWlmi1Ws6ePQsU9WqWx93dnRUrVgBw+vRpUlNTsbAoPa/Ww8ODlStXArBlyxZu3LhREU9RkqoNGUxKkiRJL5ySeULvnNpHLXNXsvMKmLUtCQB/f/9yg0QDAwN++uknfHx8cHBw4JVXXin3OmPGjKGwsBAbGxv69+/P0qVLS/V2AkyePJnIyEisrKxYt24dr776agU9S0mqHmSeSUmSJOmF87zmCZWeLzLPZBHZMylJkiS9cMrLB1rd84RK0vNIBpOSJEnSC+d5zRMqSc8jGUxKkiRJT02r1WJtbV0p5166dCljx459rGP+6efGJC8TTOoZkjq7n8wTKkmVSAaTkiRJ0mMxMjJ61k14qDlz5pCens7G+VMZ2TQVHZFPdGiXZxJIVvd7JUkVQQaTkiRJUoXIz88nMDCQtm3b0q9fP+7evcu0adNwcnLC2tqad999VyklOGfOHNq1a4etrS0DBgwA4M6dOwwfPhxnZ2fs7e35/ffflXOnpaXh6elJmzZtStXV9vX1xdHRESsrKyWx+Pz582nSpAkLFy6swmcvSS8vGUxKkiRJT2zWrFk4OTnRs2dPkpKSGDNmDKdOneLIkSOYm5uzcuVKRo4cSUJCAtnZ2YwbNw5zc3M+/PBDXFxc8PDwYOHChQQFBTF48GC6dOnC4cOHSU5OJiQkhDt37rBlyxb+/PNPMjIyCAgIYPXq1cTExDB79mySkpLIzc0lKCiIOXPmEBQUREpKCleuXHkgmMzIyMDf3x8nJyecnJyIjo5Wtnfv3l2pnd2qVSuuXr0KwPLly3F2dkatVvPee+9RUFCUbsjIyIhPPvkEOzs7XFxcuHz5MgDnzp3D1dUVGxsbPv30U+XaFy9exMPDA7VajbW1dbk5LiXpeSSDSUmSJOmJbN++neTkZA4fPsyff/6JgYGBEmx9//33tG/fns8//5x//vOftG3blh07drB8+XIOHTqEp6cnGzduJCkpCV3domJscXFxzJw5E7VaTXZ2Njk5OYSHh3P58mUGDBhAfHw8x48fx9HRkfDwcJYsWYKfnx8An332GVqtllGjRtG8eXOaNGnCqFGjSrU3ODiY8ePHc+TIEdauXcuIESOAotrZXbp04cSJE/Tr10+pnX3q1CkiIiKIjo5Go9Ggo6OjJCi/c+cOLi4uxMXF4eHhwc8//6xcY/To0cTHx9OsWTPl2itXrqRHjx5oNBri4uJQq9WV+JuRpKolg0lJkiTpicxfvo5lqzei3/Q1rF29uJeXR3JyMgDr169nz549DB06FF1dXZYsWYKnpyetWrWiQYMGbNmyhYEDB5KRkYGTkxOFhYUIIVi7di0ajQZDQ0NSU1NJTEwkISGBzZs34+DgQGJiItevX+fcuXPY29sTGRnJ4cOHCQkJoWnTpuTk5JTb3p07dzJ27FjUajV9+vTh1q1bZGVlERUVpQy19+zZU6mdvWvXLmJjY3FyckKtVrNr1y5SUlIAqFmzJr179wbA0dERrVYLQHR0NAMHDgRg8ODByrWdnJxYsmQJU6ZMIT4+njp16lTsL0OSniEZTEqSJEl/a8OxdNxm7sYsdDPZeQV8uiGeyNMZ1O7Qj+bD5tKg72QKCwrI0GnM3r172bZtG6GhoTRo0AC1Ws2NGzc4cOAAAIWFhaSlpdGmTRs6duxIZmYmAHZ2dsydO5eCggLu3bvHsWPHEELg4+ODgYEBu3fvJj4+nvPnz2NmZkZubi7169enVq1aXL16VQnoylNYWMihQ4fQaDRoNBrS09MfukBGCMHQoUOV/ZOSkpgyZQoAenp6qFQq4MGa3sXbS/Lw8CAyMhITExOCgoJYtmzZ49x+SarWZDApSZIkPVTJOtcCEAJWHEpFr5WarOM7KLyXDYBOvWbMCgtj0KBBqFQqgoOD8fPzIyoqipCQEFxcXNB
qtVy9epXAwEAmTZpERESEMo+ydevW5OXlKf9+9tln9OjRg6ioKBwcHPD396ddu3b06NGDwMBATp48SW5uLhYWFqxYsQI7O7uHPg9vb2/mzp2r/KzRaABwc3Nj1apVQNHQfXHt7K5du7JmzRquXLkCwPXr1zl//vxDr+Hm5sZvv/0GoAyJA5w/f54mTZowcuRIRowYwdGjRx/jNyBJ1ZsMJiVJkqSHKlnnupgADM0cqN2uM5d+/ZAra6ahU6suhm5DOHv2LGq1GkdHRy5duoSHhwc//PADq1at4uuvv8bNzY38/Hz69evHqFGjCA0NZeTIkURHR3Po0CEGDBhA7dq12bRpE97e3owbN47z589z9epVmjZtyqhRo3BwcGDYsGFcuHABPT09pk2bRmxsLJ6engAcO3aMRo0aAfDee+8BRSvIY2JisLW1pV27dsoCncmTJ7N9+3asra1ZvXo1TZs2pU6dOrRr144ZM2bg7e2Nra0t3bt35+LFiw+9V99//z3z5s3DxsaG9PR0ZfvevXuxs7PD3t6eiIgIgoODK+i3I0nPnqzNLUmSJD1UeXWuy2JSz5Do0C7lPp6VlYWRkRH5+fn4+fkxfPhwZRHNs5Kbm4uOjg66urocPHiQ0aNHK72WkvQwsjZ3Ed1n3QBJkiSpemtez5D0m9kPbFdBqSDzUcoVTpkyhZ07d5KTk4O3tze+vr4V29gnkJqayltvvUVhYSE1a9ZUVmZLkvRoZM+kJEmS9FDFcyZLDnUb6ung72jCnsQMLtzMpnk9Q0J6WMhyhdJLRfZMFpE9k5IkSdJDFQeIs7YlycBRkqQHyGBSkiRJ+lu+9iYyeJQkqUxyNbckSZIkSZL0xGQwKUmSJEmSJD0xGUxKkiRJkiRJT0wGk5IkSZIkSdITk8GkJEmSJEmS9MRkMClJkiRJL6jXX3+dmzdvlvnYF198gVqtRq1Wo6Ojo3w/Z84cAJYtW4a1tTU2NjbY29sTFhamHDt79mwsLS2xsbHBzs6OCRMmkJeXVyXPSap+ZDApSZVkxIgRnDx58lk3Q5Kkl9iff/5JvXr1ynzsk08+QaPRoNFoMDQ0VL4fN24cW7Zs4bvvvmP79u3Ex8dz6NAhjI2NAVi4cCHbt2/n0KFDxMfHc+TIEV555RWysx+skiS9HCq9Ao5KpfoXEAY0FkJcfdi+sgKOJFWugoICdHR0nnUzJEm6j1arpWfPnri4uHDgwAGcnJwYNmwYkydP5sqVK6xYsQKA4OBgcnJyMDQ0ZMmSJVhYWHD37l2CgoJISEjAwsKCCxcuMG/ePNq3b4+pqSkxMTE0atSIZcuWERYWhkqlwtbWll9//VW5vpGREVlZWcrPHh4eTJkyhS5dHqyz3rJlSyIjIzEzM6v8G1PNyQo4RSq1Z1KlUrUEvIHUyryOJD1rd+7cwcfHBzs7O6ytrYmIiMDT05PiD0dGRkZ88skn2NnZ4eLiwuXLlwE4e/YsLi4u2NjY8Omnn2JkZARAVlYWXbt2xcHBARsbG37//Xeg6A3H0tKSwMBA2rZtS79+/bh79y4Au3btwt7eHhsbG4YPH05ubi4ApqamfPTRRzg4OLB69WrOnj1Lz549cXR0xN3dncTExKq+XZIkleHMmTP861//IjExkcTERFauXElUVBRhYWH8+9//xtLSkv3793Ps2DGmTZvGxx9/DMD8+fOpX78+J0+eZPr06cTGxj5w7hMnTjBjxgx2795NXFwc33///UPbkpCQgKOj4wPbb926RVZW1hMFkiWH0tVqNVqt9rHP8Tj27t1L7969K/UaUpHKHub+FpgIVJ8C4JJUCbZu3Urz5s2Ji4sjISGBnj17lnr8zp07uLi4EBcXh4eHBz///DNQ1MsQHBxMfHw8LVq0UPY3MDBg/fr1HD16lD179vCvf/2L4lGEpKQkxowZw6lTp6hbty7z588nJyeHoKAgIiIiiI+PJz8/nwULFijna9iwIUePHmXAgAG8++67zJ07l9j
YWMLCwhgzZkwV3CFJkv6OmZkZNjY21KhRAysrK7p27YpKpcLGxgatVktmZiYBAQFYW1szfvx4Tpw4AUBUVBQDBgwAwNraGltb2wfOvXv3bgICAmjUqBEADRo0qJA2b9u2DbVajampKQcOHHjoviWH0jUaDaampqUez8/Pr5A2SVWv0oJJlUr1JpAuhIirrGtIUnVhY2PDjh07+Oijj9i/f78yt6hYzZo1lU/Ijo6OyifygwcPEhAQAMDbb7+t7C+E4OOPP8bW1pZu3bqRnp6u9Ga2bNkSNzc3AAYNGkRUVBRJSUmYmZlhbm4OwNChQ4mMjFTO179/f6Cox/PAgQMEBASgVqt57733uHjxYiXcEUmS/s6GY+m4zdyNWehm/BccIFf8bwpKjRo10NfXV77Pz8/ns88+w8vLi4SEBP744w9ycnIAyMzM5NChQ499fa1Wi7W1NQCFhYWMGzdOeczKyqrMHs66detiZGTEuXPnAOjRowcajQZra2vu3bv32G1YunQpffr0oUuXLnTt2vWB3sSxY8eydOlSoGj+p6WlJY6OjowbN07Z7/Dhw7i6umJvb0/Hjh1JSkp67HZIT+epgkmVSrVTpVIllPH1JvAx8PkjnONdlUoVo1KpYjIyMp6mOZJUpUq+EQxb9xfTl25ShqunTZtWal89PT1UKhVQNNTzd5/AV6xYQUZGBrGxsWg0Gpo0aaK8cRSfp9j9P5eldu3aQNEbRr169Ur1Dpw6deqRn7MkSRVjw7F0Jq2LJ/1mNgK4fCuHy7dy2HAsvdxjMjMzMTEpqo9eHGABvPLKK8qcypMnTxIfH//AsV26dGH16tVcu3YNgOvXr5d6vEaNGsoqboBJkyYREhLCpUuXALh37x6//PKL8tjo0aOVVeJCCOXv08NkZ2crQ9x+fn7K9qNHj7JmzRr27dtX7rE5OTm89957bNmyhdjYWErGC+UN/0tV56mCSSFENyGE9f1fQApgBsSpVCot0AI4qlKpmpZxjp+EEO2FEO0bN278NM2RpCpz/xvB+bS/mLEtBSMrL0JCQjh69OgjncfFxYW1a9cC8NtvvynbMzMzeeWVV9DT02PPnj2cP39eeSw1NZWDBw8CsHLlSjp16oSFhQVarZYzZ84A8Ouvv9K5c+cHrle3bl3MzMxYvXo1UPQmEBcnBw8kqarN2pZEdl4BAFfWzeDKmqncu36Bf03/BkAJDqGoRy49PZ2JEyfywQcfYGhoyIIFC7h06RL37t0jOjqa5ORkDAwMCAoKomHDhkyfPh03NzcyMjJITU1l1KhR5Obm0qJFC9q0acOECRNKtaegoKBUT9/06dPJyMigdevWtGnTBgcHB27dugXA6NGj6dq1Kx06dMDW1hY3Nzfs7e2xt7d/6HMuOcy9fv16ZXv37t3/dtg9MTGR1q1bK3M1Bw4cqDxW3vC/VHUqZZhbCBEvhHhFCGEqhDAF/gIchBCXKuN6klTVSr4RAORlaDm3KJhAn85MnTqVTz/99JHO89133zF79mxsbW05c+aMMjweGBhITEwMNjY2LFu2DEtLS+UYCwsL5s2bR9u2bblx4wajR4/GwMCAJUuWEBAQoMy5GjVqVJnXXLFiBYsWLcLOzg4rKytlcY8kSVXnws3/pdFp2CuY5u/Mp2Xwb6TtX8u1a9fQ19enX79+QFHP45tvvomrqysNGzbkzJkzXL58mUuXLlGzZk2mTZvGsGHDyMnJITw8nNzcXFJTU9m5cyd37tzB0tKSHTt2cP78eY4fP46xsXGpnk2ALVu2KN8X9/SlpqayceNGbG1tSUhIUAJQlUpFSEgISUlJHD9+nAMHDjBr1qwHpvdA6RGc7LyCMntei0dOAHR1dSksLFR+fpQez/KG/6Wqo/usGyBJz6OSbwQAhq0dMWztiAo4MtMHKFpJWKxkyo1+/fopbxImJiYcOnQIlUrFb7/9psz1adSokdL7WJJWq0VXV5fly5c/8FjXrl05duxYmceUZGZmxta
tWx/peUqSVDma1zMk/b9/R27HbuTu6aL/74W3r5GcnFzucW5ubgQFBfHWW2/Rt29fAHJzc1m3bh0HDx5ECMHrr7+Oubk5hoaGAOTl5TF27Fg0Gg06OjqcPn36oW3LzMxk6NChJCcno1KpnjgZefEITvEHbyFg0rqiIXhfe5Myj2nVqhUnT54kNzeX7Oxsdu3apYy+pKSkoNVqMTU1JSIiolR7yxr+l6pOlSQt/28P5UNzTErS86R5PcPH2l6e2NhY1Go1tra2zJ8/n2+++aYimidJUjXnZdkYFZCTepwcbRxNB4fx2nsLsLS2IScnp9Rc6JI9bQsXLmTGjBmkpaXh6OjItWvXMDQ05K233iIuLo7jx4/Tpk2bUr193377LU2aNCEuLo6YmJi/XShTUT1994/gAGTnFTBrW/kLZFq2bMlbb72FtbU1b731ljJ0bmhoyPz585W0ZnXq1FF6QidOnMikSZOwt7eXK8KfEdkzKUlPIKSHRalP3ACGejqE9LB4rPO4u7s/1pxFU1NTEhISHusakiRVLxuOpbM2Nh0BFObepYZBbXT0DOjc5B5LE4pGF5o0acKpU6ewsLBg/fr11KlTByjKTduhQwc6dOjAli1bSEtLo06dOty+fbvc62VmZtKiRQtq1KjBf/7zHwoKCsrdt3j/iujpu38E59UJax7YHhQURFBQUKn9vmwHEPYAACAASURBVP76a77++usHzufl5UViYiJCCN5//33aty/KFe7q6lqqt3XGjBkAeHp64unp+cTtlx6dLKcoSU/A196EL/vaYFLPEBVgUs+QL/valDt0I0mSVKxkj52hmSOisJC/fh7F6gVf4eLiAsDMmTPp3bs3HTt2pFmzZsqxISEh2NjYYG1tTceOHbGzs8PLy4uTJ0+iVqtLDf8WGzNmDP/5z3+ws7MjMTGxVK9lWSqqp6+iRnCK/fzzz6jVaqysrMjMzOS999574rZJFavSyyk+DllOUZIkSXrRmYVuLrOShwo499851y+C++dMQtEIzov0wVuWUywieyYlSZIkqQpVdI9ddSVHcF4eMph8CiVrLz+JCxcuKKt6JUmSpJdDSA8LDPV0Sm17kjnXzwNfexOiQ7twbqYP0aFdZCD5gnppg8n7C87PnDmzytvQvHlz1qxZ88D2mJiYUmWtJEmSpBeH7LGTXjQv7Wru4kz8ZfH19SUtLY2cnByCg4N55513eOedd4iJiUGlUjF8+HDGjx+v7F9YWMjw4cNp0aIFU6dOLXPfM2fOMGrUKDIyMtDR0WH16tXo6OjQu3dvEhISKCgoIDQ0lL1795Kbm8v7778PFOUqnDJlCo0aNSIhIQFHR0eWL1+OSqXiyJEjBAcHc+fOHfT19dm1axe1atV64DxykrIkSVL14mtvIoNH6YXx0vZMlufIkSOkp6eTn5+PoaEhX375JQMGDGD79u2oVCp0dXX5xz/+AcClS5f48MMPMTEx4Y8//uDevXtoNBrS09P55JNPAPjpp5/46KOPCAwM5P333+fs2bN07tz5/9m777CuyveB4+8DAqIoauBe0BcXeylIKDhw4U7JDWZlrrRy5fiiWVqZ5irTFNTMTEUyMwcq5UxAQNwTRTQcCIqArPP7gx/nKwquRFDu13V5XZ/PGc95zqHw9nmec994e3vTr18/0tLS8PDwoFq1aly9epWwsDBmz57N+PHjuXDhAmlpaezfv5+jR4+ip6dHeHg4f/31FwMGDMDNzY2EhAR8fX0JCQnB0NCQZcuWYWxsTFhYGGFhYSxdupQLFy4U5yMVQohi880335CamvrCr7tu3ToaN26Mp6cnUVFRbNmy5YX3QYgXpdQGk/cXnM9Lp5CRkYGPjw92dnZAblWBxMREqlevTkpKCh4eHgwbNoxhw4ZpSVwPHDjAkCFDiI+PZ+3atejr63PmzBneffddJk+eTHR0NAcOHODcuXN0796du3fv4uXlxYkTJyhfvjwJCQns2LEDe3t71q1bh52
dHcOGDSMjI4MzZ86wcuVKqlevzsmTJ4mJicHNzY2//vqLs2fP4uzszLlz5/Dz86NixYqUKVOG7du3s3LlSuzs7GjWrBk3bz66moIQQrzKniWYfFweRuCxKXOWLVvG0qVL2b17twST4pVXaoPJsmXLUqtWLVRVJSsri7ALiVgNnEZsXDzLV/zI3UzYv38/ZmZmrF69mjVr1uDi4sKMGTP4559/sLOz486dO5ibm7NhwwZ8fHxITU2lY8eOWFtb4+DgwJo1a/D29ubChQvcunWL1q1bo6+vj7u7O4MHDyY6Opp79+6xZcsWKlSoQJkyZYiKiuKHH36gVatWeHl5ERERweuvv671u1y5clSpUoXLly9z/vx5tm7dSsWKFbX9qqqyYMECoqKiiIqK4sKFC3h5eRXHIxZCiBfq7t27dOrUCVtbW6ysrJg2bRpXrlzB09MTT09PAN5//32cnJywtLTkv//9r3Zu/fr1GT9+PA4ODqxbt46zZ8/Spk0bbG1tcXBw4Ny5c4SGhuLu7k6XLl1o0qQJkLssytHREUtLS5YsWQLA9OnT2bt3L2+//TZjxoxh6tSprF27ttA8kEK89FRVLTF/HB0d1aK08fBltfnMnWr98ZtVpYye2qZ7H227xcfrVV2j11TdilVVw9edVYux69VvfglR9fT0VGNjY3XTpk3qxIkT1c8//1wtX768umfPHlVPT0/t3r276uPjo5YrV05t06aN+ssvv6gmJiZqjx491D///FPV09NTZ86cqVavXl1duXKlWr58eXXs2LHqDz/8oH7wwQeqqampamFhoc6fP1/V1dVVMzIy1N27d6stW7ZUU1JSVAsLC9XDw0O7h+HDh6sBAQHqzZs31apVq6otWrRQ/fz81Nu3b6uZmZnq999/r3bt2lXNyMhQVVVVT506paakpBTpcxUvj++++05dsWJFcXdDiCKxfv16dciQIdr3pKQktV69eur169e1bTdv3lRVVVWzsrLUli1bqtHR0aqqqmq9evXUL774QjuuadOmalBQkKqqqpqWlqbevXtX3b17t1quXDn1/PnzD7WXmpqqWlpaqjdu3FBVVVVbtmyphoWFqaqqqgEBAerw4cOL4pZFMQPC1RIQPxX3n1LzAs5DBedRCN21kx6+wzhj0ICULD10K5qSnZJIdtodLiwbzbRNtWjcuDEJCQn4+fmRmppKdnY22dnZDB8+HFVVSU1NpUuXLpw+fZrjx4+TnJxMeno6v/76K+Hh4TRv3pyQkBCmTp1KQEAAqampLFq0iFq1anHr1i2SkpIoW7YsrVq1QkdHBwcHB1JSUrh9+zZZWVk4OTlx4MAB7T7u3bvHnTt3KFOmDJs3b2bIkCGsWbOG48ePExISwpAhQ4iNjcXBwQFVVTE1NSU4OLi4HrsoYYYOHVrcXRCiyFhbW/PRRx8xfvx4vL29cXd3f+iYX375hSVLlpCVlcXVq1c5fvw4NjY2APj4+ABw584d4uPj6d69O5A7k5WnadOmmJmZad/nz5/Pxo0bAYiLi+PMmTO89tprRXaPQpREpWaa+6GC89lZoFeW34M3cCxgIonbvkVRFEy7TQBVRdEtw93bScyaNQsHBwc6d+6MqqqYm5uzY8cOoqOjWbp0qfYyTvPmzbG3t+c///kPLVu2ZOLEiQBERUXh6OjI+++/z65duyhXrhyNGjXit99+Y/jw4cycOZNLly5haWmJvr4+MTExBAQE4OrqirGxMUuWLKFFixZYWVlha2tLhw4daNGiBR4eHrzzzjsoikJQUBAHDx7EyMgIHR0dPv/8c2JiYjh69Ci7d+/G2Ni4OB65eEEKmmYzMjJi0qRJ2Nra4uLiQkJCAgD+/v7Mnj0byM2TOmbMGJycnGjcuDFhYWH06NEDCwsLJk+erLX/448/0rRpU+zs7HjvvfeeaD2ZEC9KcGQ8brN2YTbhd/yCLvNp4Gasra2ZPHky06dPz3fshQsXmD17Njt37uTIkSN06tRJW/8
OPLbM4IPHhIaGEhISwoEDB4iOjsbe3j5fe0KUFqVmZDL+gYLztd4PQNewAkoZfQyvRhG7dyNZNy8DUGPg1+TcS6W2SSUMDfXQ0dEhICCAGjVqcPv2ba1wvK2tLb6+vgQGBgKwefNmAGbPnk3r1q0ZNWoUDg4O2mhQYmIiKSkpfPLJJyxYsIAFCxagKAqRkZHY29uTkpIC5C9Ob2RkxIoVKx66n8OHDz/vRyReUsuXL6dKlSqkpaXh7OxMz549uXv3Li4uLnz22WeMGzeOpUuX5gsQ8+jr6xMeHs68efPo2rUrERERVKlShddff50xY8Zw7do11q5dy759+9DT02PYsGGsXr2agQMHFsOdCpHfgzNOF+MuMyM5hS96ezJ2bCV++OEHKlSowJ07dzAxMeH27duUL18eY2NjEhIS+OOPP7TftferUKECtWvXJjg4mG7dunHv3r0C/xGVnJxM5cqVKVeuHCdPnuTgwYMF9jOvD0K8qkpNMKmrKGTfV4c883os10IDUBSFuiYVqeYxmHtZ2STu+B416x66+gZ8sXYTZF7RzpkyZQqjR4/GxsaGnJwczMzMtACyIKampixZsoQePXqQk5ND1apV2bFjx1O3I8SjFDTNpq+vj7e3NwCOjo7s2LGjwHO7dOkC5E4PWlpaUqNGDQDMzc2Ji4tj7969RERE4OzsDORmQahatWpR35IQT+TBGafM67FcWBdAvxW6NKlVme+++44DBw7Qvn17atasye7du7G3t6dRo0bUqVMHNze3QttetWoV7733HlOnTkVPT49169Y9dEz79u1ZvHgxjRs3pmHDhri4uBTYlqenJ7NmzcLOzo6JEydq0+lPw8PDg9mzZ+PkVOrLQIsSqNQEk/cHkgCG5o4YmjsCcGZWJ4Ij4/lq2ynK1viampUMGduu4f8nlG2g/cvV0NCQ77///qG2fX198fX11b7fHxh26NCBDh065L92Ie0I8STy/lu9kpRG+cRTZIdtIeLAAcqVK4eHhwfp6eno6emhKAqQW+2psDQmBgYGAOjo6Gif875nZWWhqiqDBg1i5syZRX9jQjylKw/MOOX9XleAsFmdAHBycmLkyJHaMXkzSQ+KjY3N993CwoJdu3bl22Zubp5vJNPAwIA//vijwPZCQ0O1z1WqVCEsLOzRNyPES6zUrJmsVcnwkdulfqh4GeRN68UnpaEC127eIu6uwvZTtx45zfasWrduzfr167l27RqQu1Tj4sWLz/UaQjyrmoX8Xi9s+8sgNjaWxo0b884772BpaYmXlxdpablB86pVq7Czs8PKyopDhw4BcOjQIVxdXbG3t6d58+acOnWqOLsvSqlSE0yObdcQQz3dfNsM9XQZ265hMfVIiKf34LSeoZkj2VnZ9G3XnAkTJhQ6zfasmjRpwowZM/Dy8sLGxoa2bdty9erV53oNIZ7Vq/p7/cyZMwwfPpxjx45RqVIlNmzYAEBqaipRUVF8++23DB48GIBGjRqxZ88eIiMjmT59Op988klxdl2UUor6wPRvcXJyclLDw8OLrP37pwfzT2UL8XIwm/A7Bf0fqwAX/n9aT4jS5FX7vR4bG0vbtm21ymVffPEFmZmZWoq5Vq1aAVC3bl2OHDnCnTt3GDVqFGfOnEFRFDIzMzl58mRx3kKpoihKhKqqpX4ha6lZMwm5U9kv8y8ZIWpWMnwoM0HediFKo1fh9/r9AXEVNZl76v9GW3V1dbVp7rx10HkURWHKlCl4enqyceNGYmNjC3w7XYiiVmqmuYV4Fbyq03pClFYProNOuJ1Owu10giPjHzo2rxTj3r17MTY2xtjYmOTkZGrVyg2mC3u5SIiiJsGkEC+Rbva1mNnDmlqVDFHIfYFsZg/rl35kRojS6qGCGuSWOf5q28Mv0pQtWxZ7e3uGDh3KsmXLABg3bhwTJ07E3t6+0KwNQhS1UrVmUgghhChJZB30y03WTOaSkUkhhBCimLyK6Y1E6SPBpBBCCFFMZB20eBW
Uqre5hRBCiJIkb73zq5TeSJQ+MjIpxAsQGhqq1cp+lD59+mBjY8PcuXOfqv3AwEBGjBhR4L6OHTuSlJT0VO09LSMjo0fuDw8PZ9SoUUXaByFeVlKBTbzsZGRSiBLin3/+ISwsjLNnzz7Xdrds2fJc23sWTk5OODmV+jXqQgjxSpKRSSGeUGxsLI0aNcLX15cGDRrQr18/QkJCcHNzw8LCgkOHDj1Rndy7d+8yePBgmjZtir29Pb/++isAXl5exMfHY2dnx549e1i6dCnOzs7Y2trSs2dPUlNTAVi3bh1WVlbY2trSokULrd0rV67Qvn17LCwsGDdunLa9fv363LhxA4A5c+ZgZWWFlZUV33zzjXZfhdUCLqwPFy5cwNXVFWtrayZPnqxd66233uL333/Xvvv6+rJ+/fp8I7MpKSn4+flhbW2NjY2NVipu+/btuLq64uDgQK9evUhJSfmXPzEhhBAvhKqqJeaPo6OjKkRJdeHCBVVXV1c9cuSImp2drTo4OKh+fn5qTk6OGhwcrHbt2lVNTk5WMzMzVVVV1R07dqg9evRQVVVVd+/erXbq1ElVVVWdOHGiumrVKlVVVfXWrVuqhYWFmpKSol64cEG1tLTUrnfjxg3t86RJk9T58+erqqqqVlZW6uXLl7XzVVVVAwICVDMzMzUpKUlNS0tT69atq166dElVVVWtV6+eev36dTU8PFy1srJSU1JS1Dt37qhNmjRRDx8+rN1XZGSkqqqq2qtXL61/hfWhc+fO6ooVK1RVVdWFCxeq5cuXV1VVVYOCgtSBAweqqqqq9+7dU2vXrq2mpqbmu/9x48apH3zwgdZuYmKiev36ddXd3V1NSUlRVVVVZ82apU6bNu3ZflBCCPGCAOFqCYifivuPTHML8RTMzMywtrYGwNLSktatW6MoCtbW1sTGxpKcnMygQYPy1cl90Pbt29m0aROzZ88GID09nUuXLmFomD8VyNGjR5k8eTJJSUmkpKTQrl07ANzc3PD19aV379706NFDO75169YYGxsD0KRJEy5evEidOnW0/Xv37qV79+6UL18egB49erBnzx66dOmCmZkZdnZ2ADg6OhIbG/vIPuzbt08bURwwYADjx48HoEOHDnzwwQfcu3ePrVu30qJFi4fuKyQkhJ9//ln7XrlyZTZv3szx48dxc3MDICMjA1dX1yf7oQghhChWEkwK8QiPqpmro6ODgYGB9jkrK+uJ6uSqqsqGDRto2DB/6o+8AC6Pr68vwcHB2NraEhgYSGhoKACLFy/m77//5vfff8fR0ZGIiAgArS+QW8/3aaphPHhu3jR3YX2Ah+sEQ26FDg8PD7Zt28batWt56623nuj6qqrStm1b1qxZ88R9FkIIUTLImkkhCvE0NXPzPEmd3Hbt2rFgwQLU/68+FRkZWeBxd+7coUaNGmRmZrJ69Wpt+7lz52jWrBnTp0/H1NSUuLi4J7ofd3d3goODSU1N5e7du2zcuBF3d/dHnlNYH9zc3LTRxfu3A/j4+BAQEMCePXto3779Q222bduWRYsWad9v3bqFi4sL+/bt014+unv3LqdPn36i+xJCCFG8JJgUohBPUzM3z5PUyZ0yZQqZmZnY2NhgaWnJlClTCjzu008/pVmzZri5udGoUSNt+9ixY7G2tsbKyormzZtja2v7RPfj4OCAr68vTZs2pVmzZgwZMgR7e/tHnlNYH+bNm8eiRYuwtrYmPj5/cO3l5cWff/5JmzZt0NfXf6jNyZMnc+vWLe0lot27d2NqakpgYKCWGsnV1ZWTJ08+0X0JIYpPaGgo+/fvL5Zr+/v7a8uFRPGS2txCFEJq5gohxKP5+/tjZGTExx9//MTnZGVlUabMv19l9yzXft6kNncuGZkUohBSM1cIURo8SdqzxMREunXrho2NDS4uLhw5coTY2FgWL17M3LlztZRmsbGxtGrVChsbG1q3bs2lS5eA3PXXQ4cOpVmzZowbN67AFGHLly9n9OjRWr+
WLl3KmDFjAFi5ciU2NjbY2toyYMCAh+7h3LlztG/fHkdHR9zd3bWZjcJSqYnnS17AEaIQY9s1ZGJQTL6pbqmZK4R4FZ09e5Z169axfPlynJ2d+emnn9i7dy+bNm3i888/p06dOtjb2xMcHMyuXbsYOHAgUVFRDB06NN/oYOfOnRk0aBCDBg1i+fLljBo1iuDgYAAuX77M/v370dXVZfz48RgbGxMTEwPkrp3W09Pjs88+46uvvkJPT4+AgAC+//57jh07xowZM9i/fz8mJiYkJiY+1P93332XxYsXY2Fhwd9//82wYcPYtWsX06dPZ9u2bdSqVavIK4GVZhJMClEIqZkrhCgtHpf27OLFi1o6sFatWnHz5k1u3779UDsHDhwgKCgIyE0bdn8BhV69eqGrm5sRo6AUYXltb968mcaNG5OZmYm1tTULFiygV69emJiYAFClSpV810xJSWH//v306tVL23bv3j2g8FRq4vmSYFKIR+hmX0uCRyHEK+dp057p6en962vm5bh9lCFDhvD555/TqFEj/Pz8nqjdnJwcKlWqRFRU1EP7Ckql9tprrz1138WjyZpJIYQQohR5lrRn7u7uWhqw0NBQTExMqFixIhUqVODOnTvacc2bN8+XNqyw9GMFpQgDaNasGXFxcfz000/06dMHyB2tXLduHTdv3gR4aJq7YsWKmJmZsW7dOiA360Z0dDTw7KnUxNORYFIIIYQoRZ4l7Zm/vz8RERHY2NgwYcIEVqxYAeSukdy4caP2As6CBQsICAjAxsaGVatWMW/evALbKyhFWJ7evXvj5uamTX1bWloyadIkWrZsia2tLR9++OFD7a1evZply5Zha2uLpaUlv/76K/DsqdTE05HUQEIIIUQpUtLTnnl7ezNmzBhat25d3F15LEkNlEtGJoUQQohSpKSmPUtKSqJBgwYYGhq+FIGk+B95AUcIIYQoRUpq2rNKlSpJGdWXlASTQgghxHPyvKq7FCVJeyaet5L9X7wQQghRTFauXMns2bNRFAUbGxt69+7NjBkzyMjI4LXXXmP16tVUq1YNf39/zp07x/nz56lbty6TJ0/Gz8+PjIwMcnJy2LBhAxYWFsV9O/lI2jPxPEkwKYQQQjygoKoriqJw8OBBFEXhhx9+4Msvv+Trr78G4Pjx4+zduxdDQ0NGjhzJBx98QL9+/cjIyCA7O/sxVxPi5SbBpBBCCPGAXbt2PVR1JSYmBh8fH65evUpGRgZmZmba8V26dMHQMPcFFldXVz777DMuX75Mjx49StyopBDPm7zNLYQQQpCbzNtt1i7MJvzO3B2nOfXPnXz7R44cyYgRI4iJieH7778nPT1d23d/dZe+ffuyadMmDA0N6dixI7t27Xph9yBEcZBgUgghRKn3YFWYdNPGbAoOYuXuo0Bu1ZXk5GRq1cpdZ5iXtLsg58+fx9zcnFGjRtG1a1eOHDnyIm5BiGIj09xCCCFKvQerwuib1qOiS2+G9unM19UqYm9vj7+/P7169aJy5cq0atWKCxcuFNjWL7/8wqpVq9DT06N69ep88sknL+o2hCgWUgFHCCFEqVfSq8KIkkkq4OSSaW4hhBClXkmtCiPEy0CCSSGEEKXe2HYNMdTTzbetJFSFEeJlIGsmhRBClHpSFUaIZyfBpBBCCIFUhRHiWck0txBCCCGEeGYSTAohhBBCiGcmwaQQQgghhHhmEkwKIYQQQohnJsGkEEIIIYR4ZhJMCiGEEEKIZ1akwaSiKCMVRTmpKMoxRVG+LMprCSGEEEKIF6/IgklFUTyBroCtqqqWwOyiupYQQohXk4eHB+Hh4U90bGxsLFZWVkXcIyHEg4pyZPJ9YJaqqvcAVFW9VoTXEkII8QKpqkpOTk5xd+OJZGVllYg2hHhVFWUw2QBwVxTlb0VR/lQUxbkIryWEEKKIxcbG0rBhQwYOHIiVlRW6uv+rZb1+/Xp8fX0B8PX1ZejQoTg5OdGgQQM2b96sne/u7o6DgwM
ODg7s378fgNDQULy9vbW2RowYQWBgYL5rZ2dn4+vri5WVFdbW1sydOxeAiIgIbG1tsbW1ZdGiRdrxgYGBdOnShVatWtG6dWtSUlJo3bo1Dg4OWFtb8+uvv2rHfvrppzRs2JA33niDPn36MHt27kSah4cHo0ePxsnJiXnz5vHbb7/RrFkz7O3tadOmDQkJCQCkpKTg5+eHtbU1NjY2bNiwAYDt27fj6uqKg4MDvXr1IiUl5Xn8GIQocf5VOUVFUUKA6gXsmvT/bVcBXABn4BdFUcxVVVUfaONd4F2AunXr/pvuCCGEKGJnzpxhxYoVuLi4YGRkVOhxsbGxHDp0iHPnzuHp6cnZs2epWrUqO3bsoGzZspw5c4Y+ffo88RR2VFQU8fHxHD16FICkpCQA/Pz8WLhwIS1atGDs2LH5zjl8+DBHjhyhSpUqZGVlsXHjRipWrMiNGzdwcXGhS5cuhIeHs2HDBqKjo8nMzMTBwQFHR0etjYyMDK2Pt27d4uDBgyiKwg8//MCXX37J119/zaeffoqxsTExMTHacTdu3GDGjBmEhIRQvnx5vvjiC+bMmcPUqVOf/GG/pIyMjPIFzt988w0TJkwgISEBY2Pjp24vKyuLGjVq8PbbbzNr1qzn2VXxnPyrYFJV1TaF7VMU5X0g6P+Dx0OKouQAJsD1B9pYAiwBcHJyUh9qSAghRIlRr149XFxcHntc79690dHRwcLCAnNzc06ePImZmRkjRowgKioKXV1dTp8+/cTXNTc35/z584wcOZJOnTrh5eVFUlISSUlJtGjRAoABAwbwxx9/aOe0bduWKlWqALnT8p988gl//fUXOjo6xMfHk5CQwL59++jatStly5albNmydO7cOd91fXx8tM+XL1/Gx8eHq1evkpGRgZmZGQAhISH8/PPP2nGVK1dm8+bNHD9+HDc3NyA3KHV1dX3i+32VrFmzBmdnZ4KCgvDz83vq83fs2EGDBg1Yt24dM2fORFGUIuil+DeKcpo7GPAEUBSlAaAP3CjC6wkhhCgCwZHxuM3axRtf7CIhLfc7kO8v9fT09HznPPgXvqIozJ07l2rVqhEdHU14eDgZGRkAlClTJt/6ywfbgtwALTo6Gg8PDxYvXsyQIUMe2+/y5ctrn1evXs3169eJiIggKiqKatWqFXidR7UxcuRIRowYQUxMDN9///0jz1dVlbZt2xIVFUVUVBTHjx9n2bJlj73eq+bcuXOkpKQwY8YM1qxZo20PDAykW7dutG3blvr167Nw4ULmzJmDvb09Li4uJCYmaseuWbOGDz74gLp163LgwIHiuA3xGEUZTC4HzBVFOQr8DAx6cIpbCCFEyRYcGc/EoBjik9IAyMrOYWJQDMGR8VSrVo0TJ06Qk5PDxo0b8523bt06cnJyOHfuHOfPn6dhw4YkJydTo0YNdHR0WLVqFdnZ2UDuaOfx48e5d+8eSUlJ7Ny586F+3Lhxg5ycHHr27MmMGTM4fPgwlSpVolKlSuzduxfIDRgLk5ycTNWqVdHT02P37t1cvHgRADc3N3777TfS09NJSUnR1ncW1katWrUAWLFihba9bdu2+dZr3rp1CxcXF/bt28fZs2cBuHv37lONxL4qfv75Z9566y3c3d05deqUCoPsPgAAIABJREFUts4U4OjRowQFBREWFsakSZMoV64ckZGRuLq6snLlSiD3HxYhISF07tyZPn365AtIRclRZMGkqqoZqqr2V1XVSlVVB1VVdxXVtYQQQhSNr7adIi0zO9+2tMxsvtp2ilmzZuHt7U3z5s2pUaNGvmPq1q1L06ZN6dChA4sXL6Zs2bIMGzaMFStWYGtry8mTJ7VRvzp16tC7d2+srKzo3bs39vb2D/UjPj4eDw8P7Ozs6N+/PzNnzgQgICCA4cOHY2dnx6PGK/r160d4eDjW1tasXLmSRo0aAeDs7EyXLl2wsbGhQ4cOWFtbF7quz9/fn169euHo6IiJiYm2ffLkydy6dQsrKyt
sbW3ZvXs3pqamBAYG0qdPH2xsbHB1deXkyZNP8MRfLWvWrOGtt95CR0eHnj17sm7dOm2fp6cnFSpUwNTUFGNjY22JgbW1NbGxsQBs3rwZT09PDA0N6dmzJ8HBwdo/QkTJoZSkwUInJyf1SRdjCyGEKHpmE36noL8lFODCrE4FnuPr64u3tzdvvvlmkfbteUlJScHIyIjU1FRatGjBkiVLcHBwKO5uvVSCI+P5atspriSlcWnum2w4eJbXyyTi5OSk/UMjb53pvn37CAwMJDw8nIULFwJQv359wsPDMTExybevZ8+e7N27F0NDQwCuXbvGr7/+Stu2bYvtXu+nKEqEqqpOxd2P4vavXsARQgjxaqtZyVCb4n5w+6vi3Xff5fjx46SnpzNo0CAJJJ9S3lKIvBFsVYWJQTFYxv+Ov78/EydO1I41MzPTlhg8zu3bt9mzZw9xcXEYGBgAuSPRa9asKTHBpMglwaQQQohCjW3XMF+gAGCop8vYdg0LPefBHJEl3U8//VTcXXipFbYUYnPwBmbsz7/CrXv37vz8889Uq1btse1u3LiRVq1aaYEkQNeuXRk3bhz37t3Lt10UL5nmFkII8Uj3T2HWrGTI2HYN6WZfq7i7JUqIZ1kK8aqQae5cMjIphBDikbrZ15LgURSqNCyFEI9WlKmBhBBCCPGKG9uuIYZ6uvm2PW4phHi1yMikEEIIIZ5Z3qi1LIUovSSYFEIIUeQeTAUjXi2yFKJ0k2luIYQQQgjxzCSYFEKIUmzlypXY2Nhga2vLgAED+O2332jWrBn29va0adNGK3/n7+/P4MGD8fDwwNzcnPnz52ttdOvWDUdHRywtLVmyZIm2PSAggAYNGtC0aVP27dunbS/sGkKIl5OkBhJCiFLq2LFjdO/enf3792NiYkJiYiKKolCpUiUUReGHH37gxIkTfP311/j7+7N9+3Z2797NnTt3aNiwIf/88w96enokJiZSpUoV0tLScHZ25s8//yQjI4NmzZoRERGBsbExnp6e2Nvbs3DhQm7dulXgNYR42UhqoFyyZlIIIUqpXbt20atXL63OdJUqVYiJicHHx4erV69q5e/ydOrUCQMDAwwMDKhatSoJCQnUrl2b+fPns3HjRgDi4uI4c+YM//zzDx4eHpiamgLg4+PD6dOnAbh8+XKh1xBCvHxkmlsIIUqZ4Mh43Gbtwn/TMVbsjyU4Ml7bN3LkSEaMGEFMTAzff/896enp2r77K47o6uqSlZVFaGgoISEhHDhwgOjoaOzt7fOdU5BHXUMI8fKRYFIIIUqRvDrK8UlpGNS1ISE6lHE/7iM4Mp7ExESSk5OpVSv3rdwVK1Y8tr3k5GQqV65MuXLlOHnyJAcPHgSgWbNm/Pnnn9y8eZPMzEzWrVuX75ynuYYQomSTYFIIIUqR++so65vWw9jVh9iVY+nXsQUffvgh/v7+9OrVC0dHR236+1Hat29PVlYWjRs3ZsKECbi4uABQo0YN/P39cXV1xc3NjcaNG2vnPO01hBAlm7yAI4QQpUhprqMsxPMmL+DkkpFJIYQoRQqrlyx1lIUQz0qCSSGEKEWkjrIQ4nmT1EBCCFGKSB1lIcTzJsGkEEKUMlJHWQjxPMk0txBCCCGEeGYSTAohhBBCiGcmwaQQQgghhHhmEkwKIYQQQohnJsGkEEIIIYR4ZhJMCiGEEEKIZybBpBBCCCGEeGYSTAohhBBCiGcmwaQQQgghhHhmEkwKIYpN/fr1uXHjxjOf37x58+fYGyGEEM9CgkkhxEsnKysLgP379xdzT4QQQkhtbiHEC3H37l169+7N5cuXyc7OZsqUKdq+tLQ0evToQY8ePYiLi6NKlSqMHj0agEmTJlG1alVsbW2ZMmUKlStX5uTJk5w+fRojIyNSUlIIDQ3lv//9L+XLl+fQoUMMGTIEa2tr5s2bR1paGsHBwbz++uvFdetCCPFKk5FJIcQLsXXrVmrWrEl0dDRHjx6lffv2AKSkpNC5c2f
69OnDO++8w+DBg1m5ciUAOTk5/Pzzz/Tv3x+Aw4cPM2/ePE6fPv1Q+9HR0fj7+1OtWjVWrVrF6dOntcBy3rx5L+5GhRCilJFgUgjxQlhbW7Njxw7Gjx/Pnj17MDY2BqBr1674+fkxcOBAIHcd5WuvvUZkZCTbt2/H3t6e1157DYCmTZtiZmZWYPvOzs7MmTOH8+fPk5yczC+//IK7uztr165l+fLlAPz44480bdoUOzs73nvvPbKzswHYvn07rq6uODg40KtXL1JSUgCYMGECTZo0wcbGho8//rhIn48QQrysJJgUQhSp4Mh43Gbtot3yM1Qd+A33KtRi8uTJTJ8+HQA3Nze2bt2KqqraOUOGDCEwMJCAgAAGDx6sbS9fvnyh1zEwMGDWrFm8/vrrODk5MXr0aA4fPszIkSPx8PDgxIkTrF27ln379hEVFYWuri6rV6/mxo0bzJgxg5CQEA4fPoyTkxNz5szh5s2bbNy4kWPHjnHkyBEmT55cdA9JvBICAwMZMWJEcXdDiBdOgkkhRJEJjoxnYlAM8UlpZN65SUKqyrZ7DXijx2AOHz4MwPTp06lcuTLDhw/XzuvevTtbt24lLCyMdu3aPfP1mzZtSo0aNQDYuXMnERERODs7Y2dnx86dOzl//jwHDx7k+PHjuLm5YWdnx4oVK7h48SLGxsaULVuWt99+m6CgIMqVK/fvHoYQQryiJJgUQhSZr7adIi0zdyo583osV1d+yLklw5j31cx8I315L8qMGzcOAH19fTw9Penduze6urqPvc7eM9fZf/YGb3yxi/M37nIj5R6QfyRTVVUGDRpEVFQUUVFRnDp1Cn9/f1RVpW3bttr248ePs2zZMsqUKcOhQ4d488032bx5s7bGU7yaunXrhqOjI5aWlixZsgQAIyMjxowZg6WlJa1bt+b69esAeHh48MEHH2BnZ4eVlRWHDh16qL3r16/Ts2dPnJ2dcXZ2Zt++fS/0foR4kSSYFEIUmStJadpnQ3NHag5eSE2/BZj2/xonJydiY2MxMTFBURQCAgL48ssvgdwXbw4ePMjbb7+tne/h4cHmzZvztZ+SkkJwZDyrL1WgYrcpKPqGZKTdJbvDf7muUyXfea1bt2b9+vVcu3YNgMTERC5evIiLiwv79u3j7NmzQO5b56dPnyYlJYXk5GQ6duzI3LlziY6OLtJnJYreZ599hqWlJTY2NtjZ2fH3339ruU6XL19OREQE4eHhzJ8/nx9//JG7d+/i5OTEsWPHaNmyJdOmTdPaSk1NJSoqim+//TbfUow8H3zwAWPGjCEsLIwNGzYwZMgQIPe/x/Dw8IeO79ixI0lJSQAoisJHH32k7Zs9ezb+/v6PvDd/f39mz54NQHp6Om3btn3sOQW5cuUKb775JgDh4eGMGjXqqdsQpY+kBhJCFJmalQyJvy+gvH97YY4fP463tzfdu3fHwsKCpKQkfvrpJ4YNGwbA2LFj2bJlCx07dqRGjRpM/mIeWaoOuuUq8lqH0RjUasK5xe8xy8AQD7v/aO02adKEGTNm4OXlRU5ODnp6eixatAgXFxcCAwPp06cP9+7ljmjOmDGDChUq0LVrV9LT01FVlTlz5jznpyNepAMHDrB582YOHz6MgYEBN27cICMjQ9s/f/58Nm7cCEBcXBz/+c9/0NHRwcfHB4D+/fvTo0cP7fg+ffoA0KJFC27fvq0FgnlCQkI4fvy49v327dvai10F2bJli/bZwMCAoKAgJk6ciImJyVPdZ0ZGBj179sTR0fGZgsmaNWuyfv16AJycnHBycnrqNkTpI8GkEKLIjG3XkIlBMdpUN4Chni5j2zUs9JwmTZpw/vx57XtSUhLffvutFkwuWbKExMREdHV12b17N6b956DoleVO5BZuhQZg2nU8AAqweVanfG37+PhowcH9WrVqRVhY2EPbC5q+FC+nq1evYmJigoGBAQB74+7x1bZTXL6VRmOvvtw5uY/XzeqzYcMGhg4dSnBwsPZ
SmK+vL6qqcubMGczNzTE0NERRFHJychgxYgRXr16le/fu3Lx5k5o1awK5QV2FChVITU3FxMSEP/74AyMjI60/OTk5DB48mNq1azNjxgzq169PeHg4JiYmlClThnfffZe5c+fy2Wef5buP2NhYBg8ezI0bNzA1NSUgIIC6desCucn8fXx8sLCwYNasWQBMnTq1wLyto0aNYty4cfzxxx8oisLkyZPx8fEhNjYWb29vjh49SmhoKLNnz2bz5s34+/tz6dIlzp8/z6VLlxg9erSMWgqNTHMLIYpMN/tazOxhTa1KhihArUqGzOxhTTf7Wk/cxoQJEzh37hx2dna0bduWlJQUHB0dWbt2LZ6entQ2rQyAQc2GZN/5X2nGR41+itLHy8uLuLg4GjRoQIfeg/hg7mpt1DwtW4cyta14o0s/Jk+ezMGDB4HcdbZ5o3QRERH4+fmxefNmzp8/z9q1awkKCuLw4cM0atSINWvWaEslMjMzKVOmDK1atSIiIoLBgwczdOhQrS9ZWVn069cPCwsLZsyYUWB/hw8fzurVq0lOTs63feTIkQwaNIgjR47Qr1+/fAHdl19+ib6+Pt988422rbC8rUFBQURFRREdHU1ISAhjx47l6tWrj3yGJ0+eZNu2bRw6dIhp06aRmZn5RM9evPpkZFIIUaS62dd6quDxQbNmzeLo0aNERUUBuS9F5H2G/41+Jh7ZTllzR+Dxo5+i9DEyMiIiIoI9e/Yw4NPlXPl7JpVb+gJg7PomidsXs2LxfCqUycbFxQVAewnr119/pWbNmvz3v//F1NSUjIwMypYty7Bhw9DX1ycoKIjq1avTuHFjAE6dOkV6ejrz5s3jyy+/RFXVfNPV7733Hr1792bSpEmF9rdixYoMHDiQ+fPnY2j4v38YHThwgKCgIAAGDBigvbQG8MYbb7B//35Onz5NgwYNgPx5WxMSErS8rXv37qVPnz7o6upSrVo1WrZsSVhYGDY2NoX2qVOnThgYGGBgYEDVqlVJSEigdu3aT/mTEK8iCSaFEC+1bva1CP19A8uun6eS5+fUqmTI2HYN/1UAK14NwZHxfLXtFFeS0qiZ99+Fhwdltt6lSoVa3D26EwAd/XJU6z2NjKtnaHAxmNDQUAIDAwGYM2cOiYmJeHt7Y2pqqrWdV5XJ1taWpk2bAlCvXj369u2LqqpYW1tz4MCBAvvVvHlzdu/ezUcffUTZsmUL7f/o0aNxcHDAz8/vie63RYsWDBo0iA4dOrB3714tLVZe3tZ//vmnwJeFnlTeEgEAXV1dsrKynrkt8WqRaW4hRImTl+jcbMLv9PxuP7fTC/9LKyQkhG2rv+NceCixs7uxb0IrCSRFvhynKhB77gwf/bCV4Mh4alYyJOPaBcoYV813jmkFg4IbK4SbmxsbNmwgJyeHhIQEQkNDAWjYsCHXr1/XgsnMzEyOHTumnff222/TsWNHevfu/ciArEqVKvTu3Ztly5Zp25o3b87PP/8MwOrVq3F3d893Ts+ePfn4449p37699lJQQXlb86pDZWdnc/36df766y8tKBbiaUkwKYQoUR4MAq6lK1y9fovgyPiHjo2MjOS9995j06ZNVK1a9eHGRKl1f45TgJzMdOKDZ9O3fXPilw0nJzEOY7e+2n5DPV0GuNbL18Z7771XYNuGhoY4OTnRs2dPateuTZMmTejfvz8ODg4YGxujr6/P+vXrGT9+PLa2ttjZ2bF///58bXz44YfY29szYMAAcnJyCr2Pjz76iBs3/rcWeMGCBQQEBGBjY8OqVasKrDv//vvv0717d7p06UJ6enqBeVu7d++OjY0Ntra2tGrVii+//JLq1asDuamJhHgayv0lzIqbk5OTWlD+LSFE6eE2a9dD6YSub/oKNfEiwwe8yXfffaelWGnTpg0xMTHadF7dunXZtGnTC++zKHnMJvxOQX+7KcCFWZ0KngJ/hhHtlJQUjIyMuHnzJk2bNmXfvn1aUFZS5OTk4ODgwLp
167CwsHjksREREXz44Yf8+eefL6h3LzdFUSJUVS31+ZNkzaQQokS5UkBeStMuY1GAr2Z14quvvtK2h4SEvMCeiZfJ43Kc/tsXw/J4e3uTlJRERkYGU6ZMKXGB5IN5Wx8lPDycvn37ammFhHhSMs0thChRCkvp86qk+pk6depjg+CTJ09iZ2eHvb09586de0E9+/d0dXWxs7PD0tISW1tbvv76a20K90mqqQQGBjJixIgC992fo/FJjG3XEEO9/KU4i+It/9DQUK0Mp6+v73Nt+3nIy9v69ddfP/ZYJycnTp8+nS85uxBPQkYmhRAlyrMkOn+ZTJ8+/bHHBAcH8+abb+arX/4yMDQ01NI2Xbt2jb59+3L79m2mTZv2wqup5I06Po+pbCHEo8nIpBCiRHkeic6LUrdu3XB0dMTS0pIlS5aQnZ2Nr68vVlZWWFtbM3fuXACioqJwcXHBxsaG7t27c+vWLSC3mkpeIuzp06fj7OyMlZUV7777LqqqsmXLFr755hu+++47PD09i+0+/62qVauyZMkSFi5ciKqqhIaG4u3tDeRWFnJ1dcXe3p7mzZtz6tQp7by4uDg8PDywsLDIVws7j6qqjB07Vnvea9euLbQP3exrsW9CKy7M6iRv+QtRhGRkUghR4jyv9WxFYfny5VSpUoW0tDScnZ1xdHQkPj6eo0ePAmjpWAYOHMiCBQto2bIlU6dOZdq0afkqkwCMGDGCqVOnArkJqDdv3kznzp0ZOnQoRkZGfPzxxy/25p4zc3NzsrOzuXbtWr7tjRo1Ys+ePZQpU4aQkBA++eQTNmzYAOQGmkePHqVcuXI4OzvTqVOnfCOa91duuXHjBs7OzrRo0UJ7CUsI8eLJyKQQQjyF+fPnY2tri4uLC3FxcWRkZHD+/HlGjhzJ1q1bqVixIsnJySQlJdGyZUsABg0axF9//fVQW7t376ZZs2ZYW1uza9eufLkIX2XJycn06tULKysrxowZk+++27Zty2uvvYahoSE9evRg7969+c4trHKLEKL4yMikEEI8wv0pZMonniI7bAsRBw5Qrlw5PDw8uHfvHtHR0Wzbto3Fixfzyy+/aFPdj5Kens6wYcMIDw+nTp06+Pv7k56e/gLu6Pm6//mkZWYTHBmvjSqfP38eXV1dqlatyokTJ7RzpkyZgqenJxs3biQ2NhYPDw9t34M5DiXnoRAln4xMCiFEIR5KoH7zFnF3FbafusXJkyc5ePAgN27cICcnh549ezJjxgwOHz6MsbExlStXZs+ePQCsWrVKG6XMkxc4mpiYkJKSoq2jfJk8+HxUFSYGxRAcGc/169cZOnQoI0aMeCggTE5Oplat3IAzr2xhnh07dpCYmEhaWhrBwcG4ubnl2y+VW4QoeWRkUgghCvFgFRVDM0fuRP5B33bN8Wpuj4uLC/Hx8Xh4eGgpcGbOnAnAihUrGDp0KKmpqZibmxMQEJCv7UqVKvHOO+9gZWVF9erVcXZ2fi59rl+/PuHh4ZiYmDyX9h7lweejZmVwbskw+i7JwaK6MQMGDODDDz986Lxx48YxaNAgZsyYQadOnfLta9q0KT179uTy5cv079//oTfAu3fvzoEDB7C1tUVRlHyVW4QQxUMq4AghRCEeV0WlJHqRweTL+HyEeJ6kAk4umeYWQohClIQE6rGxsTRq1Ih+/frRuHFj3nzzTVJTU9m5cyf29vZYW1szePBg7t27l++8tLQ0OnTowNKlSwH48ccfadq0KXZ2drz33ntkZ+eOKBoZGTF27FgsLS1p06YNhw4dwsPDA3Nzc600ZXp6On5+flhbW2Nvb8/u3bsBKHPuT65t/IyEX6YSv+Qdbu1eDrw6CeaFEE9GgkkhhCjEi6qi8jinTp1i2LBhnDhxgooVKzJnzhx8fX1Zu3YtMTExZGVl8d1332nHp6Sk0LlzZ/r06cM777zDiRMnWLt2Lfv27SMqKgpdXV1Wr14NwN27d2nVqhXHjh2jQoUKTJ48mR07drB
x40YtbdGiRYtQFIWYmBjWrFnDoEGDSE9Pp4NVDbKuXcC063hqDl7E3ZN7KJOa+MokmBdCPBkJJoUQohAlJYF6nTp1tBdR+vfvz86dOzEzM6NBgwbAw6mHunbtip+fHwMHDgRg586dRERE4OzsjJ2dHTt37uT8+fMA6Ovr0759ewCsra1p2bIlenp6WFtbExsbC+Sm4+nfvz+QmyOyXr16nD59God6lWnp6UmdaibolNGnQvX6DLYzKrE5QoUQRUNewBFCiEcojgTq96fbqaImk56Zk29/pUqVuHnzZqHnu7m5sXXrVvr27YuiKKiqyqBBg7SXg+6np6envW2to6ODgYGB9jkrK+uxfW1YswoLJ7QCwPvodzQ3r/zE9ymEeDXIyKQQQpQgD6bbSbidzvV/4pkVmLt+8aeffsLJyYnY2FjOnj0LPJx6aPr06VSuXJnhw4cD0Lp1a9avX69VoklMTOTixYtP3Cd3d3dtWvz06dNcunSJhg1lKlsIkavIgklFUewURTmoKEqUoijhiqJIIjAhhHiMB9PtAJSpUpuv582ncePG3Lp1izFjxhAQEECvXr2wtrZGR0eHoUOH5jtn3rx5pKWlMW7cOJo0acKMGTPw8vLCxsaGtm3bcvXq1Sfu07Bhw8jJycHa2hofHx8CAwO1EUwhhCiy1ECKomwH5qqq+oeiKB2BcaqqejzqHEkNJIQo7R5Mt5OVnMC19dOo9fa3km5HiBJGUgPlKsppbhWo+P+fjYErRXgtIYR4JZSEdERCCPE0ijKYHA18pShKHDAbmFiE1xJCiFfCg+mIyhhX4/Wh30u6HSFEifWv3uZWFCUEKKiO1SSgNTBGVdUNiqL0BpYBbQpo413gXYC6dev+m+4IIcRLL+/N8by3uWtWMmRsu4aSbkcIUWIV5ZrJZKCSqqqqkpt3IllV1YqPOkfWTAohhBDiZSFrJnMV5TT3FSAvV0Ur4EwRXksIIYQQQhSDogwm3wG+VhQlGvic/5/KFkII8WpLSEigb9++mJub4+joiKurKxs3bnzkOR07diQpKQnIrRdeEF9fX9avX//c+/ugqKgoXF1dsbS0xMbGhrVr1xb5NYV4mRVZBRxVVfcCjkXVvhBCiJJHVVW6devGoEGD+OmnnwC4ePEimzZteuR5W7ZseRHdyycrK4syZR7+a7BcuXKsXLkSCwsLrly5gqOjI+3ataNSpUovvI9CvAykAo4QQojnZteuXejr6+dLol6vXj1GjhxJYGAgI0aM0LZ7e3sTGhoKQP369blx40a+tsLCwrCxsaFhw4Y4Ojpy6tQpbXuPHj0A+PXXXzE0NCQjI4P09HTMzc0BWLp0Kc7Ozujr6+Pt7c2JEyewsrLC19eXoUOH0qxZM8aNG8f333+PqakpdnZ22Nvbc+fOHRo0aICFhQUANWvWpGrVqly/fv2x9x4bG4uVldWzPzwhXlJSm1sIIcRzc+zYMRwcHJ5LW3FxcVSrVo2tW7cybtw4vv32WwDs7e2JiooCYM+ePVhZWREWFkZWVhbNmjUDoEePHrzzzjvUr1+fBg0a5Juqvnz5Mvv370dXV5fOnTsTHByMm5sbKSkplC1bNl8fDh06REZGBq+//vpzuSchXkUSTAohhCgyw4cPZ+/evejr6+Pj48OaNWtYuHAhAOfOnSMwMBB/f39u3bqFl5cXKSkpZGfnlpP86aefuHbtGnFxcVpt8I8//phq1apRp04dvLy8OHDgAFWrVmXVqlXUrl0be3t7vLy8OHPmDGlpaSQmJrJ+/Xrc3d21PrVs2RInJyeWLFlCjRo16NSpE9OnTyc2NpakpCTOnz/PpUuX8PPz48cff2TFihV89tln/Pjjj5iamlKnTh0cHR35+OOPiYiIYPDgwQB4eXlp10hPT+f9998nPDycMmXKMGfOHDw9PQkMDCQ4OJi7d+9y5swZPv74YzIyMli1ahUGBgZs2bKFKlWqvKgfjxDPhUxzCyGE+NeCI+Nxm7WLz/f
fYfmvuwiOjAdg0aJF7Ny5k+vXr6Orq8v96eiysrK0z6qqsn37dr755hsyMjLytV2/fn2GDh3Kf/7zH2bPno27uzu3bt2iQYMGNG3alPXr1/Pjjz+yd+9eoqOjeeONNwCYNGkSmZmZjB07lnv37gGQnJzMt99+S2BgIM7OzvTt2xdbW1vS0tJYtmwZkZGRbNu2jZ07dzJt2jSmTZuGrq4uGzZsIDo6mj/++IP7U9j5+fmxYMECoqOj8/V50aJFKIpCTEwMa9asYdCgQaSnpwNw9OhRgoKCCAsLY9KkSZQrV47IyEhcXV1ZuXLlc/ypCPFiSDAphBDiXwmOjGdiUAzxSWkY1LMlPT2doZNmagFlamoqALVr1yY9PZ2cnBzi4uKIi4vT2ihXrhwAjo6OWsBpY2PDlStXyM7O5s6dO5w9e1Y7Pi4ujiVLlnD06FH8/PzIyMjgxIkTHDt2jP79+3Pnzh18fHyoXLmy9gb49evX2bVrF6NGjcLW1haA+Ph4KlSowPjx46lVqxZNmjRBURSGDBnCa6+9xhtvvMG+ffvo2rUrZcuWpUKFCnTu3BmApKQ859J1AAAgAElEQVQkkpKSaNGiBQADBgzQ+rd371769+8PQKNGjahXrx6nT58GwNPTkwoVKmBqaoqxsbHWnrW1NbGxsc/xJyPEiyHBpBBCFEJRFC0ggNyRNFNTU7y9vQHYtGkTs2bNKq7u0adPH2xsbJg7dy6BgYFcuXKlWPrx1bZTpGXmTk0rioJpj8ncuXCEt1o7YWFlh7VnF1JtfZgVkY2iU4YmTZowatQoatSoobWRW9sCdHX/V0rS3d2d8uXL06RJEzZu3Ei9evW0fXnHrVq1iqioKHx9fbUAEeDTTz+lWbNm3L59W3uZxtjYmPLly3Py5EntuPXr1/Pnn39iY2ODjo4O1tbW/PLLL/z111/cvn0bLy8vvvrqK/7555/n9rwMDAy0zzo6Otp3HR2dfKO1QrwsJJgUQohClC9fnqNHj5KWlgbAjh07qFXrf2UNu3TpwoQJE4rs+o8KLP755x/CwsI4cuQIY8aMKdZg8kpSWr7vZYyqYNp1PNXf/YEy3WdRuddnlGvcghvZhtzLUfjku/X8/PPPVKtWjfr16wPw22+/YWJiAvyvtK6iKFhZWXHq1CmGDx+Oj48Pb775JpC7PvGzz/6PvfuOz+lsAzj+exLBQ0SMGElLQovIeiISMUJQoWjEniWUtvZ4qdBBS4uKUaNUi5iREkKNGiWI0cgipCEtjxEzSAgJGef9I81pIolVK3p9Px+fz/uccZ/7nOd59XKf+76ur9V5ioMGDWLTpk00btyY1atXM3DgQL7//nsyMjKYOnUqvr6+FC1alLi4OE6cOKGmLRo+fDhNmjTh2LFjdOrUiSJFitCrVy/S0tKoXr06W7duJSgoiCNHjpCamkpycjKbN28GwNTUFFNTU0JCQgDUeZ2QFQhnfz516hTnzp2jZk2pry5eTxJMCiHEQ7Ru3ZotW7YA4O/vT/fu3dV9OVPdeHt7M2zYMBo0aEC1atVyJdeePn06zs7O2NvbM2HCBHX78uXLsbe3x8HBQX1F+mDqmtDQUOrXr4+joyMNGjRQ0+N4eHgQHx+PTqdj0qRJhIWF0bNnT3Q6nRr8vijmptp8txtqNOqIJYDGsAgmDbrxYScPWrRoQa1atR77Gu+99x4bNmxAp9Oxf/9+5syZQ1hYGPb29tSuXZuFCxcCMGHCBPbt24eNjQ3r169XA9NsJUuWZPPmzcyaNeuRuS+zOTs74+npib29Pe+++y52dnaULl0agKVLlzJ48GB0Ol2u+aCDBg0iMzMTOzs7unbtip+fX64RSSFeJ8+tNvfTkNrcQohXibGxMQcPHuSrr75i5cqVuLq6Mnv2bHx9fdm8eTN+fn6EhYUxb948vL29uXPnDgEBAcTGxuLp6cmff/7Jjh07WLduHT/88AOKouDp6cknn3xCuXLlaN++PQcPHqR8+fLcuHG
DsmXL4u3tTUJCAhs3bsTQ0JBbt25RokQJihQpwq5du1iwYAGBgYHo9Xratm3L8ePHAXB3d8fX15e6dV98meDsOZM5A0etkWGuzzlpgDNT27yg3j0bycnJGBsbc/fuXRo3bsyiRYueWQokUXhJbe4skhpICCEewt7eHr1ej7+/P61bt37osV5eXhgYGFC7dm2uXLkCwI4dO9ixYweOjo5AVlASFxfH0aNH6dy5s/pqN2c6mM6dO6tzApOSkujTpw9xcXFoNBrS0tKex23+K16OWa/+p28/ycXEFMxNtYxpWZPp208Sn5h3lLSgkcxX2YcffkhMTAypqan06dNHAkkhcpBgUgghcgiKjFeDopS0DIIi4/H09GT06NEEBwdz/fr1As/N+Roz+62PoiiMGzeOjz76KNexc+fOLbCdkiVLqv/7888/p2nTpmzYsAG9Xo+7u/tT3tnz5eVooQaVOeU3YjmmZeGbO5g9x1IIkZfMmRRCiL/lTHGjAIqSFQyZu7zLhAkTsLOze+I2W7ZsyZIlS0hOTgayUtFcvXqVZs2asXbtWjU4vXHjRr7nJyUlqYt+/Pz8CrxOqVKluH379hP373nycrRgSgc7LEy1aAALUy1TOtjlG3QKIQovGZkUQoi/5Uxxky0lLYOlUbc54DPsqdr08PDgjz/+oH79+kDWPMyVK1diY2PDp59+SpMmTTA0NMTR0THfYPGTTz6hT58+TJ48mTZtCp5nmL1wR6vVcujQIbTaV+NVckEjlkKI14cswBFCiL9Z+Wwhv78RC+OCESHE8ycLcLLIa24hhPhbQQtDCuOCESGEeFEkmBRCiL+NaVkTrZFhrm2FdcGIEEK8KDJnUggh/lZQihuZ8yeEEAWTYFIIIXKQBSNCCPFk5DW3EEIIIYR4ahJMCiEKDWNj4zzbFi5cyPLly4GskoL/JiOEXq9/6uTUDRo0eOrrCiFEYSbBpBCiUPv444/p3bv3v24nPT39qYLJ9PR0AA4ePPiv+yCEEIWRBJNCiEJt4sSJ+Pr6qp9XrFiBTqfD1taW0NBQAO7cuUO/fv1wcXHB0dGRjRs3AlkVZTw9PWnWrBnNmzfHx8eH/fv3o9PpmDVrFnq9Hjc3N+rUqUOdOnXUgDE4OBg3Nzc8PT2pXbs28M+oaXJyMs2bN6dOnTrY2dmp19Lr9VhbWzNgwABsbGzw8PAgJSVv3WohhChsZAGOEEI1adIkVq5ciZmZGW+++SZOTk5s3rwZX19f6tatS0JCAnXr1kWv1+Pn58emTZu4e/cuf/31F+3bt+fbb78lIyODDz74gLCwMDQaDf369WPkyJEv7B7u3r1LVFQU+/bto1+/fhw/fpyvv/6aZs2asWTJEhITE3FxceGdd94BICIigmPHjlG2bFmCg4Px9fVl8+bNals7d+6kePHixMXF0b17d/U1ekREBMePH8fKyirX9YsXL86GDRswMTEhISEBV1dXPD09AYiLi8Pf358ff/yRLl26EBgYSK9evV7YsxFCiOdBgkkhBABHjhwhMDCQo0ePkpaWRp06dXBycnroOVFRUURGRlKsWDFq1qzJ0KFDuXr1KvHx8Rw/fhyAxMTEF9F9Vffu3QFo3Lgxt27dIjExkR07drBp0yZ1BDM1NZVz584B0KJFC8qWLZtvW2lpaQwZMoSoqCgMDQ05deqUus/FxSVPIAmgKArjx49n3759GBgYEB8fz5UrVwCwsrJCp9MB4OTkhF6vf2b3LYQQL4sEk0IIAA4cOEC7du0oXrw4xYsX57333nvkOc2bN6d06dIA1K5dm7Nnz2JjY8Pp06cZOnQobdq0wcPD41/1KygyXs37mJKWQVBk/ENT92g0mjyfFUUhMDCQmjVzJx///fffKVmyZIFtzZo1i4oVK3L06FEyMzMpXry4uq+g81atWsW1a9cIDw/HyMgIS0tLUlNTAShWrJh6nKGhobzmFkK8FmTOpBD/YUGR8TScuhsrny18tyuO2Eu38hxTpEg
RMjMzAdSgKNuDwVF6ejplypTh6NGjuLu7s3DhQvr37/+v+jdufTTxiSkogKLAuPXRBEXGF3hOQEAAACEhIZQuXZrSpUvTsmVL5s6di6JkVd6OjIzM99xSpUpx+/Zt9XNSUhKVK1fGwMCAFStWkJGR8cg+JyUlUaFCBYyMjNizZw9nz559gjsWQojCR4JJIf6jHgzUUsu9xcZffuHnw3+RnJyszhu0tLQkPDwcgHXr1j2y3YSEBDIzM+nYsSOTJ08mIiLiqfs4fftJUtL+CeCUtHvEze5FV3cdb7zxBjNnzsxzTvHixXF0dOTjjz9m8eLFAHz++eekpaVhb2+PjY0Nn3/+eb7Xs7e3x9DQEAcHB2bNmsWgQYNYtmwZDg4OxMbGPnQUM1vPnj0JCwvDzs6O5cuXU6tWrae8eyGEKBw02f9SfxXUrVtX+Tc54oQQj6/h1N3EJ+Z+zZoYsop7sfuoa21FhQoVaNWqFW5ubnTp0gVDQ0PatGnDypUr1QU4YWFhzJs3D4C2bdsyevRoypQpQ9++fdXRzClTpvDuu+8+VR+tfLaQ399QGuDM1DZP1aYQQjwrGo0mXFGUui+7Hy+bBJNC/EflF6hl3k/BsKiWE180pXHjxixatIg6deq8lP5B/gEvgIWplgM+zV5Cj4QQ4h8STGaR19xC/EeZm2rzbLv+6zyuLh9OnTp16Nix40sNJAHGtKyJ1sgw1zatkSFjWtYs4AwhhBAvmqzmFuI/akzLmoxbH51rTmKVjj5M6WD30NXSL1J2P7JXc5ubahnTsuYr0z8hhBASTArxn1VYAjUvR4tXrk9CCCH+IcGkEP9hEqgJIYT4t2TOpBBCCCGEeGoSTAohhBBCiKcmwaQQQgghhHhqEkwKIYQQQoinJsGkEEIIIYR4ahJMCiGEEEKIpybBpBBCCCGEeGoSTAohhBBCiKcmwaQQQrwGDA0N0el0ODg4UKdOHQ4ePAjAxYsX6dSpEwBRUVFs3br1qa/RunVrEhMTn0l/hRCvDwkmhRDiNaDVaomKiuLo0aNMmTKFcePGAWBubs66deuAfx9Mbt26FVNT02fSXyHE60OCSSGEeM3cunWLMmXKAKDX67G1teX+/ft88cUXBAQEoNPpCAgIIDk5mb59+2JnZ4e9vT2BgYEA+Pv7Y2dnh62tLWPHjlXbtbS0JCEhAb1ej7W1NQMGDMDGxgYPDw9SUlJeyr0KIV4+qc0thBCvgZSUFHQ6HampqVy6dIndu3fn2l+0aFG++uorwsLCmDdvHgBjx46ldOnSREdHA3Dz5k0uXrzI2LFjCQ8Pp0yZMnh4eBAUFISXl1eu9uLi4vD39+fHH3+kS5cuBAYG0qtXrxdzs0KIV4qMTAohxGsg+zV3bGwsv/76K71790ZRlIees2vXLgYPHqx+LlOmDEeOHMHd3R0zMzOKFClCz5492bdvX55zrays0Ol0ADg5OaHX65/p/QghCg8ZmRRCiEIqKDKe6dtPcjExhZS0DIIi4/FytKB+/fokJCRw7dq153btYsWKqf/b0NBQXnML8R8mI5NCCFEIBUXGM259NPGJKSiAosC49dEERcYTGxtLRkYG5cqVy3VOqVKluH37tvq5RYsWzJ8/X/188+ZNXFxc2Lt3LwkJCWRkZODv70+TJk1e1G0JIQohCSaFEKIQmr79JClpGepnJf0+fy0aRM82TejatSvLli3D0NAw1zlNmzYlJiZGXYDz2WefcfPmTWxtbXFwcGDPnj1UrlyZqVOn0rRpUxwcHHBycqJdu3Yv+vaEEIWI5lFzal6kunXrKmFhYS+7G0II8cqz8tlCfn97a4AzU9u86O4I8Z+k0WjCFUWp+7L78bLJyKQQQjzCxIkT8fX1fdndyMXcVPtE24UQ4nmRYFIIUehlV3+xsbHBwcGBGTNmkJmZ+bK79VyNaVkTrVHu19haI0PGtKz5knokhPivkmBSCFHoZafFOXHiBDt37mTbtm18+eWXeY5LT09/7Da
//vpratSoQaNGjTh58iQAP/74I87Ozjg4ONCxY0fu3r3L7du3sbKyIi0tDchKGG5lZcXZs2fR6XTqH0NDQ86ePftsbhjwcrRgSgc7LEy1aAALUy1TOtjh5WjxzK4hhBCPQ+ZMCiEKPWNjY5KTk9XPp0+fxtnZmYSEBJYtW8b69etJTk4mIyODL7/8El9fXzZv3gzAkCFDqFu3Lt7e3mzdupVRo0ah0Wi4du0azs7OBAQEYG1tjaIolC1bFiMjI/bt28e0adOoWLEiQ4cOpW/fvrRr1w4vLy8WLVrEyZMnmTFjhtqf+fPns3fvXn7++ecX/myEEM+PzJnMIiOTQojXTrVq1cjIyODq1asAREREsG7dOvbu3VvgOampqXz00Uds27aNjz76iEqVKmFoaIiJiQnFihWjffv2zJ8/H61WS/369Vm1ahUnTpwAoH///ixduhSApUuX0rdvX7XdAwcO8OOPP7JkyZLneMdCCPHySNJyIcRrr0WLFpQtW/ahxywI2scto7I0+yEGjsdhYVENyJp3aW5uzi+//MKaNWvw9/fHw8MDPz8/goODAWjYsCF6vZ7g4GAyMjKwtbUF4NKlS3zwwQds2rQJY2Pj53mLQgjx0sjIpBCiUAqKjKfh1N1Y+WxRq79kO336NIaGhlSoUAGAkiVLqvuKFCmSa3FOamoqEWdvMue3OO6lZ6IAqeVr8Pvvv3Pxxh1u377NlStX6Ny5M6mpqfTr14/o6GhWrVqVqz+9e/emR48e6qhkWloanTt3Ztq0adSoUeM5PgkhhHi5JJgUQhQ6D6v+cu3aNT7++GOGDBmCRqNhw4YNnD9/Xj23atWqxMTEcO/ePRITE/ntt9/YdvwSGSaVSU+8THrSFYpVegtN0RIcDTvMu+++i7W1NZUrV8bX15ebN2/SoUMHatWqlatPPXv25ObNm3Tv3h2AgwcPEhYWxoQJE9RFOBcvXnyRj0kIIV4Iec0thCh0Cqr+0mNRJm9XKs3777/PqFGjAGjfvj05F/a9+eabdOnSBRsbG6pVq4ajoyP77qZhbFSMsh4DufLzBAyMilHMohaKWVVCQoIZOnQofn5+GBgY4OnpiZ+fX67a1AAhISF06tQJU1NTAJo0aUJqauoLeBpCCPFySTAphHihli9fjq+vLxqNBnt7eyZNmkS/fv1ISEjAzMyMpUuXUqVKFby9vTExMSEsLIzLly/z7bff0qlTJy5dukT4/GFk3r8LmRmU9RhE1U82cW5mJ7QOLUm/G8e2bdvo06cPZmZmBAcH07ZtWwAsLS3p2rUru3btYvLkydy+fZtFixaRcuEoKX+GUsZjEBYDFpKefIPLK8dgkJmOg4MDCxYsoEyZMpQtW5YRI0YA8Omnn1KhQgWGDx/O0KFD2bZtG1u3bn2Zj1YIIV4KCSaFEC/MiRMnmDx5MgcPHqR8+fLcuHGDPn36qH+WLFnCsGHDCAoKArIWsISEhBAbG4unpyedOnVi9erVVKxdD41jB5TMDJS0ewAoaalUrFabEwFBfPXVV3z55ZfMmzcvTx/KlStHREQEANevX2fAgAEERcbTf+hobmyfT3rSZdITL1O0rDnLAn+lUz0rkpOTMTc3p0OHDowYMYLMzEzWrFlDaGgoAHPnzn1BT1AIIV49/2rOpEaj6azRaE5oNJpMjUZT94F94zQazZ8ajeakRqNp+e+6KYR4HezevZvOnTtTvnx5AMqWLcuhQ4fo0aMHAO+//z4hISHq8V5eXhgYGFC7dm2uXLkCgLOzM/djfiP5oD9p185iUKxE1sEaA6aM/giAXr165Wonp65du6r/+/jx47i5ufF571YYnjmASZlyWPSdi6FRUVYE7aBrg7cwNDSkdOnSWFpaUq5cOSIjI9mxYweOjo6UK1fumT8jIYQobP7tyORxoAPwQ86NGo2mNtANsAHMgV0ajaaGoigZeZsQQrzOgiLjmb79JBcTU9DEnKKOmeaxz805LzG7wELjxo0J//0gkxe
sZNni7yhRx5MajdpyXgPtclR/0Wjyv07Old3e3t4EBQXh4OCgpvrxm9oGs8VGeOryVpLp378/fn5+XL58mX79+j32fQghxOvsX41MKoryh6IoJ/PZ1Q5YoyjKPUVRzgB/Ai7/5lpCiMLnwVXXqWbWbApaz/I9xwG4ceMGDRo0YM2aNQCsWrUKNze3h7Z59uxZKlasyPeTPmHa+BH0fCuTAz7NyMzMZN26dQCsXr2aRo0aPbJ/t2/fpnLlyqSlpeVK9dO8eXMWLFgAQEZGBklJSUDWYp5ff/2VI0eO0LKlvHARQgh4fnMmLYDDOT5f+HubEOI/5MFV10XNqmLi2oWPu7/HjIomODo6MnfuXPr27cv06dPVBTgPExwczPTp0zEyMsLY2Jjly5cDWSOOoaGhTJ48mQoVKhAQEPDI/k2aNIl69ephZmZGvXr1uH37NgDfffcdH374IYsXL8bQ0JAFCxZQv359ihYtStOmTTE1NcXQ0PBfPBkhhHh9PLI2t0aj2QVUymfXp4qibPz7mGBgtKIoYX9/ngccVhRl5d+fFwPbFEVZl0/7HwIfAlSpUsXp7NmzT383QohXipXPFvL7G0YDnJna5ple68H63M9DZmYmderUYe3atbz99tvP9VpCiFef1ObO8siRSUVR3nmKduOBN3N8fuPvbfm1vwhYBFC3bt2HR7ZCiELF3FRLfGJKvtsLm5iYGNq2bUv79u0lkBRCiByeVwWcTUA3jUZTTKPRWAFvA6HP6VpCiFfUmJY10Rrlfh2sNTJkTMuaz/xaz3tUsnbt2pw+fZoZM2Y81+sIIURh86/mTGo0mvbAXMAM2KLRaKIURWmpKMoJjUbzMxADpAODZSW3EP89Xn+vrs5ezW1uqmVMy5rqdiGEEIXfI+dMvkh169ZVcpY9E0IIIYR4VcmcySzP6zW3EEIIIYT4D5BgUgghXgEajYZevXqpn9PT0zEzM1Prij8r3t7eWFlZodPpcHBw4LffflP39e/fn5iYmEeen53PMye9Xs/q1aufaV+FEIWDBJNCCPEKKFmyJMePHyclJWv1+86dO7GweD5zS6dPn05UVBSzZ8/m448/Vrf/9NNP1K5d+6nalGBSiP8uCSaFEOIV0bp1a7Zs2QKAv78/3bt3V/fduXOHfv364eLigqOjIxs3bgTAz8+PDh060KpVK95++20++eQTIKtyj7e3N7a2ttjZ2TFr1qw816tfvz7x8f9kbXN3dyd73vrixYupUaMGLi4uDBgwgCFDhqjH7du3jwYNGlCtWjV1lNLHx4f9+/ej0+mYNWsWJ06cwMXFBZ1Oh729PXFxcc/4aT1/QUFBaDQaYmNjH3qcsbFxnm16vR6NRsNnn32mbktISMDIyCjXsxTidSDBpBBCvCK6devGmjVrSE1N5dixY9SrV0/d9/XXX9OsWTNCQ0PZs2cPY8aM4c6dOwBERUUREBBAdHQ0AQEBnD9/nqioKOLj4zl+/DjR0dH07ds3z/V+/fVXvLy88my/ePEikyZN4vDhwxw4cCBPMHXp0iVCQkLYvHkzPj4+AEydOhU3NzeioqIYOXIkCxcuZPjw4URFRREWFsYbb7zxLB/VC+Hv70+jRo3w9/d/qvOtrKzUfxwArF27Fhsbm2fVPSFeGRJMCiHEK8Le3h69Xo+/vz+tW7fOtW/Hjh1MnToVnU6Hu7s7qampnDt3DsiqJV66dGmKFy9O7dq1OXv2LNWqVeP06dMMHTqUX3/9FRMTE7WtMWPGUKNGDXr06MHYsWPz9CM0NJQmTZpQtmxZjIyM6Ny5c679Xl5eGBgYULt2ba5cuZLvvdSvX59vvvmGadOmcfbsWbTawpWoPjk5mZCQEBYvXqzWjr906RKNGzdGp9Nha2vL/v37c52TkJBA/fr11QCyRIkSWFtbq6O9AQEBdOnSRT3+2rVrdOzYEWdnZ5ydnTlw4AAAEydOpF+/fri7u1OtWjXmzJnzIm5ZiKcmwaQQQrwkQZHxNJy
6GyufLaSkZRAUGY+npyejR4/O9YobQFEUAgMDiYqKIioqinPnzmFtbQ1AsWLF1OMMDQ1JT0+nTJkyHD16FHd3dxYuXEj//v3VY6ZPn86pU6eYNm0a/fr1e+J+57xeQenlevTowaZNm9BqtbRu3Zrdu3c/8XVepo0bN9KqVStq1KhBuXLlCA8PZ/Xq1bRs2ZKoqCiOHj2KTqdTj79y5Qpt2rThq6++ok2bf0qFZo82nz9/HkNDQ8zNzdV9w4cPZ+TIkRw5coTAwMBc31FsbCzbt28nNDSUL7/8krS0tBdz40I8BQkmhRDiJQiKjGfc+mjiE1NQAEWBceujMXd5lwkTJmBnZ5fr+JYtWzJ37lw1eIuMjHxo+wkJCWRmZtKxY0cmT55MREREnmOGDBlCZmYm27dvz7Xd2dmZvXv3cvPmTdLT0wkMDHzk/ZQqVYrbt2+rn0+fPk21atUYNmwY7dq149ixY49s40GGhobodDr1j16vJywsjGHDhgEQHBzMwYMHn7jdx+Hv70+3bt2ArIDQ398fZ2dnli5dysSJE4mOjqZUqVIApKWl0bx5c7799ltatGiRq51WrVqxc+dO1qxZQ9euXXPt27VrF0OGDEGn0+Hp6cmtW7fUSk5t2rShWLFilC9fngoVKhQ4AizEq+BfVcARQgjxdKZvP0lKWu7CYClpGSyNus0Bn2F5jv/8888ZMWIE9vb2ZGZmYmVlxebNmwtsPz4+nr59+5KZmQnAlClT8hyTvUDk22+/pWXLlup2CwsLxo8fj4uLC2XLlqVWrVqULl36ofdjb2+PoaEhDg4OeHt7c+/ePVasWIGRkRGVKlVi/PjxDz0/P1qtlqioqFzbLC0tqVs3K0d0cHAwxsbGNGjQ4Inbzk9QZDzTt5/k/KWrxO/YRWh4FCWKFSEjIwONRsP06dPZt28fW7Zswdvbm1GjRtG7d2+KFCmCk5MT27dvp0mTJrnaLFq0KE5OTsyYMYOYmBg2bdqk7svMzOTw4cMUL148T1/yG20W4lUlFXCEEOIlsPLZQn5/+2qAM1Pb5LPnxUpOTsbY2Jj09HTat29Pv379aN++/Qvtg7GxcZ6a68HBwfj6+jJv3jxcXV0xNDTEzMyMuXPnsnjxYkxMTAgLC+Py5ct8++23dOrUCch6tf/zzz9z79492rdvz5dffsmdO3fo0qULFy5c4Oade2TYt8eoRiOu/PwF9y6epGhpM1q1bEnQ8oU0adKEr776ikaNGmFoaMi8efP4888/mT17NsbGxiQlJdG5c2fq1avH2LFj0ev1tG3bluPHj3PixAnCwsLo06cPfn5+hIWFMW/ePHr06IGjoyNjxowBshZS6XQ6Jk6ciLGxMaNHjwbA1taWzZs3Y2lp+UKfv3g0qYCTRUYmhRDiJTA31RKfmJLv9lfBxIkT2bVrF6mpqXh4eOS76vt5S0lJUeclWllZsWHDBnWfpaUlH3/8ca6ga/HixepK89jYWDw9PenUqRM7duwgLi6O0NBQFEXB07T1S+8AACAASURBVNOTffv2ce3aNczNzdmyZQsNp+7m/JUEMlJuce/8ccp7jadE9bpcKpY1etyxY0e8vb0pWbIkRkZGGBsbs3z5crU/hoaG+Pv74+npSalSpXItoLKxscl3FfecOXMYPHgw9vb2pKen07hxYxYuXPhcnqUQz5MEk0II8RKMaVmTceujc73q1hoZMqZlzZfYq3/4+vq+7C7k+5r7UfJbab5jxw527NiBo6MjkDXqGhcXh5ubG//73/8YO3Yspy+UpdibtiiZGRQpY87dkyGQfp8rbzkDMGzYMHWu5oOyR0+LFSuWa/7p8ePH8xzr7e2Nt7c3AOXLlycgICDPMRMnTsz1Ob92hHiVSDAphBAvgZdjVnWb6dtPcjExBXNTLWNa1lS3/1dlz1u8mJiirnB/kmeS30pzRVEYN24cH330UZ7jIyIi2Lp1K3fXTiflnB2mDbtTufcsUs9GcefkAe4d2wrTX/y
orBCFiQSTQgjxkng5Wvzng8ecsle4Z4/WZq9wB/J9TqVKleLWrVuPbLdly5Z8/vnn9OzZE2NjY+Lj4zEyMiI9PZ2yZcvSq1cv/riewZzvfyDzfgpK2j201Z0pbWnHpR/7P7J9If7rJDWQeG1kpxGxtbXlvffeIzEx8Zm0q9frsbW1fSZtFSQ9PR0zMzO1msjDREVFsXXr1qe6zokTJ2jWrBk1a9bk7bffZtKkSerozb1793jnnXfQ6XT5vnrLll0S7kXN7UpMTOT7779/7textLQkISHhuV9HFKygFe7Tt5/M9/j33nuPDRs2oNPp8iQQz8nDw4MePXpQv3597Ozs6NSpE7dv3yY6Olot+bhj5TwmfPEZFYsrXA38kmvLhnIv6FPmfTf7md6jEK8jWc0tXhs5V3726dOHGjVq8Omnn/7rdnOuynxetm3bxuTJk7l8+TJ//vknGo2mwGNzrgZ9EikpKdja2rJgwQI8PDy4e/cuHTt2pG3btgwePJjDhw/z2WefsWvXroe2s2DBAlavXo2BgQF79+59oj48jYKef3p6OkWKPLuXK5aWloSFhVG+fPlc2zUaDaNGjWLGjBlA1lzC5OTkPPPantY333zzVGlzXkev+gp3IR4kq7mzyMikeC3Vr1+f+Ph4IGtyfPPmzalTpw52dnZs3LgRyApSrK2tGTBgADY2Nnh4eJCSkrW6Njw8HAcHBxwcHJg/f77abmpqKn379sXOzg5HR0f27NkDZAV4Xl5etGjRAktLS+bNm8fMmTNxdHTE1dWVGzduPLS//v7+DB8+nCpVqnDo0CF1+5EjR2jQoAEODg64uLiQlJTEF198QUBAgDqCuHfvXjWps6OjY67E0TmtXr2ahg0b4uHhAWSVeps3bx5Tp07l6tWr9OrViyNHjqDT6fjrr78e2tcZM2YQHx/PhQsX1O3GxsaMGTMGGxsb3nnnHUJDQ9VycNm59fR6PW5ubtSpU4c6deqoCae/+OIL9R4sLCzUOtIzZ87EwcGBmJgYLCws1LJzpUqVoly5ctjY2FChQgUcHR2xsbFh0aJFACxcuFBNt5L9/QwZMgSAlStXqqNRH330ERkZuUfCHlSsWDHWr1//1KOWj8oP+M033zxVu6+jglayvyor3IUQBVAU5ZX54+TkpAjxtEqWLKkoiqKkp6crnTp1UrZt26YoiqKkpaUpSUlJiqIoyrVr15Tq1asrmZmZypkzZxRDQ0MlMjJSURRF6dy5s7JixQpFURTFzs5O2bt3r6IoijJ69GjFxsZGURRF8fX1Vfr27asoiqL88ccfyptvvqmkpKQoS5cuVapXr67cunVLuXr1qmJiYqIsWLBAURRFGTFihDJr1qwC+52SkqJUrlxZuXv3rvLDDz8oQ4YMURRFUe7du6dYWVkpoaGhiqIoSlJSkpKWlqYsXbpUGTx4sHp+27ZtlZCQEEVRFOX27dtKWlpavtcZOXKkMnv27DzbTU1NlaSkJGXPnj1KmzZtHvqMz507p7z11luKoijKuHHjFF9fX3UfoGzdulVRFEXx8vJSWrRoody/f1+JiopSHBwcFEVRlDt37igpKSmKoijKqVOnlAf/P3/z5k3F1tZWCQsLU8LCwhRbW1vlxIkTirW1tVK7dm1l0aJFilarVQwMDNTvrV27dsqKFSuUu3fvKjY2NkpCQoJy9epVpXr16mq7rVq1Uvbv36/ExMQobdu2Ve7fv68oiqIMHDhQWbZsmaIoilK1alXl2rVree65ZMmSyjfffKOMHz9eURRFmT59ujJhwgRFURRl06ZNiouLi6LT6ZTmzZsrly9fVhRFUSZMmKD06tVLadCggdKtW7c831mbNm2UPXv2KGPHjlUMDAwUBwcHpUePHoqiKMqMGTMUGxsbxcbG5qG/m9fRhogLSq3PtilVx25W/9T6bJuyIeLCy+6aEPkCwpRXIH562X9kZFK8NrJz0lWqVIkrV66oZc0URWH8+PHY29vzzjvvEB8fr6Y
MsbKyUvPYOTk5odfrSUxMJDExkcaNGwPw/vvvq9cICQmhV69eANSqVYuqVaty6tQpAJo2bUqpUqUwMzOjdOnSvPfeewDY2dmh1+sL7PfmzZtp2rQpWq2Wjh07EhQUREZGBidPnqRy5co4O2elJjExMcn3tW7Dhg0ZNWoUc+bMITEx8Zm++n1QQEAAXbp0Af4pMZetaNGitGrVCsi65yZNmmBkZJTr/tPS0hgwYAB2dnZ07tyZmJgY9XxFUejVqxejRo3CycmJkJAQ2rdvT4kSJTAwMKBDhw4cO3YMBwcHqlWrpn5vycnJjB49GldXV86fP09cXBxmZmZUq1aNw4cPc/36dWJjY2nYsCG//fYb4eHhODs7o9Pp+O233zh9+vQj73vw4MGsWrWKpKSkXNsbNWrE4cOHiYyMpFu3bnz77bfqvpiYGHbt2pXrGT1o6tSpavqbVatWER4eztKlS/n99985fPgwP/744yPLJr5OvBwtmNLBDgtTLRrAwlTLlA52skhJiFecrOYWhVrONCIUKcrEpVvwqFmGli1bMn/+fIYNG8aqVau4du0a4eHhGBkZYWlpSWpqKpC3ZFn2a+6nkbMtAwMD9bOBgcFDX3X6+/sTEhKiVre4fv06u3fvplKlSo91XR8fH9q0acPWrVtp2LAh27dvp1atWnmOq127Nvv27cu17fTp0xgbG2NiYvJY1/L39+fy5cusWrUKgIsXLxIXF8fbb7+NkZGROtezoPufNWsWFStW5OjRo2RmZuYqIzdx4kTeeOMN+vbtS1BkPLN3neLmjRtsuXeQW6n/PL8SJUqobQcHB/PXX3/xwQcf8PXXX+Pu7q5+t926dePnn3+mVq1atG/fHo1Gg6Io9OnTJ9/Sgg9jYmJC7969mTNnDlrtP69cL1y4QNeuXbl06RL379/HyspK3efp6Znr2MeRHUCXLFkSgA4dOrB//341P+J/gaxwF6LwkZFJUWhlpxGJT0xB4Z80IjtO3mTOnDnMmDGD9PR0kpKSqFChAkZGRuzZs4ezZ88+tF1TU1NMTU0JCQkBUAMnADc3N/XzqVOnOHfuHDVrPn2S6Vu3brF//37OnTuHXq9Hr9czf/58/P39qVmzJpcuXeLIkSMA3L59m/T0dEqVKpVrXuRff/2FnZ0dY8eOxdnZmdjY2Hyv1bNnT0JCQtQFNikpKQwbNoxPPvnksfp66tQpkpOTiY+PV/s6bty4h468PSgpKYnKlStjYGDAihUr1PmKv/zyC7t27WLOnDnq95pargZ34w5z+fZ9Ll27wTL/tdjb2+dpT6vVYmRkRGxsLIcPH1b3tW/fno0bN+Lv70+3bt0AaN68OevWrePq1asA3LhxI9/fQ1BkPA2n7sbKZ4ua63DEiBEsXryYO3fuqMcNHTqUIUOGEB0dzQ8//KAGsoAaEAIUKVJErZEN5DpOCCEKOwkmRaH1sDQijo6O2Nvb4+/vT8+ePQkLC8POzo7ly5fnO2r3oKVLlzJ48GB0Op2aOgdg0KBBZGZmYmdnR9euXfHz88s1IvmkNmzYQLNmzXK10a5dO3755RcURSEgIIChQ4fi4OBAixYtSE1NpWnTpsTExKgLcGbPno2trS329vYYGRnx7rvv5nstrVbLxo0bmTx5MjVr1sTOzg5nZ2d1Ycqj+Pv756nN3LFjxycKJgcNGsSyZctwcHAgNjZWDbhmzpxJfHw8Li4u9GzThEu7l1Gs0lsY2zbn2toJZNy7w+Ur19QFNtlatWpFZmYm8+fPx8fHB1dXV3VfmTJlsLa25uzZs7i4uABZo7OTJ0/Gw8MDe3t7WrRowaVLl3K1WdA/UvadTaFLly4sXrxYPTYpKQkLi6xRtGXLlhV435aWlkRFRZGZmcn58+cJDQ1V9xkZGZGWlgZk/WMlKCiIu3fvcufOHTZs2ICbm9tjP18hhHgZJDWQKLReZhqR69ev07x5cwAuX76MoaEhZmZmAISGhlK0aNEnau/WrVvodDp27dp
FtWrVuH//Po6Ojixbtoy6dety8eJFRowYQUREBKVLl6ZSpUp899136PV65s2bR1BQ0DO/x5flZaeHaTh1d66a2edmdqLKqHVYmGpZ39cGKysrPvnkEyZOnMjGjRsZOXIkZcqUoVmzZhw5coTg4GAmTpyYq2Z09nzQ8PBwrK2tuXnzJhMnTsTd3Z2xY8eyadMm6tSpw6pVq5g5cyZLliwBoH///owYMeKx+l1QaqOCBAcHU7RoURo0aPCET+jZcXd3x9fXl7p1//OZVUQhJamBssicSVFomZtqc/1HP+f2561cuXJqzeAHA4enYWJiwuTJkxk6dChbtmxh2rRpuLu7U7duXRRFwcvLiw8//JCff/4ZgMjISHUR0eN41jkZn1RQUBDt27fnjz/+eOTIcM7vNTFkFRojLaXrdaDIX3u5eNERc3Nz4MmDp2wjRoxg7dq1nD9/HgODvC9nLj7wm6oyap26vWLFihgYGKg5Jtu1a0e7du3ytJEzB2X27yPndImcpk2bxrRp09TPo0aNYtSoUU90T08jODgYY2PjZxJMvuzflxDi5ZLX3KLQGtOyJlojw1zbtEaGjGn59HMY/60pU6ao1VqGDh2q5nTcsWMHtWrVQqfTUbVqVbRaLVqtlrZt26rn9ujRg9TUVKZPn87ixYv5+uuvAdi5cyfGxsb07/9PWTdHR0caNmwIZM2l7NChAzVr1qR3795ER0ej0+koWrQoFSpUQKvVYm1tTUREBPXq1cPe3p6OHTuqK5MbNWrEqFGjqFu3LrVr1yYsLAxzc3OKFStGpUqV1PyP/fv3x9bWFltbW+bOnate+91338XBwQFbW1vWrVun3q9Op8POzo4BAwawcuVKGjVqhIuLC+PHj8fBwQFnZ2ciIiLw8PCgevXq/Pjjj0DW93o3NJBLy0dyO3IrqfpItEaGFPkzmE6dOqnXyjl38XFlZmayYcMG3nzzzQITrr/quQ71ej21atWiZ8+eWFtb06lTJ+7evQvA3Llz1Xyq2XNnb9y4gZeXF/b29ri6unLs2DH0ej0LFy5k1qxZavUYvV5Ps2bNsLe3p3nz5pw7dw7ImpPr6uqKnZ0dn332GcbGxkBWMOrm5oanpye1a9cGwMvLCycnp1w5PyErB+nIkSOxsbGhefPmXLt2Td23du1aXFxcqFGjhlrFpnHjxuo/1iDrN3r06NHn+FSFEP+GBJOi0HoV04i4ubmp/0GMiIggMTGRjIwM9u/fz5gxY9i8eTMajYbz589z69Ytbt++zebNm9Xz58yZwyeffMKECRMwNTUF4Pjx4zg5ORV4zYiICObNm0dMTAx//PEHd+7cISoqigoVKuDj40NKSgpxcXH06tWLmTNncuzYMWrWrMmkSZPUNrRaLWFhYXzwwQd4eXkRGRlJUlISBgYGBAcH88MPP3DkyBGOHDnCoUOH+P7774mOjmbr1q1YWlpy9OhRjh8/TosWLbh79y79+vUjMDCQ6Ohobt26xa5du1i8eDEpKSlYWVnx3XffcfHiRZo2bcqpU6do3bo1Y8aMwcXFhRHtG1Kr+E2chi7AWPcuJF+jzo3dnPnjGDExMWg0Go4cOYJWq803eLpz5w79+vXDxcUFR0dHNUk9ZAVANjY2DBw4MNdcz5yJ3y/5DaNo5j3Sk29wedVYLi4dyqUlg2lj9k95zk8//RQHBwdcXV3VEeKCgrGc/vrrL1q1aoWTkxNubm4FLpZ6lJMnTzJo0CD++OMPTExM1H/AlC9fnoiICAYOHIivry8AEyZMwNHRkWPHjvHNN9/Qu3dvLC0t+fjjjxk5ciRRUVG4ubkxdOhQ+vTpw7Fjx+jZsyfDhg0DYPjw4QwfPpzo6GjeeOONPL+97777Tk2PtWTJEsLDwwkLC2POnDlcv35d/U7q1q3LiRMnaNKkCV9++aXaRnp6OqGhocyePVvd/sEHH+Dn5wdkLfxKTU3FwcHhqZ6VEOL5k2BSFGpejhYc8GnGmaltOODT7KWnFHF
2dubIkSMkJiZibGysjr7t378fNzc3fv/9d5o1a0b58uUxMjKiR48eudL1bNu2jcqVKz9R6UZXV1fMzc3V2uQ5c1p27doVyJrjmZqaqo5m9unTJ9d1PT09gaz8kHZ2dlSsWJHixYtjaWnJhQsXCAkJoWPHjmi1WkqVKoWXlxf79+/H3t6eX3/9FR8fHw4cOEDp0qX5448/qFGjBtWrVweyXkeXKVOGGjVqYGBgQNWqVQG4efMmXl5enDx5kvXr15ORkcHu3bupWrUqRw/s5m7A/9D+tYeSSgotnGtjZ2dHyZIladWqFRERERgYGOQbPH399dc0a9aM0NBQ9uzZw5gxY9RRTH9/f7p370779u3ZsmWLuvDF19eX+fPnExUVxbGww0zp7ESRMwfQWtXBeeRPrN66j6FdsvKW3rlzB1dXV44ePUrjxo3VEdWCgrGcPvzwQ+bOnUt4eDi+vr4MGjTosb/nnN588031u+zVq5eaeaBDhw7APzlTISvdUHau1GbNmnH9+nVu3bqVp81Dhw7Ro0cPICu3anabhw4donPnzgDq/mwuLi650iHNmTNHDbKzc35CVnqo7N9izv4W1OfOnTuzefNm0tLSWLJkCd7e3k/xlIQQL4oEk0I8gZwpYxpO3U1QZHyu/cWKFcPCwoLly5fTsGFD3Nzc+O233zh79iw1atR4aNsXLlzg+++/JzQ0lI0bN3LixAkAbGxsCA8PL/C8B3Nl5sxpmTM9zcPkzAn5YL7Mh+XItLa2JiwsDBsbG3x8fPItDRgcHEyVKlWArBHQ7NKK2UFmsWLFqF69OlqtlvT0dMqVK0e1atWIiori448/xsfHB29vb0qWLMmyZcvUayUlJeUbiOzYsYOpU6ei0+nUvJPnzp3j/v37bN26FS8vL0xMTKhXrx7bt28H8iZ+7+hcleVje1Du4kFapO6jepEblCpVCshKzp49PSHndQsKxrIlJydz8OBBOnfurJZyfHAleUFy/u46LjhIalpmrv3Z+T2zv7sHfwfPS87fV3BwMLt27eLQoUMcPXoUR0fHAlMg5aw9n1+fS5QoQYsWLdi4cSM///wzPXv2fI53IYT4tySYFOIxPZgyJj4xhXHro4m9lHuUx83NDV9fXxo3boybmxvz589XV6vWq1ePPXv2cP36ddLT01mzZg1NmjQBsl4nfvHFF7zxxhtMnz6dwYMHA+Dh4cGtW7fUFb4AR48e5cCBA4/d93LlyqHVatVa2CtWrFCv+zjc3NzYsGEDKSkpJCcns3HjRtzc3IiPj8fY2Jj333+f//3vf0RERGBtbc2xmFicPllJleFrCD0SRkTkUSwtLUlOTiYoKAhFUXIt2DAwMFADDBcXF/R6vTqamJiYSEJCAvfu3aNEiRLqte7fv59vIKIoCoGBgURFRREVFcW5c+ewtrZm+/btJCYmYmdnh6WlJSEhIeqrbh8fH3766SdSUlJo2LAhsbGxNG7cmH379mFhYYG3tzfLly8HyJWc/UmCtszMTExNTdV+RUVF8ccffzzyvAd/d1dupXLtcjxT/bKC8tWrV9OoUaOHfnfZi3+Cg4MpX748JiYmefKVNmjQgDVr1gBZuVWzUxK5uroSGBgIoO7PT1JSEmXKlKFEiRJ5cn5mZmaq82kf1d9s/fv3Z9iwYTg7O1OmTJlHHi+EeHkkmBTiMRWU1/LAX9dzbXNzc+PKlSu4urpiYWGBkZGR+h/mN954g0mTJuHu7o5Op8PV1ZU2bdqwbds2rly5Qp8+fQDUMoKrVq1Co9GwceNGtm7dSvXq1bGxseGzzz577Ao52VasWMHIkSOxt7cnJiaGzz777LHPdXFxoXv37jg7O+Pq6srAgQOxs7Pj6NGjamnCb775hvHjx7Pj5E1KNBtM9LIvuLhkMEXKmGMxZAWzNxygUqVKVK1aVZ1Xmh9XV1cqV66Mq6srCxYsYOXKlSQnJ5ORkcH777+vXqugqj0tW7Zk7ty5an7Q7HKE/v7+/PTTT2r
C9TNnzrBz507u3r2bb+L3s2fPUrFiRQYMGED//v2JiIh46DMqKBjLZmJigpWVFWvXrgWygt7HWVSS3++uSNk3mPHdHDXN0MCBAws8f+LEiYSHh2Nvb4+Pj4+aD/O9995jw4YN6gKcuXPnsnTpUuzt7VmxYgXfffcdALNnz2bmzJnY29vz559/Urp06Xyv06pVK9LT07G2ts6T87NkyZKEhoZia2vL7t27+eKLLx55305OTpiYmNC3b99HHiuEeLkkz6QQj+ll5z8sLHLmabzsP47S9TqhreaEhamWAz7NmDNnDgsWLKB69erq4qOc+QaDg4Px9fVl8+bNudIuBQYGMn78eLRaLYcOHVJfsZcvX56wsDBGjx5NcHAwKSkpjBgxgoMHD5KZmYmVlRU///wzb7zxBnq9PlcQ2qFDB7p27UpISAh79uzBwMAAGxsb/Pz8WLNmDdOnT8fIyAhjY2OWL1+OlZUVxsbGJCcnA7Bu3To2b96Mn58fZ8+epW/fviQkJGBmZsbSpUupUqVKrns4c+YMAwcO5NKlS6SlpdGtW7dHBlYP/u7Sk65wdd2XWHzw/Qv53d29exetVotGo2HNmjX4+/vnWtT0OHI+s8d18eJF3N3diY2NzTeFkxCvAskzmUWCSSEe04PJrLNlB0kiiwTdz9aDv7vsYNL5f0tfyO9u//79DBkyBEVRMDU1ZcmSJbz11ltP1MaTBpPLly/n008/ZebMmeriHyFeRRJMZpFgUojHlD13LecrR62R4UtPR/SqkaD72ZLfnRCvLgkms8i7AyEe06uY1/JV9Comky/M5HcnhHjVSTApxBN4FfJaajQaevXqpX5OT0/HzMwsVzWd/Pj5+TFkyJDn3T0avVmMuKnvYXhyV4HBz8P6kl1h5eLFi3Tq1Omh15o9e7Za/QWgdevWJCYmPuSMwulV+N0JIURBJJgUopApWbIkx48fJyUl61Xyzp07sbB4dYKLtWvXUt/VlTduRPyr4Mfc3FxNJ1OQB4PJrVu3qpWDXoQLFy7Qrl073n77bapXr87w4cO5f//+Q4Pl5cuXY2tri52dHY6Ojmqy9adRUF1tb2/vRz47IYR4ViSYFKIQat26NVu2bAH+qeqSLTQ0lPr16+Po6EiDBg04efJknvO3bNlC/fr1SUhI4Nq1a3Ts2BFnZ2ecnZ3V/JUTJ07MFejY2tqqaXUKqg2d3Z8ZM2YQHx/PhQsX1O1Lly6lRo0auLi45MqReebMGerXr6/Wfs6m1+uxtbUFICMjg9GjR2Nra4u9vT1z585lzpw5aknGpk2bAlmJ0BMSEgCYOXOmWkt89uzZapvW1tYMGDAAGxsbPDw81KD8SSmKQocOHfDy8iIuLo5Tp06RnJzMp59+WuA527ZtY/bs2ezYsYPo6GgOHz5cYKqdx5GdN1QIIV4mCSaFKIS6devGmjVrSE1N5dixY9SrV0/dV6tWLfbv309kZCRfffUV48ePz3Xuhg0bmDp1Klu3bqV8+fIMHz6ckSNHcuTIEQIDA+nfv/8jr19Qbejz589z6dIlXFxc6NKlCwEBAQBcunSJCRMmcODAAUJCQoiJiVHbGj58OAMHDiQ6OprKlSvne71Fixah1+uzyh3mKFdobm7Onj172LNnT67jw8PDWbp0Kb///juHDx/mxx9/VPNNxsXFMXjwYE6cOIGpqamakPtJ7d69m+LFi6t5EA0NDZk1axZLlizJFVznDNynTJmCr68v5ubmQFb1lwEDBgBZ6ZGyFyAmJCRgaWkJZE0JaNeuHe7u7rz99tu56lpnTwlQFIUhQ4ZQs2ZN3nnnHa5evfpU9ySEEE9DgkkhCiF7e3v0ej3+/v60bt06176kpCQ6d+6Mra0tI0eOVMsyQlYANG3aNLZs2aJWFdm1axdDhgxBp9Ph6enJrVu3HpnGpaDa0AEBAXTp0gXICnizK8z8/vvvuLu7Y2ZmRtGiRdU6zQAHDhxQR1aza0g/aNeuXXz00Udq1ZyyZcs
+tH8hISG0b9+ekiVLYmxsTIcOHdRE6VZWVuh0OiB3OcQndeLECZycnHJtMzExoUqVKmpVnAcD9+PHj+c553GEhoYSGBjIsWPHWLt2LQ9mvdiwYQMnT54kJiaG5cuXy4ilEOKFKvLoQ4QQL1tQZDzTt5/kYmIKKWkZBEXG4+npqSbqvn79nyo8n3/+OU2bNmXDhg3o9Xrc3d3VfdWrV+f06dOcOnVKLfGYmZnJ4cOHKV68eK5rFilShMzMf2pA56yznLO2cs7P/v7+XL58WS3fd/HiReLi4h55fw+29zw9WMv8aV9zP8ru3bsJCwtjx44dBVbreVwtWrSgXLlyQFai9ZCQEPX7A9i3bx/du3fH0NAQc3NzmjWTFExCiBdHRiaFeMU9WJtZUWDc+mjMXd5lwoQJ2NnZ5To+KSlJXZDj5+eXa1/VqlUJDAykd+/e6oilh4cHRPtgFgAAIABJREFUc+fOVY+JiooCsuYfZpcQjIiI4MyZM+ox586d49ChQ8A/tZaz5wzGx8ercyvHjRuHv78/9erVY+/evVy/fp20tDS1pCBAw4YNc5UhzE+LFi344Ycf1BG/GzduAOSpL53Nzc2NoKAg7t69y507d9iwYUOe8oZPKygynoZTd/PNwdssCfqNoMh4dd+tW7c4d+4cRYoUoXr16ty+fZtTp06p+21sbAgPD8+33ZzBe87AHQoO3oUQ4lUgwaQQr7iCaoIvjbrNsGHD8hz/ySefMG7cOBwdHdXgK6datWqxatUqOnfuzF9//cWcOXMICwvD3t6e2rVrs3DhQgA6duzIjRs3sLGxYd68edSoUUNto2bNmsyfPz9XbWh/f3/at2+f61odO3bE39+fypUrM3HiROrXr0/Dhg2xtrZWj/nuu++YP38+dnZ2xMfHk5/+/ftTpUoV7O3tcXBwYPXq1QB8+OGHtGrVSl2Ak61OnTp4e3vj4uJCvXr16N+/P46Ojg97zI8lZ2BfrKoDqakpDJo4i6DIeDIyMvjf//6Ht7c3JUqUyDdwHzduHGPGjOHy5csA3L9/n59++gnICt6zA80HV2Lv3LmTGzdukJKSQlBQkDrFIFvjxo0JCAggIyODS5cu5ZlDKoQQz5NUwBHiFfeqlSfU6/W0bduW48ePv/Brv2x5ShveusaNHd+jJF7EvHQxWrduja+vL/7+/oSFhTFv3jwiIyPp2bMnv/zyC9WrV2fp0qXMmDEDRVHQaDT069ePUaNGERsbS5cuXfh/e/cd12XVP378dQkICCokOHAhJqjs6SAUJybmQM2Z4R6pTQt/VlJa2VdvNc0yu1NyVG6yzD1yD1BcOADBAQ5MQVnKOL8/iM8NCoo4EH0/Hw8e8bnWOdf10dv3fc513m89PT38/f1ZvHgxcXFxhISEEBoaSnJyMhcvXqR///5MnDgR+F+ZQqUUY8aMYdOmTdSpUwcDAwMGDRr0wDydQohHIxVwckkwKcQz7lkrT/giB5OlEdiHhIToAlMhxLNFgslcMs0txDPuSZUn1DSN999/X/d52rRpBAcH3/ec4OBgvL290dfXx8XFhaCgoEfqQ1ljZWb8UNuFEOJFIMGkEM+4J1Wb2dDQkFWrVumSfBfXu+++S0REBBEREUyZMuWR+lDWlEbd8cDAQBmVFEI80yQ1kBBlQFfXmo+9HrO+vj7Dhg1jxowZfPHFFwX2xcXFMWjQIK5du4alpSULFiygTp06RV7r888/548//iA9PZ3mzZvzww8/oGkavr6+ODs78/fff5OVlcX8+fPx8vJ6rPfxNOV9B3lpmqzMjBnnZye1soUQLzQZmRTiBfbWW2+xZMkSkpOTC2wfM2YMb775ZoFqM3lmzJiBi4sLLi4ubNiwAYDRo0dz8OBBXc3wP//8U3f8tWvXaNiwIdevX6dly5Z07NiRefPm0alTp2L18dNPP2Xz5s1F7g8NDS1QUedBx99PWFiY7l63b99eaPLvrq412R3U+pHqjgshxPNEgkkhXmCVKlV
iwIABzJo1q8D2vXv30rdvXyC3Kk1ehRsoOM3t5+cHwLZt22jSpAmOjo5s3bq1QNWdw4cP4+vrS0JCApaWlowfP54rV64Uq3/Z2dl8/vnntG3btshj7g4mH3T8/Xh4eOieRVHBpBBCiIIkmBTiBZKXcLte0FpdJZ133nmHn376idTU1BJdMyMjg1GjRrFixQqOHTvG0KFDdUm3k5KS0NfXZ8SIEbrjnZyc8PHxISUlhR49etCwYUP69etHXmYJa2trPvroI9zc3Fi+fDmBgYG6vItBQUE0btwYJycnPvjgA/bs2cOaNWsYN24cLi4uxMTEFDj+888/x9PTEwcHB4YNG6Zrw9fXl48++ggvLy9sbW11pRa3b99Op06diIuLY+7cubpR2J07d1KvXj0yMzOB3OTk+T8LIcSLTIJJIV4QRVXS2XEunddff52ffvpJd2zz5s0LVKW5X/WYvMDRwsKClJSUAgm38weou3btonLlylSuXBnIHbGcOXMmkZGRnD17lt27d+uOrVKlCocOHaJ37966bf/88w+rV6/mxIkTHD16lI8//pjmzZvTuXNnpk6dSkREBPXr1y/Qt/tNv2dlZXHgwAFmzpzJZ599VuA8a2trRowYoRuF9fHxwdfXl7Vr1wLw22+/ERAQgIGBwf0fuhBCvAAkmBTiBVFUJZ2pG07z/vvvF1jVPXv2bBYsWICTkxOLFi3im2++KfK6ZmZmDB06FAcHB/z8/PD09CywX09PD1dXV0aMGFEgYPXy8qJWrVqUK1cOFxcX4uLidPt69ep1TzuVK1fGyMiIwYMHs2rVKipUqPDAe77f9HtAQAAA7u7uBdouypAhQ1iwYAEACxYsYODAgQ88RwghXgSymluIF0TCXYnP67y3Qre9WrVqpKWl6fbVrVuXrVu33nONovJQTp48mcmTJ+s+502nn0vOonzqLRav/fuehSqGhoa63/X09AqUfjQxMbmnDX19fQ4cOMCWLVtYsWIF3377baF9zJM3/R4WFkbt2rUJDg4uUPM6r/272y6Kt7c3cXFxbN++nezsbBwcHB54jhBCvAhkZFKIF8TTSridfzq9nJEpt+9kMvLjrwk9nFt3++jRo7p3FB9GSkoKycnJdOzYkRkzZnDkyBEAKlasyK1bt+45/n7T78VR2HUHDBhA3759ZVRSCCHykWBSiBfE00q4nX86vXrfKVTrNYmbMYfp3dYTe3t7xo8fT/Xq1R/6urdu3aJTp044OTnxyiuvMH36dAB69+7N1KlTcXV1JSYmRnf8g6bfH+S1115j9erVugU4AP369ePGjRv06dPnofsvCufr68uDyujOnDmzwMh5x44dSUpKemx9CA4OZtq0aYXua968+WNrR4jnldTmFuIFEno4/okn3C6N+tVPy4oVK/j9999ZtGhRaXelTFFKoZSiXLl7xy98fX2ZNm0aHh5Flze2trYmLCwMCwuLJ9K/4OBgTE1N+eCDD57I9cXzS2pz55KRSSFeIE8j4fbzWr96zJgxBAUF8cknn5R2V8qEuLg47OzsGDBgAA4ODixatIhmzZrh5uZGz549SUlJueeckSNH4uHhgb29PRMnTgRg1qxZJCQk0KpVK1q1agXkBpd5C8amT5+Og4MDDg4OzJw5U9d2o0aNGDp0KPb29rRv35709HTd9fLSS+XPFhAZGYmvry82NjYF8q6ampoCuWmjWrRogb+/P3Z2dowYMYKcnByys7MJDAzEwcEBR0dHZsyY8QSephDPNlmAI4R4rMb52TF+1bECK8efdP3qp2H27Nml3YUyJyoqip9//pmXX36ZgIAANm/ejImJCV9//TXTp0/n008/LXD8F198wUsvvUR2djZt2rTh6NGjjB07lunTp7Nt27Z7RibDw8NZsGAB+/fvRylFkyZNaNmyJebm5kRFRfHrr7/y448/8vrrr7Ny5Ur69+/PlClTiI2NxdDQsMBU+alTp9i2bRu3bt3Czs6OkSNH3pP66cCBA0R
GRlK3bl06dOjAqlWrqFevHvHx8Rw/fhzgsU6/C1FWyMikEOKx6upak68CHKlpZowG1DQz5qsARyk7+AKqW7cuTZs2Zd++fURGRuLt7Y2Liws///wz586du+f4ZcuW4ebmhqurKydOnChQ2agwu3btolu3bpiYmGBqakpAQIDu/dZ69erh4uICFEz/5OTkRL9+/Vi8eDH6+v8bT/H398fQ0BALCwuqVq1aaJUmLy8vbGxs0NPTo0+fPuzatQsbGxvOnj3LmDFjWL9+PZUqVSrp4xKizJKRSSHEY9fVtaYEjy+g/O/kvqSSydbLTb+klKJdu3b8+uuvRZ4bGxvLtGnTOHjwIObm5gQGBhZI5fSw7k49lTfNvXbtWnbs2MEff/zBF198wbFjxwo9vrB0UZqm3fPZ3NycI0eOsGHDBubOncuyZcuYP39+ifstRFkkI5NCCCEe2d0Vlq7czODKzQxCD8fTtGlTdu/eTXR0NJBbGenMmTMFzr958yYmJiZUrlyZK1eusG7dOt2+otI/+fj4EBoaSlpaGqmpqaxevfq+1ZpycnK4cOECrVq14uuvvyY5ObnQdzeLcuDAAWJjY8nJyWHp0qW88sorXLt2jZycHLp3787kyZM5dOhQsa8nxPPikUYmNU3rCQQDjQAvpVTYv9vbAVOA8sAdYJxSqujswkIIIcq0wiosKaWYuuE0XYNaExISQp8+fbh9+zaQm+je1tZWd6yzszOurq40bNiQ2rVr4+3trds3bNgwOnTogJWVFdu2bdNtd3NzIzAwEC8vLyC3SpGrq2uRFY2ys7Pp378/ycnJKKUYO3YsZmZmxb5HT09PRo8eTXR0NK1ataJbt24cO3aMgQMHkpOTA8BXX31V7OsJ8bx4pNRAmqY1AnKAH4AP8gWTrsAVpVSCpmkOwAal1APnvCQ1kBBClE3Pc0ooyF3NPW3atAL13YWQ1EC5HmlkUil1Eu59j0QpdTjfxxOAsaZphkqp24/SnhBCiGeTlZkx8XeV7MzbLoR4vj2Ndya7A4ckkBTif/T09HBxccHe3h5nZ2f+85//6KbJirJ9+3Y6der0UO3cXTmkKL6+vtjZ2eHs7IynpycRERHFOidvJqE4FUk+/fRTNm/eXLyOizLnaVVYKi2+vr4yKilEER4YTGqatlnTtOOF/HQpxrn2wNfA8PscM0zTtDBN08ISExMfrvdClFHGxsZERERw4sQJNm3axLp16/jss88eezv5g8n8iZ4Ls2TJEo4cOcKoUaMYN26cbvv27dvZs2fPfdv566+/Hvju2eeff07btm2Bx1eiLiEhgR49ejyWa91PaGhogTQ1ISEhJCQkPPF2yxJJCSXEi+uBwaRSqq1SyqGQn9/vd56mabWA1cAApVRMUccppeYppTyUUh6WlpYPfwdClHFVq1Zl3rx5fPvttyilyM7OZty4cXh6euLk5MQPP/ygO/bmzZv3VOCA4lcO2bZt2wOrkDRr1oz4+Hjd559++ok+ffoUek5e+pT8geqkSZOws7PjlVdeoU+fPrqax4GBgaxYsUJ3jKurK46OjgwaNEi3KMPa2pqJEyfi5uaGo6Mjp06duu+zs7Ky0l3zUWVnZxe5T4LJ4nkaFZaEEM+gvJqpj/IDbAc88n02A44AAQ9zHXd3dyXEi8DExOSebZUrV1aXL19WP/zwg5o0aZJSSqmMjAzl7u6uzp49q7Zt26YMDQ1VTEyMysrKUm3btlXLly9XSin1zz//KKWUysrKUi1btlRffPGF8vT0VAYGBmrAgAEqKytL1a5dWzVt2lSlpKSoRYsWqVq1aqnq1aurYcOGqRYtWqiDBw+qdevWqZo1a6qqVauq1q1bq/DwcGVgYKBq1KihnJ2d1fDhw5WLi4saPny4qlixourbt6/6559/lLGxsWrcuLFycHBQtra2Kj09XQUFBamKFSsqGxsbVa9ePeXl5aWWL1+u0tPTlaZp6vTp00oppVxdXVWNGjWUk5OTqlSpkpo
1a5ZSSqk5c+aowYMH655PVFSUatOmjXJyclKurq4qOjpaxcbGKnt7e6WUUqmpqapnz57K0tJSde3aVXl5eanff/9d2dvbqxEjRih3d3fVuHFj9emnn+quWbduXfXhhx8qV1dX9euvv6ro6GjVuHFjVbduXfXKK6+okydPqt27dytzc3NlbW2tnJ2d1ZQpU5SJiYmytbVVzs7OKi0tTYWFhakWLVooNzc31b59e5WQkPAE/tQIIZ41QJh6DHFUWf951NRA3YDZgCWwVtO0CKWUHzAaeBn4VNO0vHpZ7ZVSVx+lPSFeBBs3buTo0aO6Ebfk5GSioqIoX768rgIHoKvA0aNHD5YtW8a8efPIysriwoUL3Lp1i3379tGgQQP09PRYsmQJt2/f5vTp07i7u3Pp0iWsrKxo3rw5enp6XLlyhV69enHu3DkqVarEsWPHMDY2Zs+ePQWqhOzYsYPMzEwuXryIq6sr7777LhMnTqR8+fL8/fffTJw4kZUrV2JkZIShoSGmpqYMHTqUIUOGUKtWLbKysjh9+jSapmFra8u6deu4ffs2Hh4erFmzhtq1axMQEADkVi1ZtWqVru1+/foRFBREt27dyMjIICcnh6tX//c/Kd999x3m5uakpaUxadIkXfUTKLxMn5OTEwBVqlTR5QZs06YNoaGhNGjQgP379zNq1Ci2bt1K586d6dSpk25Kfd26dUybNg0PDw8yMzMZM2YMv//+O5aWlixdupQJEyZI4mohxAvjUVdzryZ3Kvvu7ZOByY9ybSGeN/mrg6RnZhN6OF43DXj27Fn09PSoWrUqSilmz56Nn59fgfO3b99eaAWOvMohwT/9zvd7r5A2ayRHIk9j6+BCQkICO3bsoE6dOkDuIoLWrVvz5ZdfYmhoSHh4OOnp6dy+fZuxY8cSHh5OjRo1GDNmDKtWrUIphY2NDYGBgXzwwQdA7nR1q1atWLBgAZBb0s7ExASABg0akJ6ezs2bN4Hcknb6+vpYWFhgZGREcnJygf5v3rwZPz8/XV5APT09XSWS/FVIbt26RXx8PN26dQPAyMgIyM0/GBMTg729PQYGBjRq1Ij09HT69++vK2uXnZ1N586dCQ8PR19fHyMjIyIjIzExMeHKlSssXLiQP/74g5kzZ7Jnzx58fHwoV64cVatW5fLlyzRu3JhLly4RFRVFjx49CA4O5tSpUwwZMoQbN27w7rvvEh4eTp06dTAwMKBOnTpYWVk9yh8VIYQoU6QCjhBPwd3VQZSC8auOEXo4nsTEREaMGMHo0aPRNA0/Pz++//57MjMzAThz5gypqalA4RU4bt68SXa58kzedI7z8QlkXruAgVUjjF7/DzXq2rBp0yaCg4MxNDRk//79XL16lTfffJPdu3ezbNkyTp8+jbW1ta6vkyZNYt++fZw6dYqmTZty4cIF3fuQqampukolhfH29iY9PZ2MjAzu3LlToMqJpmlkZ2djZ2eHUkpXDWXfvn20bNmyRM/1//7v/6hfvz5hYWGcPXuWXr166RY35d3TmTNnOH/+PAkJCbz66qs0bNiQjIwMhg0bRpUqVXT5A999913MzMwYMWIE7733HhEREZQrV47Dhw/TpUsXhg0bpms3PT2duXPnsmbNGoKCgrCxsSE9PZ02bdowefJkNm7cWKL7EUKIskhqcwvxFNxdHURl3SFm3ij6zsuhQfXKvPHGG7z33ntAbhWPuLg43NzcUEphaWlJaGgoUHgFjnLlypFasTYXvxuKfiULylvZcudyNClJ/2DQqB3t2rXD0tKScuXK8c033/DJJ58QFRXF77//zpQpU7CwsCAjIwNHR0dmzJjB5cuXef/995k8eTKLFy+mT58+LFy4kL/++guAuxfK+fj4sHDhQiA32KxSpQqvvPIKGRkZVK9encqVKxc4Pm8KvGfPnty4cYO0tDQGDBgAFL0IpmLFitSqVYvQ0FC6du3K7du3yc7OJiQkhOjoaJo2bcqdO3d0/YiMjNTVXK5
Zsybm5uZUrlwZW1tb/vrrLzIyMtizZw/Z2dn4+vqir6/P7du3qVevHidOnKBJkyYopbC2tqZfv37cvHmzQIqlGjVqkJ6ejpeXFzk5OWRmZrJ3714cHR2JiYnhxIkT2Nvbl+wPixBClDGPVAHncZMKOOJ59aSrg9x9/dSTO0jetxyUwrH2S8yZM4fevXsTFhaGhYUFS5cu5auvviInJwcDAwPmzJlD06ZNWbduHf/v//0/cnJyqFq1Kps2beLMmTP06NGDcuXKMXv2bH766acC7w9ev36dQYMGcfbsWSpUqMDMmTNp2rQpEyZM4Oeff2bNmjW4ubnh4ODAn3/+ibW1NaamprpV4VOmTGHhwoWUL1+ejh078uWXXxZ6j1FRUQwfPpyzFy5xLS0bI/cuZBxag3l5xYWzZ3TT0zt37qRr166cPXuWGTNmMGbMGDw8PNizZ0/ui+L6+owdO5bPP/8cQ0ND3TMBiI2NpV27dqSlpWFmZsbrr79Oy5YtmTt3LqGhoTRq1IgWLVqQlJTE/v37MTY2Jioqit27dzN27FjdSOykSZMYOnToI3+vQohnm1TAySXBpBBPgfeUrYVWB6lpZszuoNbP/PUfRt++fYmMjCQjI4M333yT8ePHP7Zr570ukJ6ZTVrUPlKObKBun895y9WY9/u0548//qBXr17s3buXV199lQ0bNhAQEMDx48cBmDZtGikpKQQHB9O8eXPeffddevbsiVKKo0eP4uzsTHBwMKamprz33nucP38ea2trMjMzqVu3LpGRkcycORNTU1PdO6T5A+O8c/P2CSGebxJM5pJpbiGegnF+drogKM/jrA7ypK//MH755Zcndu38rwsY13Pn1uF1RH83lInV6+Dp6cmYMWMwMDDAxcWFZs2aUb58+SKvtWTJEkaOHMnkyZPJzMykd+/eODs76/ZnZ2fTv39/kpOTUUoxduzYByZmF0KIF5GMTArxlORfzW1lZsw4P7vHmtT5SV//aXnrrbfYvXt3gW1vv/02AwcOfOKvCwghxMOQkclcMjIpxFPS1bXmEw3unvT1H5avr68uF2NRZs6cybBhw6hQoQKQW+P7l19+KXIE0MrMuNDpfCsz40KPv9+0c/PmzR9YJrKk8laI36+W89y5c6lQoYJu8ZEQQpRVkhpICFFiSildSceSyF87HB5c43ucnx3GBnoFtpV0Ov9JBZLFNWLECAkkhRDPBQkmhRAPJS4uDjs7OwYMGICDgwOLFi16YL3v4tYOz1/je/r06Tg4OODg4MDMmTMBcDHP5NbiMaRv/Y6E/44iaeVEgju+TFfXmsyaNYvGjRvj5ORE7969dW1HRkbi6+uLjY0Ns2bN0m03NTUFckcRW7RoUWjN88IUdi8A69evp2HDhri5uekq9+Tk5GBtbU1SUpLuuAYNGnDlyhWCg4N1dcujo6Np27Ytzs7OuLm5ERMTA8DUqVN1NdrztyWEEM+U0q7nmP9HanML8eyLjY1VmqapvXv3qsTEROXj46NSUlKUUkpNmTJFffbZZ0oppVq2bKkOHjyolLq3dviRI0eUUrm1sRMTE3XXzvscFhamHBwcVEpKirp165Zq3LixOnTokIqNjVV6enrq8OHDSimlevbsqRYtWqSUUqpGjRoqIyNDKaXUjRs3lFJKTZw4UTVr1kxlZGSoxMRE9dJLL6k7d+4opf5XH/1+Nc8LU9i9pKenq1q1aqkzZ86onJwc1bNnT+Xv76+UUmrs2LFq/vz5Siml9u3bp9q0aaPr29SpU5VSSnl5ealVq1YppZRKT09XqampasOGDWro0KEqJydHZWdnK39/f/X3338/9PclhHhykNrcKKVkZFII8WChh+PxnrKVekFr6f79Hixr1KJp06bs27ePyMhIvL29cXFx4eeff+bcuXP3nL9s2TLc3NxwdXXlxIkTREZG3re9Xbt20a1bN0xMTDA1NSUgIICdO3cCuSUa8+puu7u760oxOjk50a9fPxYvXlygnri
/vz+GhoZYWFhQtWpVrly5ck97eTXP9fT0dDXPi1LYvZw6dYp69erRoEEDNE2jf//+uuN79erF0qVLAfjtt9/o1atXgesVViqyQoUKbNy4kY0bN+Lq6oqbmxunTp0iKirqvs9NCCFKgyzAEULcV/7cjgBXbmaQlFmO0MPx6ClFu3bt+PXXX4s8P692+MGDBzE3NycwMJCMjIwS9yevdjfk1u9OT89dkLN27Vp27NjBH3/8wRdffKGrgHP38Xn1vvMrrOb547qXZs2aER0dTWJiIqGhoXz88cfFuk+lFOPHj2f48OHFOl4IIUqLjEwKIe7r7lKQkBvoTN1wmqZNm7J7925dne3U1NQC9bgBXS3vypUrc+XKFdatW6fbV7FiRW7dunVPmz4+PoSGhpKWlkZqaiqrV6/Gx8enyD7m5ORw4cIFWrVqxddff01ycnKh724WpbCa54Up6l4aNmxIXFyc7l3H/MG1pml069aN9957j0aNGlGlSpUC18xfKhLg9u3bpKWl4efnx/z583X3ER8fz9WrV4t9T0II8bTIyKQQ4r4SCknFk7fd0tKSkJAQ+vTpw+3btwGYPHkytra2uuOcnZ1xdXWlYcOG1K5dG29vb92+YcOG0aFDB6ysrNi2bZtuu5ubG4GBgXh5eQG59cpdXV11U9p3e9QE44XVPC9MUfdiZGTEvHnz8Pf3p0KFCvj4+BQIknv16oWnpychISGFXnfRokUMHz6cTz/9FAMDA5YvX0779u05efIkzZo1A3IXDC1evJiqVasW+76EEOJpkKTlQoj7epZKNT4JxckJKYQQhZGk5blkmlsIcV+PM7ejEEKI549Mcwsh7iuvqs7zUKqxML6+vvj6+t6zvUmTJrqp+zyLFi3C0dHxKfVMCCHKBpnmFkIIIYQoAZnmziXT3EIIIYQQosQkmBRCCCGEECUmwaQQQgghhCgxCSaFEEIIIUSJSTApRCmIi4vDwcGh2MevWbOGKVOmABAcHMy0adMe+ZqPKjU1FX9/f5ydnXFwcGDp0qWEh4fTsmVL3N3d8fPz49KlS2RlZeHp6cn27dsBGD9+PBMmTHhq/RRCCPFkSTApxBOmp6eHi4sLDg4O9OzZk7S0NAAyMzPp0qULDRo0oH79+rz99tvcuXMHyE2k3alTJ901OnfuTFBQEL6+viQkJDBr1ixcXFyoU6cOlpaWuLi40LFjRzIzM7G2tubatWsAXL58md69e1O/fn3c3d3p2LHjPeUOS2r9+vVYWVlx5MgRjh8/TocOHRgzZgwrVqwgPDycQYMGMWHCBPT19QkJCWHkyJFs3ryZ9evXM3HixMfSByGEEKVPgkkhnjBjY2MiIiI4fvw45cuXZ+7cuSilOH/+PFevXkVfXx8XFxeSkpLfLW82AAAYKklEQVSwtLTUBYJJSUm6/IchISGMHj1ad82xY8cSERHBoEGDUEqhlMLf3x8DAwMAzp8/j4+PD/Xr12fPnj0sWrSI8PBwAgIC6NmzJ/7+/tjZ2TFixAhycnKA3ODQzc0NZ2dn2rRp88D7cnR0ZNOmTXz00Ufs3LmTCxcucPz4cdq1a4eLiwuTJ0/m4sWLANjb2/PGG2/QqVMn5s+fT/ny5R/nIxZCCFGKJJgU4iny8fEhOjqaPXv2kJGRwbRp0zh58iSVK1emQYMGpKSk6EYui+Onn36iRYsWHDlypMB2CwsLJkyYgLu7O1u2bGHs2LEAvPzyy5w+fZrZs2cTGRlJTEwMq1atIjExkaFDh7Jy5UqOHDnC8uXLi2wz9HA83lO24jc/iqoDZnK7Yk0+/vhjVq5cib29PREREURERHDs2DE2btyoO+/YsWOYmZlx9erVh3xqQgghnmUSTArxlGRlZbFu3TocHR05c+YMFStWxNvbG4D+/fsTFhaGnp4esbGxxbpeUlISaWlpWFlZAfDGG28UaOuTTz4hMjKSnj17EhkZqdvn5eWFjY0Nenp69OnTh127drFv3z5atGhBvXr1AHj
ppZcKbTP0cDzjVx0jPimdzFv/cCVNseG2La8EDGL//v0kJiayd+9eIHca/8SJEwCsWrWK69evs2PHDsaMGUNSUtJDPj0hhBDPKimnKMQTlp6ejouLC8npmWRUseXI2RpkH9pLVnbB6lOapqFpGjk5Obr/ltTcuXOpWLEi/fv3Z9q0aRgZGRVo5+52i2vqhtOkZ2YDkJkYx9XtC0DT+MagPNtDF6Ovr8/YsWNJTk4mKyuLd955h2rVqhEUFMSWLVuoXbs2o0eP5u233+bnn38u8f0JIYR4dkgwKcQTEHo4XlfLGv3ydApexMrweNS/gVh6haqkp6UwJWQNQYGd+eWXX/Dw8ODPP/8kKSkJc3NzLl++TJ06dYpsw8zMjAoVKpCQkADAkiVLdPtu3ryJra0thw4dYtGiRWRnZ+v2HThwgNjYWOrWrcvSpUsZNmwYTZs2ZdSoUcTGxlKvXj2uX79e6OhkQlK67ndjG3eMbdwB0AAPj9yKYjt27LjnvPyLfvKm3IUQQjwfZJpbiPtISkriu+++e6hz8k8FK0ApWLLvvG5ED8CwZiPQM+CLieNp1KgR169fJzo6moCAAD766COGDx9erLYGDx7Mjh07cHFxQan/jXQOHDiQPXv2EBYWxm+//YaJiQkAMTEx2NraMnr0aBo1akS9evXo1q0blpaWzJs3j4CAAJydnenVq1eh7VmZGT/UdiGEEM8/Lf8/QKXNw8NDhYWFlXY3hNCJi4ujU6dOHD9+vNjneE/ZSny+Ebzz03tQ570V9xyXdTOR6xu/o6Z2g5ycHDp27Mi0adMwNDRk+/btvPrqq1SpUkV3/PLlyxk/fjzTpk3TjQKGhIQQFhbGt99+C0B2djb169cnLCwMCwsLEhISeOeddwgPD8fIyIiKFStibGzMtm3bSvQ88gLl/IGxsYEeXwU40tW1ZomuKYQQZZWmaeFKKY/S7kdpk2luIe4jKCiImJgYXFxcaNeuHVWrVmXZsmXcvn2bbt268dlnnwGwePFiZs2axZ07d4ihBi+1H4lWTo/z03tQ0aMzCfNHo+kbUrX7x+iZmJMWvZ/kPUsxIIe6jeuyZMkSqlWrRmJiIn379iUhIYF+/fqxadMmwsPDsbCwYPHixaSlpTFkyBCaNGnCd999R2BgIKNHj8bQ0JDNmzczZ84cevfuTYsWLdDX16d9+/YsW7ZMdz/bt28vNOF5ceUFjHlT+FZmxozzs5NAUgghXmAyMinEfeQfmdy4cSMrVqzghx9+QClF586d+fDDD7G0tOTDDz9k1apVGBgYUL1pF7Is6mPq0IZzX3fCsvsnVHi5CTe2zUczrIBZ895kZ6RgYlqJKd2duBa+jpMnT/Kf//yH0aNHU7NmTcaPH8/69et59dVXSUxMJDExsUAbo0aNomnTpgwYMABN01i6dCmvv/46//zzD82bN+fUqVNomkZSUhJmZmal/RiFEOK5JCOTuWRkUohi2rhxIxs3bsTV1RWAlJQUoqKiOHr0KOHh4Xh6egKgl3SLrPIVc0/S08e4vhfGBnq4+TRl99/b0IAq3IIt3/HJ79e5c+eOLiXPrl27WL16NQAdOnTA3NwcgC1bthRoIz09napVq+Y2oadH9+7dAahcuTJGRkYMHjyYTp06FaiiI4QQQjwJEkwKUYi81djnzsVx/VoqoYfjUUoxfvz4exbHzJ49mzfffJOvvvrqnvPPl9OnlnkFxvnZkRWTQqVrJwiZ4o+vry/vjX+fzp07s337doKDg+/bH6XUPW3kMTIyQk9PDwB9fX0OHDjAli1bWLFiBd9++y1bt2599AcihBBCFEFWcwtxl/yrsbXyxtxJT2X8qmNUrO/O/PnzSUlJASA+Pp6rV6/Spk0bVqxYoavscv36dVxfymJ3UGsqlNdjd1Dre94pTE5OpmbN3G358y16e3vr3nHcuHEjN27cACi0jXPnzt3T95SUFJKTk+nYsSMzZsy4pzKOEEII8bjJyKQQd8mfmFvPuBK
GNRsTM3c4PzZqyod9+9KsWTMATE1NWbx4MY0bN2by5Mm0b9+enJwcDAwMmDNnDnXr1i2yjeDgYHr27Im5uTmtW7fWVb2ZOHEiffr0YdGiRTRr1ozq1atTsWJFLCwsitXGrVu36NKlCxkZGSilmD59+hN6SkIIIUQuWYAjxF3qBa2lsL8VGhA7xf+Jtn379m309PTQ19dn7969jBw5koiIiCfaphBCiJKRBTi5ZGRSiLtYmRkXyBOZf/uTdv78eV5//XVycnIoX748P/744xNvUwghhHgUEkwKcZdxfnaFJuYe52f3xNtu0KABhw8ffuLtCCGEEI+LBJNC3EUScwshhBDFJ6u5hShEV9ea7A5qTewU/0JXYz9NJakPnicwMJAVK+4t5fgs+fLLL0u7C0IIIR6BBJNCPOMeJZh8limlyMnJkWBSCCHKOAkmhXjG5a8PPm7cOKZOnYqnpydOTk5MnDhRd9zChQtxcnLC2dmZN954Q7d9x44dNG/eHBsbG90o5aVLl2jRogUuLi5UqFCBnTt3EhcXp0t+DrBixQoCAwMBqF69Ot27d8fDwwNbW1v+/PNPAEJCQujSpQu+vr40aNBAV6scYPr06Tg4OODg4MDMmTOB3PKUdnZ2DBgwAAcHBwYPHkx6ejouLi7069fviT1DIYQQT468MynEM27KlCkcP36ciIgIXX3wAwcO6OqD79ixgypVqjB58mT27NmDhYUF169f151/6dIldu3axalTp+jcuTM9evTgl19+wc/PjwkTJpCdnU1aWhr//PPPffuRkJDAgQMHiImJoVWrVkRHRwNw4MABjh8/ToUKFfD09MTf3x9N01iwYAH79+9HKUWTJk1o2bIl5ubmREVF8fPPP9O0aVMAli9fLumPhBCiDJNgUohSNmnSJBYvXoylpSW1a9fG3d2dtm3bMmLECNLS0qhevTrZ2bkrywcPHszNmzdZvHgx2dnZWFpaEhUVxeHDh3nppZd49dVXuX37Nm+99RbDhw9n3759dOzYkXLlyjFhwgRd1ZyrV6/y448/kpmZyddff016enqBYDI9PZ0ZM2Zw/PhxkpOTyc7Opl27dpQrV44GDRpgY2PDqVOnAGjXrh1VqlQBICAggF27dqFpGt26dcPExES3fefOnXTu3Jm6devqAkkhhBBln0xzC1GKDh48yMqVKzly5Ajr1q0jL2l/1559SHHqxa2OX3EsxYT4S5d153h5eZGWlsbq1atp2LAhgwcPZu/evRgaGnLw4EEOHjzIjz/+SGxsLNWqVdONIMbHx5OTkwPkBpPffvstNWvW5M6dOyxcuLBAv77//nv09fXp1q0bn332WaGjlpqmFfjv3duLkhdgCiGEeD5IMCnEE2Ztbc21a9cK3bd79266dOmCkZERFStW5LXXXiM8+hLxV/8h5SXb3Eo8DVpy61YKoYfjMTc3Jz4+npSUFNzd3YmOjubq1atcv36dvXv34uDgQJMmTbh69SpRUVFUq1aNkydPEhkZSVZWFpqmcenSJXbs2EHHjh0ZOnQo+vr6HDp0CAB9fX1OnjzJ33//TV51LCcnJ8zNzdmyZQtbt26lVatWnD17Fju73LybmzZt4vr166SnpxMaGoq3tzc+Pj6EhoaSlpZGamoqq1evxsfHp9BnYGBgQGZm5mN/7kIIIZ4OmeYWohSEHo5n6obTnNwUiQkZuB6O16Uf2h39D/mrnJYzMqVceSP6vvoKVQyyeP3112nWrBnZ2dlcunSJW7duUbFiRcaOHcv69etRStG6dWvat2/PL7/8QlpaGuvXrycyMhI9PT2WLVtGTk4Or7zyCgYGBmRlZTFmzBgAqlatSqdOnbhx4wYtWrQo0Ofq1aszcuRI4uPj+e233zAyMgJyR0q7d+/OxYsX6d+/Px4euZXFAgMD8fLyAmDIkCG4uroSFxd3z7MYNmwYTk5OuLm5sWTJksf9qIUQQjxhUptbiIcUFxdHhw4dcHd359ChQ9jb27Nw4UL
27t3LBx98QFZWFp6ennz//fcYGhpibW1NWFgYJiYmBAQEUM+jFdtwIjFiMzf3LiMr+QqVHFox55sZfDakC1ExsZQzrIBW3ggDsxroVa5GxtlwND197GpZEBISgoODAwMHDmTlypXY29vTunVrYmJieO2111i7di2JiYlcunSJgIAArl69yqpVq0hNTaV+/fpcvHiR3r17s3v3bpo0acLixYuJjY1lwoQJrFy5EhsbG2rWrEmdOnX473//y9y5cxk5ciRWVlZ0796ds2fP8ueff5KamkqHDh04c+YMVlZWBAcH06VLF06cOMHAgQO5c+cOOTk5rFy5kgYNGpT21yaEEI+d1ObOJdPcQpTA6dOnGTVqFCdPnqRSpUpMnz6dwMBAli5dyrFjx8jKyuL777/XHZ+SksJrr71Gnz59OGLiwc1LcaSd3EGNgbOp1KQ7N0/uZEj3djg6OkJOFpW9+6JXwZzbl6JIj95P9cBvsH9zkm5kb86cOWiahpWVFb/++ivLly/H1taWiRMn6tL27N69m6VLl2Jvb4+pqSnGxsacOHECTdPw8vIiKiqKUaNGUaFCBerWrcsHH3xA/fr1OXr0KBkZGVy8eBE7Ozvee+89zM3NGTNmDJcv/+/dzS+++IJGjRrRs2dPtm3bxrhx40hNTWXu3Lm8/fbbREREEBYWRq1atZ7mVyOEEOIpk2BSiBKoXbs23t7eAPTv358tW7ZQr149bG1tAXjzzTfZsWOH7vguXbowcOBABgwYQEJSOunnIrhzJYZLC98l7cwe9EzMKf9yc86dO4eevj5Vm3SmxoD/UMmjM5XcO1PRrAqfDuiAvr4+Hh4e7Nq1iyFDhhAXF0fDhg2pW7cu/fv3Z/Lkybz55pvs3LmTatWq0bhxY5o1a0ZCQgKQ+35i3ohi3qrqlJQUAPbt20f58uVxdXXl5MmTuuDYw8OD69evExQURP/+/XX3tHHjRvbs2cOuXbvw9fUlIyOD8+fP06xZM7788ku+/vprzp07h7Gx8dP6WoQQQpQCCSaFKIbQw/F4T9lKvaC1dP9+DxmZOQX2m5mZ3fd8b29v3fuMVma5wZWJQ2usBs7GwMIaTb88mTH76N69O0aGhkzp7kRNM2PQNCqbVuCrAEcC3GuTlZX1wL4aGhrqftfT0yvynPyrqmNjY5k2bRpbtmzh6NGj+Pv7k5GRcd92lFKsXLmSiIgIIiIiOH/+PI0aNaJv376sWbMGY2NjOnbsyNatWx/YZyGEEGWXBJNCPEDo4XjGrzpGfFI6CrhyM4PEy/FMCVkDwC+//IKHhwdxcXG6NDyLFi2iZcuWumt8/vnnmJub89ZbbzHOzw6z+q6knd5NdmoSlp3HYf3G18xcsJzx48cD/6sN/k5bW0a3fvme2uA+Pj66xSpnzpzh/PnzutXVRbnfqumbN29iYmJC5cqVuXLlCuvWrQOgYcOGxMXFERMTA8Cvv/6qO8fPz4/Zs2frVn0fPnwYgLNnz2JjY8PYsWPp0qULR48effBDFkIIUWZJMCnEA0zdcJr0zOwC2/RfqsV/vplFo0aNuHHjBu+++y4LFiygZ8+eODo6Uq5cOUaMGFHgnG+++Yb09HT2/PoN04d3wqbDYK4s+4TEn8eQvXYSTlWKvxhu1KhR5OTk4OjoSK9evQgJCSkwIlmYvFXThZUtdHZ2xtXVlYYNG9K3b1/dFL6RkRHz5s3D398fNzc3qlatqjvnk08+ITMzEycnJ+zt7fnkk08AWLZsGQ4ODri4uHD8+HEGDBhQ7PsSQghR9shqbiEeoF7QWvL/LclKvsLVFZ9Rc/B3xE7xL7V+CSGEKF2ymjuXjEwK8QB57zgWd7sQQgjxIpFgUogHGOdnh7GBnu6zfuVq1B/xA+P87v+OohBCCPEikAo4QjxA3uKXqRtOk5CUjpWZMeP87O5ZFCOEEEK8iCSYFKIYurrWlOBRCCGEKIRMcwshhBBCiBKTYFIIIYQQQpTYIwWTmqb11DTthKZ
pOZqm3bM0XtO0OpqmpWia9sGjtCOEEEIIIZ5NjzoyeRwIAHYUsX86sO4R2xBCCCGEEM+oR1qAo5Q6CaBp2j37NE3rCsQCqY/ShhBCCCGEeHY9kXcmNU0zBT4CPnsS1xdCCCGEEM+GB45Mapq2GaheyK4JSqnfizgtGJihlEopbNTyrusPA4YB1KlT50HdEUIIIYQQz5AHBpNKqbYluG4ToIemaf8HmAE5mqZlKKW+LeT684B5kFubuwRtCSGEEEKIUvJEkpYrpXzyftc0LRhIKSyQFEIIIYQQZdujpgbqpmnaRaAZsFbTtA2Pp1tCCCGEEKIseNTV3KuB1Q84JvhR2hBCCCGEEM8uqYAjhBBCCCFKTIJJIYQQQghRYhJMCiGEEEKIEpNgUgghhBBClJgEk0IIIYQQosQkmBRCCCGEECUmwaQQQgghhCgxCSaFEEIIIUSJSTAphBBCCCFKTFNKlXYfdDRNSwTOPcIlLIBrj6k7onTId1i2yfdX9sl3WPbJd/j01FVKWZZ2J0rbMxVMPipN08KUUh6l3Q9RcvIdlm3y/ZV98h2WffIdiqdNprmFEEIIIUSJSTAphBBCCCFK7HkLJueVdgfEI5PvsGyT76/sk++w7JPvUDxVz9U7k0IIIYQQ4ul63kYmhRBCCCHEU/RcBZOapk3VNO2UpmlHNU1brWmaWWn3SRSPpmkdNE07rWlatKZpQaXdH/FwNE2rrWnaNk3TIjVNO6Fp2tul3SdRMpqm6WmadljTtD9Luy/i4WmaZqZp2op//y08qWlas9Luk3j+PVfBJLAJcFBKOQFngPGl3B9RDJqm6QFzgFeBxkAfTdMal26vxEPKAt5XSjUGmgJvyXdYZr0NnCztTogS+wZYr5RqCDgj36V4Cp6rYFIptVEplfXvx31ArdLsjyg2LyBaKXVWKXUH+A3oUsp9Eg9BKXVJKXXo399vkfsPWM3S7ZV4WJqm1QL8gf+Wdl/Ew9M0rTLQAvgJQCl1RymVVLq9Ei+C5yqYvMsgYF1pd0IUS03gQr7PF5FApMzSNM0acAX2l25PRAnMBD4Eckq7I6JE6gGJwIJ/X1X4r6ZpJqXdKfH8K3PBpKZpmzVNO17IT5d8x0wgd9ptSen1VIgXj6ZppsBK4B2l1M3S7o8oPk3TOgFXlVLhpd0XUWL6gBvwvVLKFUgF5B108cTpl3YHHpZSqu399muaFgh0AtooyXtUVsQDtfN9rvXvNlGGaJpmQG4guUQptaq0+yMemjfQWdO0joARUEnTtMVKqf6l3C9RfBeBi0qpvFmBFUgwKZ6CMjcyeT+apnUgd4qms1IqrbT7I4rtINBA07R6mqaVB3oDa0q5T+IhaJqmkfue1kml1PTS7o94eEqp8UqpWkopa3L/Dm6VQLJsUUpdBi5ommb376Y2QGQpdkm8IMrcyOQDfAsYApty/21jn1JqROl2STyIUipL07TRwAZAD5ivlDpRyt0SD8cbeAM4pmlaxL/b/p9S6q9S7JMQL6IxwJJ//4/5WWBgKfdHvACkAo4QQgghhCix52qaWwghhBBCPF0STAohhBBCiBKTYFIIIYQQQpSYBJNCCCGEEKLEJJgUQgghhBAlJsGkEEIIIYQoMQkmhRBCCCFEiUkwKYQQQgghSuz/Aw2vyiLb2oSdAAAAAElFTkSuQmCC\n", "text/plain": [ - "
" + "
" ] }, - "metadata": {}, + "metadata": { + "needs_background": "light" + }, "output_type": "display_data" } ], "source": [ - "plt.figure(figsize=(10, 10))\n", + "plt.figure(figsize=(10,10))\n", "plt.scatter(tsne_df[0].values, tsne_df[1].values)\n", "for i, txt in enumerate(tsne_df.index):\n", - " plt.annotate(txt, (tsne_df.values[i, 0], tsne_df.values[i, 1]))\n", + " plt.annotate(txt, (tsne_df.values[i,0], tsne_df.values[i,1]))\n", "plt.show()" ] }, @@ -5835,69 +19851,40 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": 69, "metadata": {}, "outputs": [ { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiIAAAGdCAYAAAAvwBgXAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABJYUlEQVR4nO3dd3xUVd4/8M+dmzIlhZJCgIQiKAgoHSkqTbHgs7grsIqNXXH1wUVFXcv6W8ujYmFZVnQRXBfLWlhceVSsPBbABaXZFRURgqRTMpnJZJK59/z+CBMIZCYzydy57fPeV15uZm7mHiaTud/5nu/3HEkIIUBERESkA4feAyAiIiL7YiBCREREumEgQkRERLphIEJERES6YSBCREREumEgQkRERLphIEJERES6YSBCREREuknRewDRqKqKkpISZGZmQpIkvYdDREREMRBCoKamBl27doXDET3nYehApKSkBIWFhXoPg4iIiNpg79696N69e9RjDB2IZGZmAmj8h2RlZek8GiIiIoqF1+tFYWFh03U8GkMHIuHpmKysLAYiREREJhNLWQWLVYmIiEg3DESIiIhINwxEiIiISDcMRIiIiEg3DESIiIhINwxEiIiISDcMRIiIiEg3DESIiIhIN4Ze0IyIjOOr8q/wyd5PkIIUTOw1Ed07deceUETUbgxEiCiqHw/8iMteuQyb9m1qui3FkYKL+16MxecuRqfsTjqOjojMjoEIEUVUWlOKMU+Nwf7A/ma3h9QQnv/ueZTVluGVGa8gIyNDpxESkdmxRoSIIlq0aRH2B/ZDEcpx96lQsXbvWrz77bsQQugwOiKyAgYiRBTRU58+1WIQEpYipWDVD6tQV1eXxFERkZUwECGiFqlCxcG6g1GPCYkQKgOVUJTIwQoRUTQMRIioRQ7JgRxXTtRjZElGvjsfsiwnaVREZDUMRIgooquGXgVZihxkKELBjL4z4HQ6kzgqIrISBiJEFNH80fPRxdOlxWBEgoT/6vlfmNRvEtcTIaI2YyBCRBHlenLx8ZyPMbHnREg4Emw4ZSeuHng1nv3ls2zdJaJ24ToiRBRV96zuePfyd7HrwC5s3rsZqVIqTi86HbnZucyEEFG7MRAhombqQnX42fsz3KludM3s2nR770690btTbx1HRkRWxKkZIgIAeINe3Pzuzch7JA99l/RFt0XdMHjpYPz7m3/rPTQisjAGIkSEmmANzlhxBhZ/vBg19TVNt39R8QUuWnURFm5YqOPoiMjKGIgQERZuXIgvK748bhVVgcal22/74DbsLN+px9CIyOI0DUT27duHSy+9FJ07d4bL5cKgQYOwdetWLU9JRHFShYqlW5dCFWrEYwQEln2yjHvKEFHCaVasevDgQYwdOxYT
JkzAW2+9hdzcXPzwww/o2LGjVqckojbw1ftQWVsZ9RgJEnZW70RdXR1cLleSRkZEdqBZIPLQQw+hsLAQK1asaLqtV69eWp2OiNrImeKEQ3JEzYhIkJCRksE9ZYgo4TSbmnnttdcwfPhwTJ8+HXl5eRgyZAiefPLJqD8TDAbh9XqbfRGRttLkNEztMzXqUu4hEcK5Pc7lnjJElHCaBSK7du3C0qVL0bdvX7zzzju49tprMW/ePDzzzDMRf2bBggXIzs5u+iosLNRqeER0lDvPvBPS4f8dS5ZkDM8djjFdx3BPGSJKOEloVH2WlpaG4cOHY+PGjU23zZs3D1u2bMGmTZta/JlgMIhgMNj0vdfrRWFhIaqrq5GVlaXFMInosNVfrsYVr1+BmoYapDhSIISAIhSMzh+Nx898HCcWnQiPx9PsZ4QQqKurg6IokGUZTqeTq60SEbxeL7Kzs2O6fmtWI1JQUICTTz652W39+/fHv/8deXGk9PR0pKenazUkIoriwkEX4vTC0/Hstmfx7YFv4ZSdmFQ4CUPyhiAnJ+e4IMTv96OqqqpZ3Ygsyy0eS0QUiWaByNixY/Hdd981u+37779Hjx49tDolEbVTTocc3DjxxlazHH6/H+Xl5cf9vKIoKC8vR35+PoMRIoqJZoHIjTfeiDFjxuCBBx7AjBkzsHnzZixfvhzLly/X6pRElACSJEVt0RVCoKqqKupjVFVVwe12c5qGiFqlWbHqiBEjsHr1arz44osYOHAg/ud//geLFy/GrFmztDolESVBOFsSjaIoqKurS9KIiMjMNN19d+rUqZg6daqWpyCiJIt1LRGuOUJEseBeM0QUl1jXEuGaI0QUC00zIkR2YLcWVqfTCVmWo2Y8ws8DEVFrGIgQtYMdW1glSUJOTk6LXTNhOTk5lg7GiChxODVD1EbhFtZjMwPhFla/36/TyLTn8XiQn59/3PSLLMts3SWiuDAjQtQGbGFtDEbcbretpqWIKPEYiBC1QTwtrNHW5DC71tYcISJqDadmiNqALaxERInBQISoDdjCSkSUGJyaIWoDq7Ww2q0FmYiMg4EIURtYqYXVji3IRGQcnJohaiMrtLDauQX5WAd3HUTxf4pxcNdBvYdCZCvMiBC1g1YtrMFQEBuKN6AmWIMTO5+IAXkDEjTiI9iC3Gjvxr1495Z38fPGn5tu63paV0xZOAVFY4t0HBmRPTAQIWqnRLawCiHw10/+invX3YuDdUc+mY/qNgpPXvAkBuUPSsh5ALYgA8Ce9Xvw7ORnIRTR7PbSzaV4evzTmPH6DPQ7p59OoyOyB07NEBnIfevvw43v3NgsCAGArSVbMeYfY7CjakfCzmX3FmQhBNb8bg1URYVQmwciQhUQqsBbc9+Cz+fTaYRE9sBAhFokhEAgEIDP50MgEIAQovUfonYp85XhnnX3tHifIhQEGgK4Y+0dCTuf3VuQ923eh6odVYAa4QAV8O7y4tt3v+Xrn0hDnJqh47CLQh8vfPlC1AueIhS8+sOrOBg4iI6uju0+n9VakON16KdDMR3nLfZaenqKSG/MiFAz7KLQT/HBYjgc0f8kVaFi74G9CTlfuAU5GrO0ILeFq1NsgUVadpplp6eIjICBCDWJtYvCbGlqs0wz5bhyoIpI8wSNJEjokNYhYee0QgtyW/Uc37PVYCS9Yzq6jO5i2ekpIiNgIEJN4umiMAu/34/i4mKUlpaioqICpaWlKC4uNmRmZ+aAmUCUGEmWZEzsPhG5GbkJPa/H40FRUREKCgqQl5eHgoICFBUVWToIAQA5TcbE+ydGPWbw/MFIc6VZdnqKyAgYiFATM3VRxJLlMNs0U5/cPrh64NUt3ueQHEhxpODmoTdrclEMtyBnZGTA5XJZdjrmWMOvGY6JD0+E7GrMeEhy479bdskYfudwnHjJiZaeniIyAhartsHR+3KE5/RVVTX9Hh1m6aKIpZjWjIt1SZKEh6c8DKfsxLKvliGgBJru653VGw+N
fginn3i6YcZrFaffcjoGXjYQ21/YDn+5H65cF4rOLoIz28kCbaIkkIRRJ8wBeL1eZGdno7q6GllZWXoPB0DLF8Gjmbm7RAiB4uLiVrsoioqKdLsYhrMckYTrGgKBAEpLS1t9vIKCAsN1Q/j9fuwp3YN1P6+Dr8GH3lm9MSx/GHJzc035ujILbvxHlDjxXL+ZEYlDaxdB4Eja34yFfkbfyC2eLIeZppmO5fF40P+E/ujVrRcvikmUyBVyiSh2DERiFMtF8GhapP2T8Ykt3EVhxHVE4immNcs0UyS8KBKRXTAQiVEsF8GjJXqPjmQuMqbVRm7tFU+Ww+Px2HqxLiIis2DXTIzaksJPVNpfj+4PI3ZRxJPlsPtiXUREZsFAJEZtSeEnIu1v1UXG2iK8JHk0R2c57LxYFxGRWdhvaiZUCVH9HJS676EiG2rGDKRnDW31k3Es+3IcLVFpf27VfkRbimmNOs1ERESN7BWIHHwMonw+AAUyHJAhINU8DH/5+UDXZ+DJ6BzxR2O5CB4tUWl/M3d/aKEtxbQs/CQiMi77BCLelUD573EkNDiyp4dbfQu+fVfD3/3ZqJ+eI10Ej5boAlKzd39ogVkOIiLrsEcgIgRE5V1o3DLs+FoKCSoyxGrsK/8UlY7uUNUjQcqxgcWxF0GtV1a1+1btkTDLkTiqULHzwE7UK/U4oeMJcKXyeSWi5LFHsWr995AavmsxCDlCgkt5u1kQArTcmXJ0R4nb7Ybb7dasu4TdH6QVIQSWblmKXot74aTHTsKgpYOQtzAPN71zE/z1xtqHh4isyx6BiFoTw0EOSIj85qtnZwq7P0gLt6y9Bf/95n+j2FvcdJuv3ofFnyzGhKcnoC5knl2Wici87DE1k9YbAimQEIp4iIQQGqS+Ee/XuzOFdRGUSJ+VfYY/b/pzi/epQsXW0q1YsmkJbjn9liSPjIjsxh4ZEbkTkDkDAi0XdApIUNAJfmli1IfRuzMl1kXGDhUfwkeLPsJ7d7+Hz1/6HKH6yAEY2dOT25+ELEUvcH5i6xO2WJ+GiPRlj4wIACnvEai16wClDBKOBBSNwYmEipSFgJQW9TGM3pmi1Ct49epX8eWzXwIAJFmCCAm83fltnP/38zFw2kCdR0hGsaNiBxQRObAWENjr22uL9WmISF/2yIgAQGpXOHptRchzJVQ0dpgISKiVxqEs/V8IpkyI+uNm6Ex55YpXGoMQAUAAItT4abbuQB1euegV/PD+D/oOkAyjQ3oHOKTof/6eFI/uWUAisj77BCIAkNIFqYV/h9T3AOq6fQ9/171wFL2FLj2nITc3N+qPGr0zpfLbSnzz0jdosTHo8G0f3vMhU+0EALio/0VQhRrxflmS8YvevzB8FpCIzM9egchhkuyCM7MvMrK6NdVamL0z5fPnP4ckRw6UhCJQsqEEB0sOJnFUZFS/Gvgr9O/Yv8U6EYfkgFN24qoBVxk+C0hE5mfLQCQSj8eDoqIiFBQUIC8vDwUFBSgqKjJ8EAIAgYMBSI5WMjYCqD1Qm5wBmYgQAoFAAD6fD4FAwBZZo7SUNLx58ZsYmjsUQGMGJEVqLBnLd+Xjn2f/E0N6DTF0FpCIrME2xaqxasuKnUII3dtqO/buCDUUOdUOAI40B7K6ZiVpRObg9/vj2rfGSnrm9sQHl3+AD3Z8gPf3vo8GtQGn5pyKiYUTkZ9n/CwgEVmDJAz88c/r9SI7OxvV1dXIyjLmBdQoFzJ/lR+Lui6C2tByMCLJEvpc1AcXv3gxP+Ue5vf7o25iaIYpuUQwQiBNRNYSz/WbUzPtEL6QHdtZ0NKy8Frz5Hgw4cHDnT/HXEMkWYKzsxMT/2di0i4wRp/uEEKgqqoq6jF6rqabTLGuT0NEpAVOzbRRrBcyt9udtDf2cfPHwdXJhfX3rId3txcAIDkk9DinB87+89no0rdLUsZhlCxRNOEMQDR6r6ZLRGQHDETayKgXsmFXDsOQy4dg32f7UFddh04ndEKnwk5J
C4YiTXeEs0RGme6IdX0MrqNBRKQtBiJtFOsFyu/3J/0TtcPhQOHQwqSeEzBmliiSWNfH4DoaRETaYo1IG8V6gfL5fLaoMwDiyxLpzel0tvo7NMNqukREZsdApI2cTiccjtafPlVVDXHhTQYzTXdIkoScnJyoxxh9NV0iIitgINJGkiQhIyMjpmONcOFNBrNNd5h9NV0iIitgjUg7eDweeL3eVo8zyoVXa+HpjmiBl9GmOzweD9xut+7raHAtDyKyKwYi7WDGC6+WwtMd0RYJM+J0R1tW000kM7Q7ExFphVMz7cA6g+O1d7rD6AuhJZqRFsUjItIDMyLtFL7w8hPtEW2d7rBbZsBM7c5ERFphIJIARqkzMJJ4pzvMshBaIhl1UTwiomRiIJIgetcZmJldMwNmancmItIKAxETslqHhV0zA2Zrd9aT1V7zRHQEAxGT0bqOQo83fLtmBth1FRu71Q4R2Q0DERPRuo5Crzd8u2YGzNrunEx2rB0ishu275pErHUUbW131bON1M77vnB118i0fs1TYtit5Z4SjxkRk9CyjkLvYlG7ZwbYddUyu9YOmQmnzSgRmBExCS3rKIywa67dMwPhrquMjAy4XC7bByGAfWuHzIKL8VGiJC0QefDBByFJEm644YZkndJStKyjMMobvsfjQVFREQoKCpCXl4eCggIUFRVZPgihltm1dsgMOG1GiZSUqZktW7Zg2bJlOOWUU5JxOkvSssPCSG/4XI+FwthVZFycNqNE0jwj4vP5MGvWLDz55JPo2LGj1qezLC33tbFzsSgZF/dyMq5QKJTQ48jeNA9E5s6di/PPPx+TJ09u9dhgMAiv19vsi47Qqo6Cb/jtw64B7di9dsiojDKdS9ag6dTMSy+9hO3bt2PLli0xHb9gwQLcc889Wg7J9LTqsODmfW3DrgHtsavIeIw0nUvmp1kgsnfvXlx//fVYu3ZtzCn922+/HfPnz2/63uv1orCwUKshmpZWdRR8w48PF9tKHtYOGUtKSmyXjliPI3vT7FWybds2VFRUYOjQoU23KYqC9evX47HHHkMwGDwuWk5PT0d6erpWQzI8I+ynwTf82Oi99koieINerPp6FX468BM6Ozvjl/1+iaLORYYdLxkHC4kpkTQLRCZNmoQvv/yy2W2zZ89Gv379cOuttzJldwym+BNL66DO7F0Dy7Yuw43v3IhAKIAUKQWKUHDL+7fg2kHXYsFZC5CRkaH3EMnA7L4IISWWZoFIZmYmBg4c2Ow2j8eDzp07H3e73THFn1jJCOrMXKz3/BfP45o3rmn6PiQaOxsUoeCxLx6DDBn3n30/X3MUFevKKFE4gaczK6T4tdDWjEaygjqzFuupQsUd790R9Zgnvn4Cvx34WwzsM9A0rzkjTGvaEevKKBGSGoh8+OGHyTydKZg9xa+FtmY0khnUmXWOfGvJVhR7i6MeE1SCeK/4PfTp3ue415w36MW/vv4Xfqz6EZ2dnfGr/r9Cz5yeul54OK2pL9aVUXsxI6IzM6f4tdCejEYygzqzzpEfDBxs9RgJErz13uOeyye2PoEb37kRwVAQskOGqqq49cNbMWfAHDwy5RFkZmRqNeyIOK1JZH7c9E5nZk3xa6G9+1ckO6gz42JbJ3Q6odVjBASKMoua/bue/+J5XPvGtagL1UFAIKSGoEKFKlQs+2oZ7nj3jqRvcsb9ToisgYGIzri8+hHt3QVYj6DObBv19enUB6cXnQ5Zavk5kCAh35WPM7qd0fSaU4WKP77/x6iPu/zr5fhx349JvegbYddoImo/BiI6S+by6kZfiry9GQ29grrwHHlGRgZcLpfhpmOO9fh5j8OZ4jwuGHFIDjgkBx4c8yDy8/Kb/h3bSrZhT/WeqI9Zr9bj/4r/L6kXfU5rElkDAxEDSEaK3+/3o7i4GKWlpaioqEBpaSmKi4uTnk6Ppr0ZDe6ZE5veGb3x8jkv44yuZ0DCkediSM4Q/POsf+KC/hc0e80dqjvU
6mNGqivREqc1iayBxaoGoWUbnFkK+hLRicK1DaIL11Wc2OFE/H3i31EZqERZbRk6Ozujq6crgMYdszt16tT02mtrXYnWzNq5RETNMRAxEC3a4My0TkmiOlG4tkFkx9ZV5LpykevKbXbMsZ1FvTv2xvie47FhzwYo4viLvgQJua5cjO8+PqkXfbN2LhFRc5yasTizFfQlaprKbHUbydLWuorHzn0MrlRX5LqS0c3rSpLFjJ1LRNQcMyIWZ8aCPmY0tNPWuooBeQOw+arNuOWdW/Dmj29CoLHQeWjOUPxh+B9w3oDzdLvo8/VCZG4MRCzOrAV9XK1RG+2pq+if2x9rLl2DCl8FdlXtQsf0jijqUGSIiz5fL0TmxUDE4ljQR0dLRF1FXkYe8jLytBgeEdkQa0Qsji2tdCzWVRCRkTAjYgNsaaVjsa6CiIyCgYhN8MJDx2JdBREZAQMRG+GFh4iIjIY1IkRERKQbBiJERESkGwYiREREpBsGIkRERKQbBiJERESkGwYiREREpBsGIkRERKQbBiJERESkGwYiREREpBsGIkRERKQbLvFuY0II7j2TALvX7caWx7egZGsJUlwpOHHaiRg1dxSyumbpPTQiIsNjIGJTfr+fu/G2kxAC/3fr/2HjIxshyRKEIgAA+3fsx5ZHt2DG6zPQZ3wfnUdJRGRsnJqxIb/fj/Ly8mZBCAAoioLy8nL4/X6dRmYuX6/8Ghsf2QgATUEIAAhVIFQbwqoLV+FQ5SGdRkdmJYRAIBCAz+dDIBCAEKL1HyIyMQYiNiOEQFVVVdRjqqqq+OYXg40LN0b8CxKqQP2hemx9eiufS4qZ3+9HcXExSktLUVFRgdLSUhQXF/PDAVkaAxGbCdeERKMoCurq6pI0InMKBUMo3VYKqJGPkWQJpRtL+VxSTJipJLtijYjNtBaExHtcJKqi4qf3f0LVziqkdUjDCVNOQGbHTFsWw7b3uSTrizVT6Xa7bfk3RNbGQMRmZFlO6HEt+eHNH/Da1a/Bt8/XdFtqRiqG3jwU424ah4yMjDY/tlGkpKcgf0g+yj8vj5gVEYpA3qi8dj2XZA/xZCpdLleSRkWUHJyasRmn09nqhTHcytsWP679ES9e8CJ8Jb5mtzf4GvDJ3Z/gwwc/tEyKeezNYyNPzTiAtOw09JnWp83PJdlHsjKVREbEQMRmJElCTk5O1GNycnLalP4VQmDtzWsbizMj1Gd+tugzlO4ptUQB58CLB2L49cMBNNaDhEkOCSmuFEx8aiK6FHZhKp1alYxMJZFRcWrGhjweD/Lz8xO+jkjVt1Uo/6I86jFKQMHud3ajW69upk8xS5KE8xefj95n98Ynj32C/V/th+yU0eOcHuh3aT8UDSjimiw6S9aife09TzhTGS3j0Z5MJZGRMRCxKY/HA7fbndA3aV+5r9VjJIeEuqro8+FmW/G1/3n90e/cfqYasx0ka9G+RJwnnKksL48cyLc1U0lkdAxEbEySpIRmJbK6t76kuVAF3F3cEVPMZl3xNdHPJbVPuBX2WOFW2Pz8/IS8nhJ5Hq0ylURGx0CEEqZz387oNqobSraUQKgt14CkZqaix9k9WkwxJ+viQdaWrFZYLc6jRaaSyOhYrEoJdc7icyClSBFfWcPvHI4u3Y8v4OSKr5QoyVq0T6vzhLNrGRkZcLlcDELI8hiIUEJ1P607rnj/CuQOyG12u7vAjTP+egbGzh3bYlaDK75SoiSrFZYtt0SJwakZSriisUW49vNrUfZ5GSp/qER6h3R0H90dbk/kFDXf1ClRktUKy5ZbosRgIEKakCQJBYMLUDC4IKbj+aZOiZKsVli23BIlBqdmyBC0XvGV7EPLRfv0OA+R1TEQIUPgmzolUrgV9tjgVpblhHZfJes8RFbGqRkyDK6jQImUrFZYttwStQ8DETIUvqlTIiVroTkuaEfUdgxEyHD4pk5EZB+sESEiIiLdMCNCRGRS334r8MwzIVRWAkVFwG9+k4LCQk5j
krkwELEps+1wS0RHNDQAv/lNA/75z1TIcgokCVBV4N57gVtvrcf996eBf85kFgxEbMisO9ySvhi8Gsf11zfg+ecb374VpfnvYMGCNOTkBDF/froeQyOKmyQMvIuY1+tFdnY2qqurkZXV+hbz1LpIO9yGce0DagmDV+OoqBDo1g0IhSIHgTk5CkpKHEhNNVagyGDWPuK5fjMjYiPJ2h6drCVS8KooCsrLyxm8JtmrrzYgFEqLekxVlYwNG4KYONE4WREGsxQJu2ZshDvcUrxiDV4NnFi1HJ9PhSS1/nzX1KhJGE1swsHsse8/4WDW7/frNDIyAgYiNsIdbileDF6N5+STJQgRPWMpSQL9+xsjq8lgllrDQMRGuMMtxYvBq/GcdVYaevRogMPR8oVblgXOPDOAvn2NMS3DYJZaw0DERrjDLcWLwavxOBwSVqwIITVVQJabByOyLJCdrWLJEhimzovBLLWGgYiNcIdbiheDV2OaMMGF9evrMGVKbVMwkp6uYsYMHzZuDGLgQLfOIzyCwSy1hl0zNsMdbike4eA1Wss3g1d9jBzpxpo1Avv31+HgQRU5OQ506JBhuN9FOJiNlvFgMGtvDERsiDvcUjwYvBpXY6DoQiuJzphpsc4Hg1lqDQMRm+IOtxQPBq/Wp+U6HwxmKRpNa0QWLFiAESNGIDMzE3l5eZg2bRq+++47LU9JRBoJB68ZGRlwuVwMQiwkGet8eDweFBUVoaCgAHl5eSgoKEBRURGDENI2EFm3bh3mzp2Ljz/+GGvXrkVDQwPOPvtsLl5DRGQQyVzng8EstUTTqZm333672fdPP/008vLysG3bNpxxxhlanpqIiGIQzzofnM4lLSS1RqS6uhoA0KlTpxbvDwaDCAaDTd97vd6kjIuIyK64zgfpLWnriKiqihtuuAFjx47FwIEDWzxmwYIFyM7ObvoqLCxM1vCIqAVCCAQCAfh8PgQCAS7DbUFc54P0JokkvbNce+21eOutt/DRRx+he/fuLR7TUkaksLAwpm2Eyd64vXjicbdUexBCoLi4GIqi4CfvT9hYuhGKUDAkdwgGdR4EoPH3XlRUxL8pipnX60V2dnZM1++kTM1cd911WLNmDdavXx8xCAGA9PR0pKcbY38EMg9eMBMv3EVxrHAXRX5+Pp9bi5AkCQ6PA7P/dzY+2PcBJDQGGwICgzoPwqOnP4oRfUYwCCHNaDo1I4TAddddh9WrV+P9999Hr169tDwd2RC3F0887pZqLw1KA37x8i+wvmQ9gMYARKDxd/vNgW/w63d+jara6K8HovbQNBCZO3cu/vnPf+KFF15AZmYmysrKUFZWhkAgoOVpySZ4wdQGd0u1l1e+fQWfln8KRRz/O1eEgsq6Sjy66VH+HZFmNA1Eli5diurqaowfPx4FBQVNXytXrtTytGQTvGBqg10U9vLsZ8/CIUW+FKhCxaofVvHviDSjaY0II2jSEi+Y2mAXhb5CagivffcaPtr9ESRImNxrMqacOAUOhzafG8v95VCFGvWYg8GD/DsizXCvGTItXjC1wd1S9bOtZBv+68X/QomvBClS49vzos2LcFKHk7B6+mr079o/4efs3aE3Piv/rMWpGQCQIKGbpxv/jkgzSVtHhCjRwhfMaHjBjF94t9RouFtq4u2t3osJz0xAmb8MABASIYRECACws3onzn7xbFQeqkz4eecMnxMxCAmbddIs/h2RZhiIkGnxgqmd8G6pxwZ6siyzdVcjSzYvQW1DbYvTJIpQ8LPvZ6zYsiLhU96Te0/GhSde2NS2ezRZkjGw00Bce9q1/DsizTAQocQIlUNU3Qdl9xSEdp+D+vKHIUIHND8tL5ja4W6pyfXCly9EzUxIkPD6T68nvGhUkiSsnLESN426CZ6UI7/bVEcqpveZjndmvYOcDtEDfqL2SNrKqm0Rz8pspKOa1yH2TQfQACD8aU6CgAfBvFfg6nSW5kPgyqpkdh0e7IDqYHXUY07tfCo+uvIj
ZGRkaDIGf70fH+/5GPWhepza5VQUdCjg3xG1ieFWViULC+6A2PcrACFIODqmFQBqkV4xDf6UL+HJ6q3pMMLbixOZ1YmdTsS20m1Q0XIHiyzJ6Nuhr6ZFo540Dyb1naTZ4xO1hFMz1C7i4F8BqMcEIY0kqJBQh/qKpWzlJmrF3JFzIwYhQGOdyCUnXcKiUbIcBiLULqLmdUiIVnGvwqX8HxdDImrFrFNm4Zze5xxXNBr+/sp+V+Lsk8/mVAlZDgMRah9RH/VuCYCEei6GRNSKFEcKXrvkNfy/cf8PnZ2dm24vzCjE/aPvx5Lzl7BQmCyJxarULsqeqXAE3o6YFRGQ4XVcjLTC5azhIIpRg9KAHyp+gCQk9OjQAy6Xi5kQMhUWq1LSODrfAOnnN6IcocKfehmyOK9NFLNUORUnF5ys9zCIkoJTM9QuUsZk1GfeDKAx+xEW/v/75f+H7Lyx/DRHREQtYiBC7ZbW7RHU5fwLdY6RUJEGFU7USmeiLO1FuLr+gfPaREQUEadmKCGcOdMhOl/UbFGxfC4qRkRErWAgQgnDRcWIiChenJohIiIi3TAQISIiIt0wECEiIiLdMBAhIiIi3TAQISIiIt0wECEiIiLdMBAhIiIi3TAQISIiIt0wECEiIiLdMBAhIiIi3TAQISIiIt0wECEiIiLdMBAhIiIi3XD3XSIispQfD/yIZz5/Bj8f+hl57jzMGjgLA7sOhCRJeg+NWsBAhIiILEEVKm565yYs/mQxZEmGBAkCAg99/BAu73c5lpy3BFmZWXoPk47BqRkiIrKE+9ffj8WfLAYAKEJBSISgCAUA8OyOZ3HH2jvg9/t1HCG1hIEIERGZnr/ej4c3Phz1mH988w/sLt0NIUSSRkWxYCBCRESmIoRAIBCAz+dDIBCAEALr9qyDr94X9ecCSgAbft6Aurq6JI2UYsEaESIiaiKEQF1dHRRFgSzLcDqdhiry9Pv9qKqqgqIoTbfJsoz9Nftj+vlAKNDsZ0l/DESIiAhA5It8Tk4OPB6PjiNr5Pf7UV5eftztiqIgD3kxPUbfDn0hy3Kih0btwKkZIiJqusgfmy1QFAXl5eW6F3kKIVBVVRXx/r4d+mJI7hDIUstBhizJGNhpIAblDoLT6dRqmNQGDESIiGyutYs8AFRVVela5BmeLormwdEPIiMt47hgRJZkuFPceHjMw8jJyTHUVBMxECEisr1YLvKKouha5BlLXUef7D7YcOkGXHzyxUh1pAIAUh2p+EWvX+D1C17HGf3OMMQUEzXHGhEiIpuLtXhTzyLPWOs6+nTug+cueg7LG5aj7FAZMlMz4Un3GK7olo5gIEJ0DKN3DRBVVQm88UYDamtVnHoqMHp0erteo7Fe5PUs8nQ6nZBlOWowFP57BQBXqgu9cnsla3jUDgxEiI5i9K4Bsrf6euCGGxrw5JMpCIXSmm4fNCiIf/xDwfDh7jY9brwXeT1IkoScnJwWu2bCWP9hTqwRITrM6F0DRLNmhfDEEykIhZpfbL/5Jg2TJqXj229r2/S44Yt8NEa4yHs8HuTn5x+XmZFlGfn5+fywYFLMiBAh9q4Bt9ut+5sx2dOWLQIvv1cMTFoOFGwFlHTg+6nAF5dCqc+E3+/AAw8oePZZ0abXaPgib/SMoMfjgdvt5vSphUjCwIvue71eZGdno7q6GllZ3DGRtBMIBFBaWtrqcQUFBXC5XEkYEVFzE29ehg88/w1AAhwKIA5feP05wHP/B5SfAqdTRVVVEB5P21+jrJGiRIjn+s2pGSKYo2uA7Gv9nvX4IONawKE2BiEAIInGL/cB4LKzgNRa1NU54PO17zUqSRJcLhcyMjLgcrkYhJDmGIgQwRxdA61paSMwsoaFGxdCivR27VAATwUw8EVkZKjIzDTua5SoJawRIYI5ugaiYbePdQkh8PbOtyGkKJkO4QD6vo2ZQy+Cy8VpbDIXZkSIYJ6ugZaw28f6FNHKdIukIs1ZjxtvDBnyNUoUDQMR
osPM2Bpohj1CqH0kScLg/MFwRH27duDyySeid++2rSNCpCdOzRAdxWytgfHsEcJuH/O6cfSNuGz1ZRHulZAqy/jdiJmGnTokioYZEaJjmKlrgN0+9jBr0CxcMegKAIBDOvK2LUsyHJKEv4z7C/oX9jf0a5UoEgYiRCZmhW4fap0kSVhx4Qo8M/UZDM0dinQ5HRmpGbig5wV4bepruHLklYacOiSKBadmiEzM7N0+FDtJknD5sMtx2dDLTDN1SBQLZkSITMzM3T7UNmaaOiSKBQMRIpMzY7cPEVEYp2aILMBs3T5ERGEMRIgsIpyyJ6L4cbM//TAQISIiW+MWCfpijQgREdkWt0jQHzMiRERkOlW1VfjHp//A5r2bkepIxbl9zsXMU2YiPSU95seIdYsEt9vNaRoNMRAhIqI2EQL45huguhro3Rvo0iU55335m5dx6SuXokFpANBYH/XSty/h9vdvxxsz38DgwsExPQ63SDAGzadmHn/8cfTs2RNOpxOjRo3C5s2btT4lERFpbNUqoF8/gYEDgbFjgW7dBKZNE/jpJ23Pu7VkK3798q9Rr9RDPfy/8O7E5bXlOPelc1F1KHqWI4xbJBiDpoHIypUrMX/+fNx1113Yvn07Tj31VEyZMgUVFRVanpaIiDS0fDkwYwbwww9HblNVCWvWACNGCOzZo925/7zxzwAAgeN3lFaEgrLaMjy77dmYdpzmFgnGoGkgsmjRIsyZMwezZ8/GySefjCeeeAJutxv/+Mc/tDwtERG1kRACgUAAPp8PgUDguAv6oUPAvHni8LHN6yYURcKhQ8Af/hDSbHyvfvdqUwakJQ44sLZ4Lerq6lp9rPAWCdFwiwTtaVYjUl9fj23btuH2229vus3hcGDy5MnYtGlTiz8TDAYRDAabvvd6vVoNj4iIjhFLG+uLLwrU10d+DEWR8MorMg4dEujQIfEFnuG6kEhUqKhTWq/9AI5skVBeXh7xGG6RoD3NMiLhF3N+fn6z2/Pz81FWVtbizyxYsADZ2dlNX4WFhVoNj4iIjhJrG+vOnSGktPIRNhSS8NNPwegHtdGA3AFwRLl0OSQHBnQaEPN0CrdI0J+h1hG5/fbbUV1d3fS1d+9evYdERGR5sbaxCiGQna0iltrNrCxtCjznnTYPKtTIBwjgkpMuiWs6xePxoKioCAUFBcjLy0NBQQGKiooYhCSJZoFITk4OZFk+LuVVXl6OLhF6vNLT05GVldXsi4iItBVPG+v06QLR6kAdDoFRowLo2lWby8sVp16BC0+8ENLh/4XJUmNG495R9+LUnqfGPZ3CXY31o1kgkpaWhmHDhuG9995ruk1VVbz33nsYPXq0VqclIqI4xdPG2q9fOn79ax8k6fhoJHzbTTdVa1bgKTtkrJq5CovPWow+2X0azwsJ4wrG4YUpL+CG029gJsNkNF3QbP78+bjiiiswfPhwjBw5EosXL4bf78fs2bO1PC2R7XDDLmqPeNpYJUnC3/7mQEqKF88/nwUhAIejsUi1Y0cVDz9cgfPOy9L09Sc7ZMwbMw+/H/17eP1eQAXSUtP4ujcpTQORmTNnorKyEn/6059QVlaGwYMH4+233z6ugJWI2o4bdlF7hdtYo2VGjm5j7dDBg6VL/fj97/fh7bfT4fM50LNnA846K4iCguS97iRJQnZGdlLOpRV+iAAkEcuqLzrxer3Izs5GdXU160WIWhDudIiEVf8Uq7a8lngRbR8rf4iI5/ptqK4ZIopdPJ0ORK1pSxsrCzzbjrv+HsFN74hMiht2UaJ5PB643W5mOTTGXX+bYyBCZFLcsEsfVp+OCGc5SDv8ENEcAxEik+KGXcln5Tl9Sh5+iGiONSJEJsUNu5LLjnP6rW2AR23DDxHNMSNCZFLcsCt57Dinz+yPduJtl7Y6ZkSITIwbdiVHPHP6VmDH7E8yhT9ERGOnDxHMiBCZHDsdtGenOX07Zn/0EP4QwawTAxEiS2Cng7bsNKdvhI4OvTqT
kn1efohoxECEiKgVdprT1zv7o1dtil7n5YcI1ogQEbXKTnP6emZ/9KpNYU2MvhiIEBHFwC6FwXq1heu1ZQG3StAfp2aIiGJkhzl9vdrC9apNMUJNTHuZfbVfBiJERHGww5y+Hh0detWm6F0T015WWO+FgQgRER0n2dkfvWpTzNwRFa5tOVa4tsUsU4YMRIiIqEXJzP7o1Zlk1o4oK633wmJVIiLSnV6dSWbtiLLSar8MRIiIyBD06kwyY0eU2WtbjsapGSKyLSGAdesEtm9vQFqainPPFejd21wdB1ajV2eS2TqizFzbciwGIkRkS9u3AzNnqti50wFJSoUQgMMBXHSRD3/7mwOdOxvvU7Bd6NWZZKaOKLPWtrSEUzNEZDvffw+ccYbArl2Nn3aFkABIUFUJL7+cgUsuEVxNkwzNrLUtLWEgQkS2s2CBQF0doKrHv0mrqoR3383Ae+95uZqmCQkhEAgE4PP5EAgELP07NGNtS0s4NUNEtqIowIsvAooS+ZOiLAu88oobZ51l3NU06XiJWtzLTCuVmq22pSUMRIjIVmprgWAw+pu0EMDBgw5TdBxQo0Qt7mXGlUrNVNvSEk7NEJGteDxAVlb0dL0kAd26hUzRcUCJ27iOu/Dqg4EIEdmKwwFcdVXj9EskiiJhxoxaU3QcWIYQEJs2oeG++xC85x4EP/gAQlVj+tFELO7FXXj1w6kZIrKd226TsGqVipKSlmtF5sw5hBEjOphqnt3UiouhXHgh5O3bkXI4CyUpCoIDBkD517/gPvnkqD+eiMW9rLALr1kxI0JEtpObC3z8sQMXXKDA4TjyCbdTJwV33nkAixalGrYeoD0M2VFSUwP1jDPg+PxzAI0BiHQ4IEjbsQNpkyfDX1IS9SESsbiXlVYqNRtmRIjIlrp2BVavTkFJicDnnweRlqZgxAgJmZkdLZkJMWoRplixAlJxMaQWgiJJUSCXlSG4dCnc994b8feSiMW9rLRSqdkwI0JEtta1q4Rzz03HpEluZGW5LBuEGLUIU33uuVaP8bzyStT6jkQs7hUOZqIxy0qlsTJKhowZESIiCzP6dvHSgQMtZkOa7hcCjkOH0NDKlEh4ca+2Zn3CwUxLLcBhZlmpNBZGypAxECEisjCjF2GKPn0g9uxpqgs57n6HAw09e8Y0JdLexb3aG8yYRaLWXEkUBiJERBZmyCJMVW3sowbguOYaSO++G/FQSVXhnzULOTFOibR3cS8rrFQajREzZKwRISKyMMMUYVZWArffDpGbC8gyRMeOCM2bBwwbhtDUqRAtXPSEJME/fjzcl16a1EAgHMxkZGTA5bJW3VAi1lxJNGZEiIgszBDbxf/8MzBmDERJSdMUjHToEOS//Q3qCy+g/p13kDJwIOS//Q2y1wsAUDIz4bviCqTcey88WVnajc1mjJghYyBCRGRhhijC/N3vmgUhTWNTFDgOHYLjqqvQ8NFHSL3rLtR9+ilURYE0YACyOnBRuUQzTIbsKJyaISKyOF23i9+zB+KttyIWo0qKAudnn6F6/XogPR3O0aPhHjcOro7WXM9Fb0ZsU2ZGhIjIBnQrwvz886jtuWEpX36JuvHjuXy6xgyRITsGAxEiIpvQZbv49PSYDhNpaVw+PUmM1qbMQISIiLQzdiyE2w2ptjbiISIlBYGxY5HF5dOTxkhtyqwRISIi7WRkANdf32J7LtC4YJl3xgxIeXna1yV89RXEzTcj9Otfo37ePNRt2WKMjf90YpQ2ZQYiRESkKeneexG6+GIAgJBlCDRmQQCgdtIk7L/zTm3rEhQFmDMHGDQIWLwY8qpVSP3b3+AcORL+GTPgr67W5rzx2LABYuZMqIWFUHv3RmjePIidO/UeVVJIwsDhoNfrRXZ2Nqqrq5HFPnIiIlMLfPQRGp58EnJJCZTcXNT88pcIDR+OnNxcbesS7rwT4oEHWiyaFZKE6jlzkLpoUZvHIPbsQf1HH0EFgLFj4ezRI76g6u67gXvugZDl
pu4iIcuALCO4ciWc06a1aVx6iuf6zUCEiIiSRgiR3LoEnw8iPz9qjYqano6ft25F4YAB8Y2lqgqh2bMhv/FGU5AjZBn+adMgPf44PPn5rT/Gm28C55/f4l1CkiDS0hDYsQOenj1jH5cBxHP95tQMERElTdLrEj78MGoQAgCOYBBpGzbEt6y5zwd13DjIb73VLNMiKQo8q1fDMXVqTFM+4i9/acx+tEASAlJ9PeqfeMLStSwMRIjIEoQQCAQC8Pl8CAQCln7jthRVBaqrgYYGbR4/EIjpMCkYjKt9WDz1FKTvv29xoTZJVeHauhW1L77Y+utww4aIi701nkgg7ZNPkrr3S7IxECEi0/P7/SguLkZpaSkqKipQWlqK4uJi+P1+vYdGkRw6BPzxj42b4HXoAOF2IzR9OsRnnyX2PKecEtNh9SedFNey5uLvf49+v8MB98qViQkgJMnSa6wwECEiU/P7/SgvLz/ujVpRFJSXlzMYMaIDB4DRoyEeegjSgQMAACkUgrx6NTBqFAJvvJG4c510EsQZZ0Sc/hCyjLohQ6CefHJ87cOlpVFXjJVUFSllZa0GEOqZZ0YcW+MDSagbPTqpe78kGwMRIjItIQSqqqqiHlNVVcVpGqP54x8hfvihxU3wEAoh7fLL4T90KGGnk556CqJjx+Mu+EKWoWZlofLhh+NvHy4oiLg2CtCYEQl16dJqAOG4+eaIUzNCkiBcLtTOmJHUvV+SjYEIEZlWuPsiGkVRLD2/bjo+H8TTT0feBE9VIR84EFt9Raz69IHj008RuuoqqG43AEB1ueC9+GKUrlmDTqNHx926K82ZE/1+VUXtzJmtBhDSWWcheP/9ANAsUBIOB4TTibInn0SnPn0svQEg23eJyLR8Ph8qKipaPS4vLw8ZGRlJGBG16osvgFNPjXqISEnBoWuvhfOhhxK+N45QFNTt3w/F6YScmtr29mGfD8qIEXC0kNkRDgfqhg2DunYtPNnZMT1c4D//QejRR5G+bRtEWhpqJ02C/9JL0XHgwKTv/ZII8Vy/udcMEZlWrPPmVp5fN51YAgtVhep0alKgKckyXHl57X+gjAzIGzYg9JvfQF6zpvk6IhdeCOmxx2IOQgDANXYsxJgxTVk+pyyjg057vyQbAxEiMi2n0wlZlqNesMKLZpFB9OkDtU8fSD/+GLHYU1JV1E6aBLfGAaT4/ns0vPUWREMDxMiRSB83DpIjjoqFnBykvPYaRHExghs2NK6sOm4cPEVFbQogdNkd2QAYiBCRaUmShJycHJSXl0c8RtM9TCh+kgTpT3+CdPnlLd4tHA7UTpgAtV8/7QLIQ4cQmjULKW++idTDrw1JCAQHDIDywgtwx9jyGyYVFSF91qyYjk36yrImwECEiEzN4/EgPz8fVVVVzTIjsiwjJyfHlPPrVidddhnq9+xB6p/+BEhS05cUCiFw2mmoWLQIeVoFkKEQlMmTIR9er+TorEzajh1QJk2Cf/NmeHr1Svip/X4/X6ctYLEqEVkCP2kmRjKfx9rvvkP9smWQf/oJamYmfFOnIjRypKab4ImXX4Y0fXrk+x0OVF93HbIXL07ovzu83k0k+fn5lgpGuOkdEZHFNTQAr70msG1bCOnpKi64QGDIkPR2XTz1+MSe7ABS+cUv4FizBpKqRjymoaAAoR9/TFi9hhACxcXFrdYyFbWxtsSI2DVDRKQRI2RePvwQmDFDRWWlAykpKVBV4O67JUyaVIvnnhMoKIg/aIj0iT28Qq1Wn9iTXqBZURE1CAEA+dAhBBPYsRPPejcsViUiooiMMMf/xRfAlCkCoVBj8BP+LwB8+KELv/xlHdau9SMjI/bxxLpCrdvtNv0ndtGrF8SWLVFXMw117ZrQlu9Y25CtvJ9MNFxZlegw7t5qcHV1ECtWQDn9dKh9+kCZMAHihRe027X1GEbZ02bBAgFFAVT1+IBAUSR8/LELb71VE9fr104r1MpXXx19t1sAvksvTWjHDte7iU6T
QGT37t347W9/i169esHlcuGEE07AXXfdhfr6ei1OR9Ru3L3V4A4cgDJqFKTf/AaO//wHjh9/hGP9ekizZkE580zA59P09EbZ0yYUAl5+uTHgiESWBV57zRVX0GCnT+zSmWciNGNGi/vECFlG/cknI33u3IRmfsLr3URj5/VuNJma2bFjB1RVxbJly9CnTx989dVXmDNnDvx+PxYuXKjFKYnaTK+5cSOprarFJ49+gu1PbUdtZS3ceW4Mu2oYRs0bBVcn/eesQ1deCfnrrwEcabcMz/M7PvkEDdddh9Snn9bs/EaZ4w8Emk/FtEQIoKbGEVfQYKtP7JKElOefR/0JJyDlscfgqKkBAIjUVPh++Us4Fi2CJzc3wafkejfRJK1r5pFHHsHSpUuxa9eumH+GXTOkNTtWsx+rem81nhrzFHwlPgj1yNuB5JCQWZiJqzZehcyumbqNT+zeDfTuHXXLdZGS0rgte06OJmMwyp42QgC5uQL790fPiMydewgPPuiMOSiy69+BqK1F/ccfQ6mvh3TKKXAWFGj67zNCjVGyxHP9TlqNSHV1NTp16pSs0xHFxE5z45G8cvkr8JU2D0IAQKgCNT/XYPXs1TqNrFHD++9HDUIAQAqFUL9unWZjMErGQJKAa64BHI7Iz4eqAjNm+ONK84c/sUdjxU/sktuN9IkT4T7nHLi6dtX83+fxeFBUVISCggLk5eWhoKAARUVFlgtC4pWUQGTnzp1YsmQJfve730U9LhgMwuv1Nvsi0pKd5sZbUvVdFYo/LIZQWr6wCUXgp7U/4cCuA0ke2RFqjM99rMe1hZHm+G+5RULfvgKy3Px3JkmN399000EMHtwx7otqeIXaY/+dsizbYnoyWcLtyhkZGXC5XJYL7toirkDktttugyRJUb927NjR7Gf27duHc845B9OnT8ecOXOiPv6CBQuQnZ3d9FVYWBj/v4goDkb5pKuXPZv2tH6QAIo/LtZ+MJGMHdtiYeHRREoKcNppmg3BSBmD7Gxg0yYHZs9uXMgsrEePBixaVIW7705vc9DAT+ykh7hqRCorK7F///6ox/Tu3RtpaWkAgJKSEowfPx6nnXYann76aTha2dUwGAwiGAw2fe/1elFYWMgaEdJMLHP/VpwbD9v+z+14/bLXWz3uFy/9AoNnDtZ+QC0QQiAweTJc69a12HYpZBn+adPgWbVK89+R0eb4vV6BHTuCSE1VcNJJDrhc1l/W3ggLylHrNFtZNTc3F7kxVhPv27cPEyZMwLBhw7BixYpWgxAASE9PR3p6ejxDImozIUSrgTUAdOrUybJvdD3O7AFHigNqKPJKk440B3qc0SOJo2pOkiSIp55Cw1lnIXXnzsbN0YRozJIc3jFVWrIkKb8jj8cDt9ttmAthVpaEkSPt0/JptECQEkOTGpF9+/Zh/PjxKCoqwsKFC1FZWYmysjKUlZVpcTqiNomlUBUA9u/fb9n1RDp174S+M/tGfieQgJMuOQkdunRI5rCO4+nZEw0bN+LAffchOGgQQgUFqBs6FPsffhjKBx/AU1CQtLFwjl8fRllQjhJPk/bdp59+GrNnz27xvnhOx/Zd0lKsLZlhVi3Yq95fjVXTV2HfB/sgyRKEIpr+231yd0z/13RkdUzO319raXem5e3Jru3FZsbdd4liEAgEUFpaGvPxVn6j8/l8+Or1r/DDqh9QW14Ldxc3+k7vi4FTB2q6LsbRmHanSGL9Wy0oKLDlpnFGxN13iWIQbsmMp4XXqrtjZmRkYNSvR+HUaafqkm3g6rYUjd3b7K2Om96RbcXSknksK7/R6VX7YJR9XCjxErWRpN3b7K2OGRGytfAiTpWVlVDVyJ0jYXyjSzyj7ONCiZXIqbZYspd23jTO7JgRIdsLL+LUWos53+i0wbS79SS6w8VIC8pR4jEQIQLgcDhaXSOHb3TaYNrdWrSaauMS9NbFqRmiw8JvdOzcSC6m3a1Fy6k2oy0oR4nBQIToKHyjS75w2r2l
rpkwZqPMQ+uptnBRNVkHAxGiY/CNLvmYjbIOTrVRvBiIEJEhMBtlDZxqo3ixWJWIDIP7uJgfO1woXgxEiIgoodjhQvHg1AwRESUcp9ooVgxEiIhIEyz8plhwaoaIiIh0w0CEiIiIdMOpGSKytAMHBP73fxtw6JBA377AueemISWFdQpERsFAhIgsSVWB226rx1//mor6+jRIkoAQEgoKQli+vAFTp7J2gcgIODVDRJY0f349HnkkFfX1jdkPIRr/W1Ym48ILnfjgg4Cew7OEYLAx4CNqDwYiRGQ5+/YJLFmSCuD4KRghJKgqcOediHsHWALq6oCFC4GePQWcTiA9XeDCCxVs3sznktqGgQgRWc7zzzdEvV9VJWzc6MLu3XVJGpE11NUBU6YAf/iDwJ49jbeFQhJef92BMWOAlSv5fFL8GIgQkeVUVACOGN7dKiv5KT4eCxcCH30kDk9zHck2KUpjlmn27DSUlvr1GyCZEgMRIrKcwkKgtV3mHQ6Brl3ZPRMrVQUee0xAVVt+zoSQUFcn4amn6jjlRXFhIEJElnPZZalITY18vywLnH12Lbp14w6wsdq/Hygvjx64yTLw9depqKvjFA3FjoEIEVlOp04S7ruv/vB3zT+dy7KA263igQck7nsSh/T0WI8TUFpLRxEdhYEIEVnSLbekY8kSH7p0aX5RHDGiDm+/XYMhQ9w6jcycsrKA0aMVOByRp11CIQmTJtUet+suUTRc0IyILMnv9+O88yowZQrwxRfp8Pkc6NGjAUVFocP3p3I7+jj98Y8OTJ3achZJlgVOOqkep59eD6eTU14UO2ZEiMhyhBCoqqoC0Fi3MGRIEKefHmgKQgCgqqqKRZVxOv98CYsXByHLAg5H45csNz6HffrU46mnypCbm8MpL4oLMyJEZDl1dXWt1ikoioK6ujpuUx+n669Px3nn1WLp0np8/30KXC6BKVP8mDgxiPz8HGaZKG4MRIjIcmItlmRRZdv07evGn//sagr4ZDkbTqeTmRBqEwYiRGQ5sRZLsqiy7SRJYjaJEoI1IkRkOU6ns9UgQ5ZlFlUSGQADESKyHEmSkJOTE/WYnBwWVRIZAQMRIrIkj8eD/Pz84zIjsiwjPz+fRZVEBsEaESKyLI/HA7fbfVRRpcyiSiKDYSBCRJbGokoiY+PUDBEREemGgQgRERHphoEIERER6YaBCBEREemGgQgRERHphoEIERER6YaBCBEREemGgQgRERHphoEIERER6cbQK6sKIQAAXq9X55EQERFRrMLX7fB1PBpDByI1NTUAgMLCQp1HQkRERPGqqalBdnZ21GMkEUu4ohNVVVFSUoLMzExLbFLl9XpRWFiIvXv3IisrS+/h6IbPQyM+D0fwuWjE56ERn4dGZn4ehBCoqalB165d4XBErwIxdEbE4XCge/fueg8j4bKyskz3otICn4dGfB6O4HPRiM9DIz4Pjcz6PLSWCQljsSoRERHphoEIERER6YaBSBKlp6fjrrvuQnp6ut5D0RWfh0Z8Ho7gc9GIz0MjPg+N7PI8GLpYlYiIiKyNGREiIiLSDQMRIiIi0g0DESIiItINAxEiIiLSDQMRHb3xxhsYNWoUXC4XOnbsiGnTpuk9JF0Fg0EMHjwYkiThs88+03s4SbV792789re/Ra9eveByuXDCCSfgrrvuQn19vd5D09zjjz+Onj17wul0YtSoUdi8ebPeQ0qqBQsWYMSIEcjMzEReXh6mTZuG7777Tu9h6e7BBx+EJEm44YYb9B6KLvbt24dLL70UnTt3hsvlwqBBg7B161a9h6UJBiI6+fe//43LLrsMs2fPxueff47//Oc/uOSSS/Qelq7+8Ic/oGvXrnoPQxc7duyAqqpYtmwZvv76a/zlL3/BE088gTvuuEPvoWlq5cqVmD9/Pu666y5s374dp556KqZMmYKKigq9h5Y069atw9y5c/Hxxx9j7dq1aGhowNlnnw2/36/30HSzZcsWLFu2DKec
coreQ9HFwYMHMXbsWKSmpuKtt97CN998gz//+c/o2LGj3kPThqCka2hoEN26dRN///vf9R6KYbz55puiX79+4uuvvxYAxKeffqr3kHT38MMPi169euk9DE2NHDlSzJ07t+l7RVFE165dxYIFC3Qclb4qKioEALFu3Tq9h6KLmpoa0bdvX7F27Vpx5plniuuvv17vISXdrbfeKsaNG6f3MJKGGREdbN++Hfv27YPD4cCQIUNQUFCAc889F1999ZXeQ9NFeXk55syZg+eeew5ut1vv4RhGdXU1OnXqpPcwNFNfX49t27Zh8uTJTbc5HA5MnjwZmzZt0nFk+qqurgYAS//uo5k7dy7OP//8Zq8Lu3nttdcwfPhwTJ8+HXl5eRgyZAiefPJJvYelGQYiOti1axcA4O6778add96JNWvWoGPHjhg/fjwOHDig8+iSSwiBK6+8Etdccw2GDx+u93AMY+fOnViyZAl+97vf6T0UzVRVVUFRFOTn5ze7PT8/H2VlZTqNSl+qquKGG27A2LFjMXDgQL2Hk3QvvfQStm/fjgULFug9FF3t2rULS5cuRd++ffHOO+/g2muvxbx58/DMM8/oPTRNMBBJoNtuuw2SJEX9CtcCAMAf//hH/OpXv8KwYcOwYsUKSJKEVatW6fyvSIxYn4slS5agpqYGt99+u95D1kSsz8PR9u3bh3POOQfTp0/HnDlzdBo56WHu3Ln46quv8NJLL+k9lKTbu3cvrr/+ejz//PNwOp16D0dXqqpi6NCheOCBBzBkyBBcffXVmDNnDp544gm9h6aJFL0HYCU33XQTrrzyyqjH9O7dG6WlpQCAk08+uen29PR09O7dG8XFxVoOMWlifS7ef/99bNq06bi9FIYPH45Zs2aZ/hNArM9DWElJCSZMmIAxY8Zg+fLlGo9OXzk5OZBlGeXl5c1uLy8vR5cuXXQalX6uu+46rFmzBuvXr0f37t31Hk7Sbdu2DRUVFRg6dGjTbYqiYP369XjssccQDAYhy7KOI0yegoKCZtcHAOjfvz/+/e9/6zQibTEQSaDc3Fzk5ua2etywYcOQnp6O7777DuPGjQMANDQ0YPfu3ejRo4fWw0yKWJ+LRx99FPfdd1/T9yUlJZgyZQpWrlyJUaNGaTnEpIj1eQAaMyETJkxoypA5HNZOWKalpWHYsGF47733mlrXVVXFe++9h+uuu07fwSWREAK///3vsXr1anz44Yfo1auX3kPSxaRJk/Dll182u2327Nno168fbr31VtsEIQAwduzY41q4v//+e8tcH47FQEQHWVlZuOaaa3DXXXehsLAQPXr0wCOPPAIAmD59us6jS66ioqJm32dkZAAATjjhBFt9Kty3bx/Gjx+PHj16YOHChaisrGy6z8rZgfnz5+OKK67A8OHDMXLkSCxevBh+vx+zZ8/We2hJM3fuXLzwwgt49dVXkZmZ2VQfk52dDZfLpfPokiczM/O4uhiPx4POnTvbrl7mxhtvxJgxY/DAAw9gxowZ2Lx5M5YvX27ZLCkDEZ088sgjSElJwWWXXYZAIIBRo0bh/ffft26fOEW1du1a7Ny5Ezt37jwuABMW3iB75syZqKysxJ/+9CeUlZVh8ODBePvtt48rYLWypUuXAgDGjx/f7PYVK1a0Oq1H1jRixAisXr0at99+O+6991706tULixcvxqxZs/QemiYkYeV3OSIiIjI0a09CExERkaExECEiIiLdMBAhIiIi3TAQISIiIt0wECEiIiLdMBAhIiIi3TAQISIiIt0wECEiIiLdMBAhIiIi3TAQISIiIt0wECEiIiLdMBAhIiIi3fx/bR2l7aXClBoAAAAASUVORK5CYII=", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXkAAAD4CAYAAAAJmJb0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAgAElEQVR4nO3deXxU5b348c93lqwkEEhYBCJriyiLEHfE2gJSlypoUate297Kq9Yq1i7qT1u1q4pLbfVqudbqta24XS8opYpbxR1QQcWlihoFhAARJuss5/n9kQRIMiEzmTlzlnzfrxcvnTOTc74zyXzPc57zfZ5HjDEopZTyp4DTASillLKPJnmllPIxTfJKKeVjmuSVUsrHNMkrpZSPhZwOYG/l5eVmxIgRToehlFKesmbNmm3GmIpkz7kqyY8YMYLVq1c7HYZSSnmKiHzS1XPaXaOUUj6mSV4ppXxMk7xSSvmYJnmllPIxTfJKKeVjmuSVUsrHNMkrpZSPaZJXSikfc9VgKKWU6qlIJEJtbS3xeJxQKERZWRklJSVOh+U4TfJKKc+LRCJs27aNtkWQ4vE427ZtA3Blos/lCUm7a5RSnldbW0vHVe6MMdTW1ib/gXgcHngAzjwTfvADeP31HETZou2EFI/HW0NpOSFFIhFbjqcteaUcFI/Dpk0wYAAUFzsdjXe1JcyUtsdiMHMmZvVqpL4eEwxi/vIXotdeS8GCBTZHuu8Tkh2teW3JK+WQP/0JyssN48ZZDBhgcc45EbZvt6c153ehUPL2atLtDzyAWbUKqa8HQBIJAk1N5F16KZHPPrMzTCDNE1IW2J7kRWS2iLwnIh+IyGV2H08pL1iyBC65xLBzp9DYGKC5OcBDDxVz0UXGtst2X4nH4bHH4A9/gJUrKevXDxFp9xIRoaysrPPPPvgg0tDQabMJh2l64gm7It4trRNSFtia5EUkCNwGfB0YD5wpIuPtPKZSXvCrX0FDQ/uk1NQU4OGH+7Bp0xeOxBSJRKiurmbDhg1UV1dn72RjDNx5J4wbBxUVcPrp8OGHPd/fxo1Yo0djnXEG1k9/ijV7NkWzZlFeVLQ7UYZCIcrLy5N3f5SWYjqcENrE8vJ6HleKysrKUj8hZYHdLflDgQ+MMRuMMVFgMXCyzcdUyvU+/TT5dhHYts3KbTDYfDPw0ksxCxbAe+/Btm2Yhx7CTJ3a9YfQjfjZZyMbNxKorycQjRJoaEDeeIO8666jsrKSUaNGUVlZ2XX/9vz5mIKCztvz8ogfeWSPYkpHSUkJ5eXlqZ2QssDuJD8U2Ps3+Vnrtt1EZL6IrBaR1TU1NTaHo5Q7HHYYiJhO2/PzDUOGJG9l2int6pTUd4z54x/bdY+IZUFDA9Hf/S79/dXXE3z+eSSRaLc50NxM6K9/TW0f06YRvfRSrPx8rOJiEn36kCgr4/O776asIuniSllXUlKS2gkpCxyvrjHGLAIWAVRVVXX+q1fKh37zG3jqKWhsNBjTktQLCy0uvXQHFRXdX7Znu87atpuB69djwmGkqandZonF4Lnn0t9fh+TeThqxFlx1FXXnnEPT8uXEioqITZ9O2cCBtiRbpwdp2Z3kNwLD93o8rHWbUr3ahAnw8svCZZfFePXVAPvtF2PBggjf/GZBtwnAjoE/oVAoaULP+GZgZSVEo502GxFigwaRdg94aSnRAw8kb906ZK8rDxMK0XD88aTz7vuMGkWfCy5IN4K0uGGQlt3dNauAsSIyUkTygDOApTYfUylPmDABli0LU1MTZO3aAr773YqUvvh2dK3YdjNw+HCajjwSq+MNTWMoXLmS5ilTqPvgg7R2Gb/zTqzSUqzCQgCsoiLi++0Hv/51ZrHawLZusDTY2pI3xsRF5IfA40AQuMsY87adx1TK7+zoWmk7udjRrZD4299o+P73Kf7HPyAeR6DlXyxG3rp1cMopRF56KeVjFVdVEXnrLWJ3303gww+JT55M/ll
nUVJennGs2ZbrmvhkpONZxklVVVVm9erVToehlKtVV1d32bVSWVnpQETdi0QiBE4/naLly+l4W9kqLGTLkiUMmTnTkdjslKvflYisMcZUJXtOR7wq5TG5rrPOhpKSEoojkU4JHlr6083nn+c8plxww+9Kk7xSHpP1Outdu+C66+Coo2DuXPjXv7IY7V5mz8bKz++0WaJREpMnp7Ur2wZuZVmua+KTcbyEUimVvpKSkuwkil274OCDMZs2IU1NGMD8859Ef/lLCn7yk8z3v7cLLoDbb8eqqSHQWnFjFRbyxfnn02/EiJR344aKlXRk7XfVQ5rklerNbr99d4KH1huijY3k/fznRM46i5IhQ7J3rH79CKxdS/Pvfoc89hiJsjLqvvc9CubNSysJ5noWx+44XQffHU3ySrlIzhPGkiWdBipBa935ypWUzJuX3eMNGED+DTfADTcAUNiDXbihYqWNF64qNMkr5RKOJIxBgzDQ6YaoJBJEbU5SPT2h2TZwqwfcdlWRjN54VRnzyk0wt3Nk4MyCBZjC9u1pQ0sN++Af/ACuvz7piNVMZTIhWlcVK4WFhTn/O3TTVUVXNMmrjOR6KTO3eecdmDkT8vOhf3+4/PKe50RHEsZXvkL0mmuwCgpIFBe3TMErgsTjhD/+GOuqq4gff3zLdMF7MwYefhhmzYLp01tWQEnjjWdyQktWsdKnTx/q6upy/neY67nhe0IHQ6mMdDXY4/PP83n55aEkEnDyyfClLzkQnM02boTx4yES2TPJWEGBxcyZCZYuDae9PycHOUU2bSKxcCGld9xBoEMfvVVYSOOyZRQfe+yejeefj7n33t2rK1mFhVgHH0zouecgGOz2eBs2bOjyuVGjRqUdv1OfXccuNmi5qsh1maQOhlK2SfbFuu++Er761SFcfrnFFVcYJk60+MUvmh2Izl633gpNTXsSPLQs/PHEE0HWratLe39ODpwp2W8/+llWpwQPgGXRvHLlnsfvv4+5++7dCR4g0NhI4I03aHzwwZSOl+0WsFPdJm6og++OJnmVkY5fys8/D/KrXw2guTlANBogFhOamwMsXBjm1Vfru9iLN61aBdFo5zGceXmGNWs6Ly/XHccTxsiRWMkW0wiFiA4cuOfxv/6VdGWlQEMD8WXLUjpUtk9oTnab5HJu+J7QJK8y0vHL+uSTxSRbWS0WExYvjuUwMvtNngzhcOfuzlhMqKxM0iJOgaMJ45xzoENSNIEAVnEx0b3nlRkwAAKdU4cVDhMbMCClQ2X7hOaG6QPcSpO8ykjHL2sgyZe/TSKR+2Xt7HThhS2t9r3l51scemgTY8c6FFQmBgygcdkyYiNHtqyalJdH84EHsvmBByjbuyV//PGYcJhOp7dgkMY06uqzeUJz/CrIxfTGq8qqjRth9GiL5ub2yb6gwOLRR7cwY0YWR1C6wPPP13PhhUHWrs0nP98wd26EK6+sZdiwAZ5NMJFdu9j1zjvERZDBg5PWr9e/8AL5p51GYNcuCAQwItTcdBPFp5/e6bVuHxHqB/u68apJXmXdLbc0cemleRgDxgiBgOGCC77g6qvDvvxyt1RY1GJZccLh3pPEIjt3Ur9yJYmGBhJTplA2aFDSBO+G6pOOMfntpKNJXuXcW2/VsXhxjFjMYvbsZqqq7J+kyY9f3o689h7dNve9G0862bCvJO+ein3lKwcd1Cenq7F5YQ6RTHnxPWa7tHFX8y4eXv8wW+u3csyIYzhs6GGdbrjuixemIcg2TfLKF3rDl9eL7zGb88ys2riKr/3P10hYCaKJKOFgmGP3P5al31pKMND9ACzwxjQE2WZbdY2ILBSRd0VknYg8IiL97DqWUr3hy+vF95it0kbLWJyy+BQi0QgN8QbiJk5jvJFnPn6GO16+I+X9eGEagmyzs4RyBXCQMWYi8D5wuY3HUjbywgRkveHL68X3mK3Sxje3vMkXTV902t6YaOSuN+5KeT+9sZ7etr8OY8wTez18GTjNrmM
p+2TcD/zFF3DbbbB8OQwbBhdfDIcfnvU4y8rKkt5Q89OX16vvMRsrI5nOVfm7WSb18RdtcXjp5nWmctUE+C5wf7InRGQ+MB9w7UrzvVlG/cA7dsDkyZiampal5UQwS5fSfNNNFH7/+1mNszd8ee16j16o2Jk4aCJ9wn1oiLefLqIwWMi8sektbOL0cny5llEJpYg8CQxO8tQVxpglra+5AqgC5ppuDqYllO6T0WyBV16JWbgQ6TAFrVVcTP3HH1NSXp6NEFUGvFRSuOLdFcx5eA4Jk6Ap0URRqIgpFVNYMm8J/fv1dzo8R9lWQmmMmdHNgb8NnAh8rbsEr9wpo+qIRx/tlOABEKHulVcoOeGELESoMpHJldo//gE33ww1NXDCCXDJJS3T2thl5riZvH3e29z7+r1srd/KEfsdwQnjT6C0tNS+g/qAbd01IjIb+BlwjDEm/Sn5lCtk1A/cVUs9FiPWp0/Sp7zQdeAnPa3Yue46+OUvDQ0NLTcx33nH4p574M03A9h5i2D/gftz5XFX2ncAH7KzuuZWoARYISJviEjqdU7KNTKqjvjRj7A6Li0XDBI94AAYObLTy3v7KlNO6EnFzq5dcPXVexI8QDQaYNs2uOkm76wbsKNxBwtfWMjpD57Ob1f+lpr6GqdDsoVOa+BiVtxi8+ubCYaDDJo0KK2RfW7RdNVV5F13HSYvD4nHiY0cyZa//IWyAw7odKJw2xD43iBZnzy0zCZqWVbSq6lnn4VvfMMiEuncRpwypZk1a/LtDrtb3V0RbqjdwKH/fSgNsQYa443kB/PJD+bz9LeeZur+Ux2MvGd0WgMP2vDUBh6c9yCJ5gTGGPL65jHn/jmMOXqM06GlpeCaa4h873s0PP880bIyzPjxXXbBeHGwj9d1rNgREYwxWFZLWWKyktlBgyDZr0TEMGhQDHA2yadS9nvR8ouobardXX7ZnGgmmohywfILWHHWCl91Eep88i4U2Rzhvm/cR9OOJmL1MeINcRo2N/DgiQ+yY8sOp8NLW8nw4Qw680yGz569z3nDvTjYxw/2ntc9mGR91o4LbB9wAIweHSMYbN/6LygwfO976S97mG2pLBL+5IYnO9XXGwyrtqxi+47tOYkzVzTJu9C6v67DSrLAhpWwWPfAOgciyo1cjkbctg2uuAKmTIETT4Rnnsn6ITwp1aupRx6JMWFCMwUFFn36JCgutrjqqu3MnFmcizD3KZX3kB9KfrURCoRIxBO2xOUUbSI5LFnfYf2WeqzmJEk+ZlFf4691UveWqwFNNTUwaRJs326IRoXXXzc8/bTht7+NcvHFSdY47UVSLZkdM6YPzz0XYe3a7WzbZhg/3mLIEHdUQqXyHs6ddC6L1iyiObHnRnFeII+TRp5EOBzOSZy5okneAXsn9r219R0OPmIwoaIQ8Yb2z0tQGHrU0FyGmnO5GI144417EnwLobFRuOKKPL71rQgDBzqfqJySTslsSUkJ06a577NK5T1cO+Na3tj8Bqs3rSYgASxjMa5sHL845BeunyYiXZrkc6yraoY2xhj6Tu3L4CmD2bxmM4nGlkvHUFGI4TOGM/rI0bkM15eWL2evBL9HIGB45ZV6Tjop/cTll/p+P0wPkcp7KAoX8dx3n+P5D59ndfVqRvQZwcEDD6Z///6eeq+p0CSfY8luCnWUsBJ8+6lv89J/vcS6e9chIWHcmeOY+p2pvvsDdMKQIbAuya2NWEzo2zfJCN1ueHExj33xw9wuqb6HaaOnMW30tBxE5BxN8jmWSjlgKBQimBdk2sXTmHaxv/8AnXDJJfDccxaNjXvqDkIhw0EHNTNiRPr78+JiHqr30OqaHOuuHNALU8d63axZcM01UQoLWypDCgosJk5s4o47tvbos9f6/j28sPZAb6Mt+RxLdlOojRf7P73qpz8t4JxzIrz0Uj1lZVFGjYKysp71x2ZziTsv81u3lV/0rr9CF/DDjS2/GDy4hDlzMv/cvbqYR7Zpt5U7aZJ
3gB9ubKk9vHDiNsZgGYvGhiBLlkBtLXz1qzB+fPaOod1W7qRJXqkscOuJO2El+PmKn3PrmluJxCLIji+R99Qt8MEsROCMM+LcdVce2Zj7Trut3ElvvCrlYwuWLeD3q35PJNZyA9T0f5/mk+fSXPEKTU0BHnggxOLFjVk5Vm9cJNsLNMkr5VN10Tr+vPbPNCY6JPFwI3zlGgAaGgLceWd2jpfR2gPKNnodpZRPbYpsIiBJ2nECVKzf/bC5OXtrSri126o305a8Uj41rHRY8tHVlsDnkwAoLLQ47TRdndPPNMkr5VNF4SIumHoBhcH2SzASL4Rnr6KoyOLQQ5s491znV3JS9rG9u0ZEfgzcAFQYY7bZfTyl1B7Xz76e8qJybnrlJmqbaxnd5wAO3vI7io4fw7HHbuPEEwvp29c73SvVO6vZHNnM+IrxlOR7J24n2brGq4gMB+4ExgFTu0vyusarUiqZnU07mXPfHF7c+CLhQJi4Fednh/+Ma2Zc43RoruDkGq83Az8Dlth8HJUDfplOV3nPmQ+eyQufvUDUiu5e6GPhywsZWzaWs6ee7XB07mZbn7yInAxsNMastesYKnfa5iVpG+zSNi+JTkCl7LatYRtPffwUUav9NNCNiUZufPnGnMfjtUnYMmrJi8iTwOAkT10B/D9gVgr7mA/MB6isrMwkHGUjnZdEOWVH4w5CgVCnJA9Q01iT01i8OAlbRkneGDMj2XYRmQCMBNa2joAbBrwmIocaYz7vsI9FwCJo6ZPPJB5lH52XRDllVNkowoHO664GJci0Ibldb8GLjR1bumuMMW8aYwYaY0YYY0YAnwFTOiZ45R1dzT+i85Iou4UCIa4+9Op2paDhQJiScAk/nPjDnMbixcaO776hHW8OFhYW0tjYqDcLM6TT6SonzR07l/2K92PR24vYWLeRIwYfwXkHnsfQ0twubO/FSdhyEllra952NTU17W6CxOPxTo/d3n/mVl6YTlfl1qc7P+XC5Rey/IPlhAIhzjjwDG487kb6FfTL+rHKyso4JHEIVQP3VAk60cjwYmPHvaefNEUikZTucru9/8zNsjUviZZiel9dtI5D7zyUmvoaEiZBNBHl3nX3smrjKtaev7bTbJSZcksjwy1xpMM3Sb62tjbl17q5/8zvvFidoDr7+5t/Z1fTLhImsXtbzIrxYe2HLH9nOcePPz7rx2xrZOzaBS++CH37wmGHQSDHk7N4bRI238xdk07idnP/md/tqzpBecfaz9fSEO88sVnCJHjt09dsO+7tt8OgQYZ58yxmzrQYPjzO6tX1th3PD3yT5FNN3G7vP/M7L1YnqM4mDJpAYaiw0/agBBnRZ4Qtx3z1Vfjxjw1NTUIkEqC+PsDmzUFOPDGfnTvdPSDJSb5J8slWpQHIz8/XRQxcREsx/eGsCWdRFCpqN199OBBmeJ/hHD3saFuOefvt0NTUfpsxQl1dgCee0OmSu+KbJJ9sVZqKigqGDh1KZWUlo0aNorKyUhO8w3SJOH8oyS/h2bOe5eghRxOUIHmBPI7f/3gWH7eY/v3723LMmpqWpN6RiKG2VsdRdsVXzSev3RDpjbxYnaCSO2jYQTx6xqPs2LGDeDxOOBy29Xc5dy48/bRFY2P7tmksJhx+uHb3dcVXSV5ll12ljnoy9o9c/i7POgv++EfDe++1JHoRQ0GBYcGCWkaO7JuTGLxIk7xKSksdldvk58NLLwVZtKiJhx4y9O2b4D/+o57Zs4v0b3IfNMmrpLw4EZPyv4ICuOiiAi66qG1Lnx7tpzcNyNMkr5LSUkflV73tKtU31TUqu7TUUflVbxuQp0leJaWljsqvettVqjbLVFJa6qiyzS394F6cLjgT/nxXKiu01FFli5v6wb04XXAmtLtGKWU7N/WDJxsd7+fpTrQlr5Syndv6wXvTVaq25JVSttNqLedokldK2U6rtZxja5IXkQtF5F0ReVtErrfzWEop9+pt/eBuYtu1kogcC5wMTDL
GNIvIQLuOpZRyv97UD+4mdnaInQ9ca4xpBjDGbLXxWEqpLrilPl05w84k/yXgaBH5DdAE/MQYs6rji0RkPjAfoLKy0sZwlOp93FSf7mVePlFmlORF5ElgcJKnrmjdd3/gcOAQ4AERGWU6FMsaYxYBiwCqqqp0eRelskhnE82c10+UGSV5Y8yMrp4TkfOB/21N6q+KiAWUAzWZHFMplTq31ad7kddPlHZW1/wfcCyAiHwJyAO22Xg8pVQHWp+eOa+fKO1M8ncBo0TkLWAxcG7HrhqllL20Pj1zXj9R2halMSYKnG3X/pVSe3R1Y1BnE82c1yc088apSCnVpe5uDGp9ema8fqLUJK+Ux3n9xqAXePlEqXPXKOVxXr8xqOylSV4pj/P6jUFlL03ySnmcVtCofdFTvVIe5/Ubg8pemuSV8gEv3xhU9tLuGuU7zfFmrnz6SgbdMIjS35Vy+oOnU72z2umwlHKEtuSV75z6wKk89dFTNMWbAHho/UM8/dHTvH/h+5QVdt9P7eUZB5XqSFvyylfW16zn6Y+e3p3gASws6qJ1/NfL/9Xtz7cNLGorP2wbWBSJRGyL2c8ikQjV1dVs2LCB6upq/RwdoC155SvrtqwjIJ3bLk2JJp7/5Pndj3c27eT/3v0/6mP1zB4zm1FlowAdWJSpva+CRKTdZ+m1KXr9QpO88pXRZaM7JWmA/EA+Y0vHArDiwxWccv8pYMAyFpc8fgkXTr2QhV9fqAOLMtBxeoVkvwc9YeaedtcoX6nar4ox/cYQDoTbbQ8FQpwz/hwaYg3MvX8uDbEGGuINNCWaaE40c9ua23ji3Sd0YFEGkl0FJaMnzNzSJK98RUR4dN6jzBo+i3AgTFCCHNT/IBYft5hxQ8ex4sMVSX+uKdHEXa/dpQOLMpBq8tYTZm7pp618p7KikvtOvY8t27bQHGumpKBkd4VMzIol70bA0Bxv7vHAIq9U5MQSMX75r19yx+o7qIvVccz+x/D72b9nXPm4jPcdCoW6TfR6wsw9TfLKl7oaHDRj1AwSJtFpe1GoiFPGnLLPn+2Kl9YAPfuRs1n63tLd1UdPfPgEh/33Yay/YD1DS4dmtO9k864DBAIBLMty9cnPz7S7RvUq/Qr6ccvMW8gP5hMOhBGEwlAhM4bPYM5Bc3q0z31V5KRrS90WXt34KrWN6f9sdz754hOWvLukXXmpwdAUb+KGlTdkvP+SkhLKy8t3d8eEQiEqKioYMWIEo0aNorKyUhO8A7Qlr3qd+YfN57D9DuOe1+4hEo0wa/9ZHDfuOEpLS3u0v2xU5DTHmzn7obNZ+u+l5AfziVpRvjvxu9x60q1JS0J7Yn3NevICeTQnmtttj1pRXvnslawcQ6dXcB/bkryITAbuAAqAOPADY8yrdh1PqXRMGj6Jm4bflJV9ddUXnc4NxgXLFvDYB48RtaJErSgAd6+7m8rSSi77ymVZiXPsgLG79723sIT5cr8vZ+UYyn3s7K65HrjGGDMZ+EXrY6V8J9OKnISV4J4376Ep0dRue2OikT+s+kPW4hzTfwyHDz6c/GB+u+3hYJj/POg/s3Yc5S52JnkDtF3/9gU22XgspRyTrC+6vLw85W6L5kQzMSuW9Lkvmr/IWpwA98+9n7mj5pIfzEcQDup/EH+f9XcmVU7K6nGUe0gqgxd6tGORA4DHAaHlZHKkMeaTJK+bD8wHqKysnPrJJ51eopTvjbp5FB/t+qjT9mlDprFy/sqsHisSibB9x3aaY80U5hVqxYsPiMgaY0xVsucyasmLyJMi8laSfycD5wM/MsYMB34E/DnZPowxi4wxVcaYqoqKikzCUcqzbpl1CwXBAgKtX8mgBCkKFXH9jOz3chYVF/HU9qc499lzOfWfp3LPO/cQTXTuq1f+YGdLfifQzxhjpKXDcqcxZp/lC1VVVWb16tW2xKOU273w4Qtc+/y1vP/F+0yumMzl0y5ncuXkrB9n7v1zefyDx2mINwBQECxgUvkk/jr
zr+SF87Rl70H7asnbWUK5CTgGeBb4KvBvG4+llGP+vf3fbIxsZOKgifQv7N/j/Rw1+igeHf1oFiPrbNXGVe0SPLRM6fDm9jd5fvPzTN9vumsHcqmesTPJnwfcIiIhoInWfnel/GJH4w5O+OsJvLHlDcKBMFEryoVTL+T62dd3qrZxi5XVK4lbncs9G+INvPL5K0zfb7rOFOkztiV5Y8zzwFS79q+U0+bdP481n68hZsV2lz/e9tptjC8fz3cO+Y7D0SU3sHggoUCoU718QbCAisI998R0pkj/0GkNlOqBmvoaVn66slPpY2O8kZtfudmhqLo3Z9wcQoHObbuABDhp5Em7H+tMkf6hSV6pHvii6YukyRJgR9OOHEeTuuK8YpbNW8bQ4qEUhYooChVRUVDBXV+9iwEFAwCdKdJv9HStVA+MKhtFfjC/3Q1MgJCEOHb4sQ5FlZppo6exfv56Xv3oVZpjzYzrP45gIKgzRfqUJnmleiAYCPKHmX/gvOXn0ZxoxmDIC+RRklfCz6f/3OnwulVaWsqMSTOcDkPlgCZ55Wp2L8aRyf7Pnno2lX0rufHFG6mOVDNtv2lccuQljBw0MmvxKZUpTfLKtexejCMb+58+ZjrTx0zPOBal7KI3XpVrZXMxDif2r5QbaJJXrpWNxTic3L9SbqBJXrlWV7Xa2arhtnv/SrmBJnnlWpkuxuH0/pVyA22yKNdqu/lpV3WN3ftXyg00yStXs3thaF14WvmdJnmlPMDu8QLKvzTJK+Vydo8XUP6mN16Vcjmt51eZ0CSvlMtpPb/KhHbXKNUq3X7vXPWTh0KhpAld6/lVKrQlrxR7+r3bkmlbv3ckEsnK6zOh9fwqExkleRH5poi8LSKWiFR1eO5yEflARN4TkeMyC1Mpe6Xb753LfvKSkhLKy8t3t9xDoRDl5eV601WlJNPrvbeAucCf9t4oIuOBM4ADgf2AJ0XkS8aYRIbHUyol6XalpNvvnet+cq3nVz2VUUveGPOOMea9JE+dDCw2xjQbYz4CPgAOzeRYSqWqJ10p6c5jo/PeKK+wq09+KPDpXo8/a92mlO160pWSbr+39pMrr+i22SEiTwKDkzx1hTFmSaYBiMh8YOVLr/4AAA0OSURBVD5AZWVlprtTqkddKenOY6Pz3iiv6DbJG2N6shDkRmD4Xo+HtW5Ltv9FwCKAqqoqk+w1SqWjpyWH6fZ7az+58gK7OhCXAn8XkZtoufE6FnjVpmMp1U5ZWVm7aQDAvV0pvWlOmk2rN7Hq9lXUb6nnyyd/mUnnTCJUoPcw7JbRJywic4A/AhXAMhF5wxhznDHmbRF5AFgPxIELtLJG5YpXulJ605w0r/35NZZfuJx4cxws+Ojpj3jlj69w3svnES4KOx2er0nHG1ROqqqqMqtXr3Y6DKVyorq6ustuJTfdn8r0aiNaH2XhwIXEG9q/12BhkGN+dQxH//jobIfc64jIGmNMVbLndMSrUg7xwpw02RjZu/GVjUhQOm1PNCZY/+D6rMWqktMk73exTVBzFWycBztuhsROpyNSrbxQa5+Nkb35pfmYRPIeg3Bf7aqxm3v+mlT2Na3BfPIVjIkSIIoVeQy2XUdg5BoI9+5hC9m84dnTfXnhBnE2rjaGTB1CYXkhkU8jsFeuDxYGGX/u+ExDVN3QlryPJT77NmLqCBAFIEAjYm0jtumnDkfmrGxOLpbJvrwwJ002rjZEhNMeOY3iIcWEikOE+4QJ5AWY+MOJHHjSgdkKVXVBW/J+ldhFIP5up81CgkDjPxwIyD321QWRboLNdF9ur7XP1tVG5ZRK5q+fz3tPvkfD9gaGHDqEIaOHuPq9+4Umeb+SMND5ZheAoSCtXfmtljubNzztunnqls88m+WopX1LOeTUQ7IdouqGJnm/ChTSGDiWQusZhNjuzRYF1IW+Rb8Ud+O1Wu5UkmM2F+GwY0EPt33mbr/aUPumffI+lhj4J6IyBosiEhRjUUBj4EiCA69IeR9
eWl801f7xbE4uZsdEZV76zJX7aUvex0r6jSASeJna7U8j8Y9JhA+iZMBhabXKvFDL3SbV/vFsdkHYMbrWS5+5cj9N8j5XUlpKSekpPf55L60vmk5yzGYXRMd9JaIJtqzbQmH/QkqHlaa9Py995sr99K9G7VM2a7ntvpnohuT4xv+8wfIfLsdYhkQ8QcXkCubcN4dBIwelvA8v1M8r79A+ebVP2arlzsXC104v5PHpi5+y7PvLiEaixOpjWM0WW9ds5aG5D6X1Pr1QP6+8Q1vyqlvZ6NrIZm16V5yeffKlm18i3tT+SsLEDbXv1vLxmo+Z8JUJKe9LK1pUtmiSVzmRq5uJTibHXZ/uajdsv00gHKDu87rcB6QUmuRVjqTTX+6WgUDpGj1rNJtf34wVtdptt6IWFRMqHIpK9XbaJ69yItX+8kz77iORCNXV1WzYsIHq6uqs9vl357AFh1HYv5BAeM/XKlgYZOKFExlUmfqNV6WySVvyKidS7S/PpO/e6ZGiRQOKOH/t+Tzz62f4YPkH5PfPZ8L8CUyaN8kTVyLKnzTJq5xJpb88k777XNzc7U7xwGJO/MOJOTlWJra+vZXnfvkcm1ZvYsCXBjD959MZfuRwp8NSNtAkr1wlk1p3HSmams2vb+YvR/+FeGMcYxlqN9Ty0bMfcdI9JzFp3iSnw1NZllGfvIh8U0TeFhFLRKr22j5TRNaIyJut//1q5qGq3iCTWncvrLTkBit+soJYfQxj7bnqSTQlWHHJCnbt2uVgZMoOmd54fQuYCzzXYfs24CRjzATgXODeDI+jeolMBgI5PRjKKza+ujHp9oYtDdR8VpPjaJTdMmriGGPeATp9sYwxr+/18G2gUETyjTHNmRxP+U9X5ZJumSzMj4oHFhOti3baHggFMKHka7Eq78rFdeypwGtdJXgRmQ/MB6isrMxBOMoOPaltt6MaRkeKdu+onx3FP3/0T+KNe+5VBAuCjD1zLHkFeQ5GpuzQbZIXkSeBwUmeusIYs6Sbnz0QuA6Y1dVrjDGLgEUAVVVV2ozwoJ4mazdUw7Tx6gCsnpgyfwrbPtrGqltWIUHBilmM/MZIqi6r0q4tH+o2yRtjZvRkxyIyDHgE+A9jzIc92Yfyhp4ma7dUwzhdX59rIsJx1x7HIRcfwqdvfUp+eT5F/Yt8fWLrzWzprhGRfsAy4DJjzAt2HEO5R0+TdU/KJY0xvLX4LV664SUatjcw5rgxTP/FdEqHpj9vexs3XVHkUv/B/ek/uL/TYSibZVpCOUdEPgOOAJaJyOOtT/0QGAP8QkTeaP03MMNYlUv1tHSxJ9Uwz/z8GZZ+bymbX9vMzk928tpdr3HHpDuo29LzCcDcckWhlB0ySvLGmEeMMcOMMfnGmEHGmONat//aGFNsjJm817+t2QlZuU1PSxfTLZdsrG3kxRtfJN6wJ/mauKE50sxz13es4k2d1tcrP9O/YpWxTEoX06mG2frmVgLhAImmRLvtVtTio6c/Sj/wVroSk/IzTfIqK3JRuliyXwlWzOr8hEDx0OKe71fr65WPaZJXntF/TH/KJ5VT81pNu2QfzA8y8fsTM9q31tcrv9L55JWnnPrAqQw5agiBvADBwiD5/fOZdtM0xh4z1unQlHIlbcmrrMjVYKKKygpOX3o6mzdspnFHI8XDigkEA9TW1gL+rGtXKhOa5FWP7Z3Y92b3YKKSkhIYBdtKe88AJqV6SrtrVI90XKavo7bBRHbZ1wAmpdQe2pJXnaTS9ZIsyXZk52AiHcCkVGq0Ja/aSXUh7VSSqZ2DiXQAk1Kp0SSv2km1G6S7ZGr3YCJdIESp1GiSV+2k2g2SLMm2SWc1p57KZAUppXoTvbZV7aQ6M6QbRonqACaluqdJXrWTzjwuqSTZ3rQYh1JupEletdOuhR5rokT+RWneJ+SbCWCdAoH8Ln+2Y0IvLCykrq5Oa9mVcpAmedVJSUkJJUVR+HgGJrYZovVYkSIsuZimQc/Qp9+4Tj+
TbHWljhU50DsW41DKTfTGq0pu608wsY8RU4dgCFBP0GwlsPX8pMk7lbr5NlrLrlTuaJJXyUUeRoi12yRYFForqd2xrdPL00ncWsuuVO5okldpS3dd1r1pLbtSuZXpGq/fFJG3RcQSkaokz1eKSJ2I/CST4ygHlJyGIdxukyFIg0wnFO5887WrwUklJSVay66UgzK9bn4LmAv8qYvnbwKWZ3gM5YSBC7HqVyLxzQgNGIqwKGF7+DddllOCrq6klNtklOSNMe8ASUc+isgpwEdAfSbHUA4JDiA4+h0aax6kObKGZmt/mvO+Tln/QV0mbh2cpJT72HIHTET6AJcCM4F9dtWIyHxgPkBlZaUd4aiekhCFA8+kcOCZTkeilOqhbvvkReRJEXkryb+T9/FjVwM3G2Pqutu/MWaRMabKGFNVUVGRRuhKKaW6021L3hgzowf7PQw4TUSuB/oBlog0GWNu7cG+lFJK9ZAt3TXGmKPb/l9ErgbqNMErpVTuZVpCOUdEPgOOAJaJyOPZCUsppVQ2ZFpd8wjwSDevuTqTYyillOo5HfGqlFI+pkleKaV8TJO8Ukr5mKQ6PWwuiEgN8EkaP1IOdJ4SsXfSz6KFfg576GfRojd8DvsbY5IONHJVkk+XiKw2xnSaGK030s+ihX4Oe+hn0aK3fw7aXaOUUj6mSV4ppXzM60l+kdMBuIh+Fi30c9hDP4sWvfpz8HSfvFJKqX3zekteKaXUPmiSV0opH/N8kheRhSLyroisE5FHRKSf0zHlkojMFpH3ROQDEbnM6XicIiLDReQZEVnfuu7wAqdjcpKIBEXkdRF5zOlYnCQi/UTkodYc8Y6IHOF0TLnm+SQPrAAOMsZMBN4HLnc4npwRkSBwG/B1YDxwpoiMdzYqx8SBHxtjxgOHAxf04s8CYAHwjtNBuMAtwD+NMeOASfTCz8TzSd4Y84QxJt768GVgmJPx5NihwAfGmA3GmCiwGNjXil2+ZYzZbIx5rfX/I7R8mYc6G5UzRGQYcAJwp9OxOElE+gLTgT8DGGOixpgvnI0q9zyf5Dv4LrDc6SByaCjw6V6PP6OXJra9icgI4GDgFWcjcczvgZ8BltOBOGwkUAP8pbXr6k4RKXY6qFzzRJJPZZ1ZEbmClkv2vzkXqXJa6yLyDwMXG2N2OR1PronIicBWY8wap2NxgRAwBbjdGHMwUA/0uvtWtiz/l23drTMrIt8GTgS+ZnpX4f9GYPhej4e1buuVRCRMS4L/mzHmf52OxyFHAd8QkeOBAqBURP5qjDnb4bic8BnwmTGm7YruIXphkvdES35fRGQ2LZem3zDGNDgdT46tAsaKyEgRyQPOAJY6HJMjRERo6Xt9xxhzk9PxOMUYc7kxZpgxZgQtfw9P99IEjzHmc+BTEfly66avAesdDMkRnmjJd+NWIB9Y0fI952VjzPedDSk3jDFxEfkh8DgQBO4yxrztcFhOOQo4B3hTRN5o3fb/jDH/cDAm5bwLgb+1NoI2AN9xOJ6c02kNlFLKxzzfXaOUUqprmuSVUsrHNMkrpZSPaZJXSikf0ySvlFI+pkleKaV8TJO8Ukr52P8HcxO/1JMyhKgAAAAASUVORK5CYII=\n", "text/plain": [ - "
" + "
" ] }, - "metadata": {}, + "metadata": { + "needs_background": "light" + }, "output_type": "display_data" } ], "source": [ - "games = [\n", - " \"battlefield3\",\n", - " \"Diablo\",\n", - " \"DotA2\",\n", - " \"Guildwars2\",\n", - " \"leagueoflegends\",\n", - " \"magicTCG\",\n", - " \"Minecraft\",\n", - " \"pokemon\",\n", - " \"skyrim\",\n", - " \"starcraft\",\n", - " \"tf2\",\n", - " \"wow\",\n", - "]\n", + "games = [\"battlefield3\", \"Diablo\", \"DotA2\", \"Guildwars2\", \"leagueoflegends\", \"magicTCG\", \"Minecraft\", \"pokemon\", \"skyrim\", \"starcraft\", \"tf2\", \"wow\"]\n", "trade = [\"Dota2Trade\", \"pokemontrades\", \"SteamGameSwap\", \"tf2trade\", \"Random_Acts_Of_Amazon\"]\n", "sports = [\"baseball\", \"CFB\", \"hockey\", \"MMA\", \"nba\", \"nfl\", \"soccer\"]\n", "\n", - "link_aggregators = [\n", - " \"AskReddit\",\n", - " \"WTF\",\n", - " \"pics\",\n", - " \"gifs\",\n", - " \"aww\",\n", - " \"funny\",\n", - " \"todayilearned\",\n", - " \"AdviceAnimals\",\n", - "]\n", + "link_aggregators = [\"AskReddit\", \"WTF\", \"pics\", \"gifs\", \"aww\", \"funny\", \"todayilearned\",\n", + " \"AdviceAnimals\"]\n", "relationships = [\"AskMen\", \"AskWomen\", \"relationships\", \"relationship_advice\", \"OkCupid\"]\n", "\n", "plt.scatter(tsne_df[0].values, tsne_df[1].values, color=\"#dddddd\")\n", - "plt.scatter(\n", - " tsne_df[0].values,\n", - " tsne_df[1].values,\n", - " color=[\n", - " \"green\"\n", - " if l in games\n", - " else \"gold\"\n", - " if l in trade\n", - " else \"purple\"\n", - " if l in relationships\n", - " else \"red\"\n", - " if l in link_aggregators\n", - " else \"blue\"\n", - " if l in sports\n", - " else \"#00000000\"\n", - " for l in tsne_df.index\n", - " ],\n", - ")\n", + "plt.scatter(tsne_df[0].values, tsne_df[1].values, color=[\n", + " \"green\" if l in games else\n", + " \"gold\" if l in trade else\n", + " \"purple\" if l in relationships else\n", + " \"red\" if l in link_aggregators else\n", + " \"blue\" if l in sports else\n", + " 
\"#00000000\"\n", + " for l in tsne_df.index])\n", "plt.show()" ] }, @@ -5938,40 +19925,41 @@ }, { "cell_type": "code", - "execution_count": 46, + "execution_count": 70, "metadata": { "scrolled": true }, "outputs": [], "source": [ - "dists = pairwise_distances(subreddit_df.values, metric=\"cosine\")\n", + "dists = pairwise_distances(subreddit_df.values, metric='cosine')\n", "flat_dists = np.ravel(dists)\n", "idx1, idx2 = np.unravel_index(np.arange(len(flat_dists)), dists.shape)\n", - "pairwise_dist_df = pd.DataFrame.from_dict(\n", - " {\"p1\": subreddit_df.index[idx1], \"p2\": subreddit_df.index[idx2], \"dist\": flat_dists},\n", - " orient=\"columns\",\n", - ")" + "pairwise_dist_df = pd.DataFrame.from_dict({'p1': subreddit_df.index[idx1],\n", + " 'p2': subreddit_df.index[idx2],\n", + " 'dist': flat_dists},\n", + " orient='columns')" ] }, { "cell_type": "code", - "execution_count": 47, + "execution_count": 71, "metadata": {}, "outputs": [], "source": [ - "def print_nearest_neighbors(pairwise_dist_df, test_subreddits=[], top_N=10):\n", + "def print_nearest_neighbors(pairwise_dist_df, \n", + " test_subreddits=[],\n", + " top_N=10):\n", " for subreddit in test_subreddits:\n", - " subset_df = pairwise_dist_df[\n", - " (pairwise_dist_df.p1 == subreddit) & (pairwise_dist_df.p2 != subreddit)\n", - " ]\n", + " subset_df = pairwise_dist_df[(pairwise_dist_df.p1 == subreddit)\n", + " & (pairwise_dist_df.p2 != subreddit)]\n", " print(subreddit)\n", - " print(subset_df.sort_values(\"dist\")[[\"p2\", \"dist\"]].head(top_N))\n", + " print(subset_df.sort_values('dist')[['p2', 'dist']].head(top_N))\n", " print()" ] }, { "cell_type": "code", - "execution_count": 48, + "execution_count": 72, "metadata": { "scrolled": true }, @@ -6114,21 +20102,10 @@ } ], "source": [ - "print_nearest_neighbors(\n", - " pairwise_dist_df,\n", - " [\n", - " \"apple\",\n", - " \"politics\",\n", - " \"leagueoflegends\",\n", - " \"AskWomen\",\n", - " \"Music\",\n", - " \"pics\",\n", - " \"australia\",\n", - 
" \"Random_Acts_Of_Amazon\",\n", - " \"Bitcoin\",\n", - " \"MensRights\",\n", - " ],\n", - ")" + "print_nearest_neighbors(pairwise_dist_df, ['apple', 'politics', 'leagueoflegends',\n", + " 'AskWomen', 'Music', 'pics',\n", + " 'australia', 'Random_Acts_Of_Amazon',\n", + " 'Bitcoin', 'MensRights'])" ] }, { @@ -6140,7 +20117,7 @@ }, { "cell_type": "code", - "execution_count": 49, + "execution_count": 73, "metadata": {}, "outputs": [], "source": [ @@ -6149,7 +20126,7 @@ }, { "cell_type": "code", - "execution_count": 50, + "execution_count": 74, "metadata": {}, "outputs": [], "source": [ @@ -6172,7 +20149,7 @@ }, { "cell_type": "code", - "execution_count": 51, + "execution_count": 75, "metadata": {}, "outputs": [ { @@ -6208,7 +20185,7 @@ " \n", " \n", " \n", - " e5q0p88\n", + " e60kzm8\n", " -0.963049\n", " 0.069720\n", " 0.012004\n", @@ -6216,10 +20193,10 @@ " -0.253023\n", " -0.035112\n", " 0.045334\n", - " Christianity\n", + " MensRights\n", " \n", " \n", - " e5d4yag\n", + " e5q0p88\n", " -0.963049\n", " 0.069720\n", " 0.012004\n", @@ -6230,7 +20207,7 @@ " Christianity\n", " \n", " \n", - " e60kzm8\n", + " e5d4yag\n", " -0.963049\n", " 0.069720\n", " 0.012004\n", @@ -6238,7 +20215,7 @@ " -0.253023\n", " -0.035112\n", " 0.045334\n", - " MensRights\n", + " Christianity\n", " \n", " \n", " e5m9w22\n", @@ -6263,7 +20240,7 @@ " Music\n", " \n", " \n", - " e6r4v8m\n", + " e68nlvd\n", " -0.935494\n", " 0.050881\n", " 0.060026\n", @@ -6271,10 +20248,10 @@ " -0.250417\n", " -0.055384\n", " 0.220780\n", - " POLITIC\n", + " Christianity\n", " \n", " \n", - " e5o4hip\n", + " e6r4v8m\n", " -0.935494\n", " 0.050881\n", " 0.060026\n", @@ -6282,10 +20259,10 @@ " -0.250417\n", " -0.055384\n", " 0.220780\n", - " gonewild\n", + " POLITIC\n", " \n", " \n", - " e68nlvd\n", + " e5ojhdr\n", " -0.935494\n", " 0.050881\n", " 0.060026\n", @@ -6293,10 +20270,10 @@ " -0.250417\n", " -0.055384\n", " 0.220780\n", - " Christianity\n", + " AskMen\n", " \n", " \n", - " e5ojhdr\n", + " e5o4hip\n", " 
-0.935494\n", " 0.050881\n", " 0.060026\n", @@ -6304,7 +20281,7 @@ " -0.250417\n", " -0.055384\n", " 0.220780\n", - " AskMen\n", + " gonewild\n", " \n", " \n", " e6jkkjw\n", @@ -6323,27 +20300,27 @@ ], "text/plain": [ " 0 1 2 3 4 5 6 \\\n", + "e60kzm8 -0.963049 0.069720 0.012004 0.014941 -0.253023 -0.035112 0.045334 \n", "e5q0p88 -0.963049 0.069720 0.012004 0.014941 -0.253023 -0.035112 0.045334 \n", "e5d4yag -0.963049 0.069720 0.012004 0.014941 -0.253023 -0.035112 0.045334 \n", - "e60kzm8 -0.963049 0.069720 0.012004 0.014941 -0.253023 -0.035112 0.045334 \n", "e5m9w22 -0.963049 0.069720 0.012004 0.014941 -0.253023 -0.035112 0.045334 \n", "e59g1nk -0.956579 0.050546 -0.028281 0.006982 -0.281819 -0.045649 -0.006813 \n", - "e6r4v8m -0.935494 0.050881 0.060026 0.064334 -0.250417 -0.055384 0.220780 \n", - "e5o4hip -0.935494 0.050881 0.060026 0.064334 -0.250417 -0.055384 0.220780 \n", "e68nlvd -0.935494 0.050881 0.060026 0.064334 -0.250417 -0.055384 0.220780 \n", + "e6r4v8m -0.935494 0.050881 0.060026 0.064334 -0.250417 -0.055384 0.220780 \n", "e5ojhdr -0.935494 0.050881 0.060026 0.064334 -0.250417 -0.055384 0.220780 \n", + "e5o4hip -0.935494 0.050881 0.060026 0.064334 -0.250417 -0.055384 0.220780 \n", "e6jkkjw -0.923477 0.027897 0.067691 0.035644 -0.276119 -0.062107 0.245887 \n", "\n", " subreddit \n", + "e60kzm8 MensRights \n", "e5q0p88 Christianity \n", "e5d4yag Christianity \n", - "e60kzm8 MensRights \n", "e5m9w22 atheism \n", "e59g1nk Music \n", - "e6r4v8m POLITIC \n", - "e5o4hip gonewild \n", "e68nlvd Christianity \n", + "e6r4v8m POLITIC \n", "e5ojhdr AskMen \n", + "e5o4hip gonewild \n", "e6jkkjw POLITIC " ] }, @@ -6422,7 +20399,7 @@ " -0.129918\n", " \n", " \n", - " prop-nonzero[indegree over c->c mid-thread responses]\n", + " prop-nonzero[indegree over C->c responses]\n", " -0.785073\n", " -0.120033\n", " -0.107805\n", @@ -6452,7 +20429,7 @@ " 0.042901\n", " \n", " \n", - " prop-nonzero[indegree over C->c responses]\n", + " prop-nonzero[indegree over c->c 
mid-thread responses]\n", " -0.785073\n", " -0.120033\n", " -0.107805\n", @@ -6491,10 +20468,10 @@ "prop-nonzero[indegree over C->C responses] -0.945359 -0.101535 \n", "count[reciprocity motif over mid-thread] -0.809754 0.105072 \n", "count[reciprocity motif] -0.809152 0.173066 \n", - "prop-nonzero[indegree over c->c mid-thread resp... -0.785073 -0.120033 \n", + "prop-nonzero[indegree over C->c responses] -0.785073 -0.120033 \n", "prop-nonzero[indegree over C->c mid-thread resp... -0.785073 -0.120033 \n", "prop-nonzero[indegree over c->c responses] -0.785073 -0.120033 \n", - "prop-nonzero[indegree over C->c responses] -0.785073 -0.120033 \n", + "prop-nonzero[indegree over c->c mid-thread resp... -0.785073 -0.120033 \n", "entropy[indegree over C->c responses] -0.752376 -0.339732 \n", "is-present[reciprocity motif over mid-thread] -0.746503 -0.388562 \n", "\n", @@ -6503,10 +20480,10 @@ "prop-nonzero[indegree over C->C responses] 0.238357 -0.078078 \n", "count[reciprocity motif over mid-thread] -0.429701 0.326016 \n", "count[reciprocity motif] -0.206560 0.449086 \n", - "prop-nonzero[indegree over c->c mid-thread resp... -0.107805 -0.327275 \n", + "prop-nonzero[indegree over C->c responses] -0.107805 -0.327275 \n", "prop-nonzero[indegree over C->c mid-thread resp... -0.107805 -0.327275 \n", "prop-nonzero[indegree over c->c responses] -0.107805 -0.327275 \n", - "prop-nonzero[indegree over C->c responses] -0.107805 -0.327275 \n", + "prop-nonzero[indegree over c->c mid-thread resp... -0.107805 -0.327275 \n", "entropy[indegree over C->c responses] -0.136839 -0.383152 \n", "is-present[reciprocity motif over mid-thread] 0.299804 0.194661 \n", "\n", @@ -6515,10 +20492,10 @@ "prop-nonzero[indegree over C->C responses] -0.107471 0.146344 \n", "count[reciprocity motif over mid-thread] 0.036748 -0.019626 \n", "count[reciprocity motif] 0.224681 -0.060130 \n", - "prop-nonzero[indegree over c->c mid-thread resp... 
-0.495253 0.058358 \n", + "prop-nonzero[indegree over C->c responses] -0.495253 0.058358 \n", "prop-nonzero[indegree over C->c mid-thread resp... -0.495253 0.058358 \n", "prop-nonzero[indegree over c->c responses] -0.495253 0.058358 \n", - "prop-nonzero[indegree over C->c responses] -0.495253 0.058358 \n", + "prop-nonzero[indegree over c->c mid-thread resp... -0.495253 0.058358 \n", "entropy[indegree over C->c responses] -0.323822 0.136627 \n", "is-present[reciprocity motif over mid-thread] 0.101641 -0.001041 \n", "\n", @@ -6527,10 +20504,10 @@ "prop-nonzero[indegree over C->C responses] -0.010486 \n", "count[reciprocity motif over mid-thread] -0.201477 \n", "count[reciprocity motif] -0.129918 \n", - "prop-nonzero[indegree over c->c mid-thread resp... 0.042901 \n", + "prop-nonzero[indegree over C->c responses] 0.042901 \n", "prop-nonzero[indegree over C->c mid-thread resp... 0.042901 \n", "prop-nonzero[indegree over c->c responses] 0.042901 \n", - "prop-nonzero[indegree over C->c responses] 0.042901 \n", + "prop-nonzero[indegree over c->c mid-thread resp... 
0.042901 \n", "entropy[indegree over C->c responses] 0.171623 \n", "is-present[reciprocity motif over mid-thread] -0.391977 " ] @@ -6750,7 +20727,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.0" + "version": "3.7.6" } }, "nbformat": 4, diff --git a/examples/hyperconvo/predictive_tasks.ipynb b/examples/hyperconvo/predictive_tasks.ipynb index 6e47e659..28259133 100644 --- a/examples/hyperconvo/predictive_tasks.ipynb +++ b/examples/hyperconvo/predictive_tasks.ipynb @@ -9,7 +9,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -27,14 +27,14 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Dataset already exists at /Users/seanzhangkx/.convokit/downloads/reddit-corpus-small\n" + "Dataset already exists at /Users/calebchiam/Documents/GitHub/ConvoKit/convokit/tensors/reddit-corpus-small\n" ] } ], @@ -53,18 +53,16 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ - "top_level_utterance_ids = [\n", - " utt.id for utt in corpus.iter_utterances() if utt.id == utt.meta[\"top_level_comment\"]\n", - "]" + "top_level_utterance_ids = [utt.id for utt in corpus.iter_utterances() if utt.id == utt.meta['top_level_comment']]" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -73,7 +71,7 @@ "10000" ] }, - "execution_count": 4, + "execution_count": 5, "metadata": {}, "output_type": "execute_result" } @@ -84,16 +82,13 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ - "threads_corpus = corpus.reindex_conversations(\n", - " source_corpus=corpus,\n", - " new_convo_roots=top_level_utterance_ids,\n", - " preserve_convo_meta=True,\n", - " 
preserve_corpus_meta=False,\n", - ")" + "threads_corpus = corpus.reindex_conversations(new_convo_roots=top_level_utterance_ids, \n", + " preserve_convo_meta=True,\n", + " preserve_corpus_meta=False)" ] }, { @@ -124,13 +119,13 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ "hc = HyperConvo(prefix_len=10, min_convo_len=10, invalid_val=-1)\n", "hc.fit_transform(threads_corpus)\n", - "feats = list(threads_corpus.get_vector_matrix(\"hyperconvo\").columns)" + "feats = list(threads_corpus.get_vector_matrix('hyperconvo').columns)" ] }, { @@ -142,18 +137,18 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 8, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "['max[indegree over c->c responses]',\n", - " 'argmax[indegree over c->c responses]',\n", - " 'norm.max[indegree over c->c responses]']" + "['2nd-argmax[indegree over C->C mid-thread responses]',\n", + " '2nd-argmax[indegree over C->C responses]',\n", + " '2nd-argmax[indegree over C->c mid-thread responses]']" ] }, - "execution_count": 7, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } @@ -171,36 +166,34 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 9, "metadata": {}, "outputs": [], "source": [ - "hyperconvo_matrix = threads_corpus.get_vector_matrix(\"hyperconvo\")" + "hyperconvo_matrix = threads_corpus.get_vector_matrix('hyperconvo')" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 10, "metadata": {}, "outputs": [], "source": [ - "reply_tree_matrix = hyperconvo_matrix.subset(\n", - " columns=[c for c in hyperconvo_matrix.columns if \"c->c\" in c]\n", - ")" + "reply_tree_matrix = hyperconvo_matrix.subset(columns=[c for c in hyperconvo_matrix.columns if 'c->c' in c])" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 11, "metadata": {}, "outputs": [], "source": [ - "reply_tree_matrix.name = \"reply-tree\"" + 
"reply_tree_matrix.name = 'reply-tree'" ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 12, "metadata": {}, "outputs": [], "source": [ @@ -209,13 +202,13 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 13, "metadata": {}, "outputs": [], "source": [ "for convo in threads_corpus.iter_conversations():\n", - " if convo.has_vector(\"hyperconvo\"):\n", - " convo.add_vector(\"reply-tree\")" + " if convo.has_vector('hyperconvo'):\n", + " convo.add_vector('reply-tree')" ] }, { @@ -227,15 +220,13 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 14, "metadata": {}, "outputs": [], "source": [ "## volume is the number of unique users in the first 10 comments\n", "for convo in threads_corpus.iter_conversations():\n", - " convo.meta[\"volume\"] = len(\n", - " set([utt.speaker for utt in convo.get_chronological_utterance_list()[:10]])\n", - " )" + " convo.meta['volume'] = len(set([utt.user for utt in convo.get_chronological_utterance_list()[:10]]))" ] }, { @@ -270,34 +261,46 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 15, "metadata": {}, "outputs": [], "source": [ "for convo in threads_corpus.iter_conversations():\n", - " convo.meta[\"comment-growth\"] = len(list(convo.iter_utterances())) >= 15\n", - "\n", + " convo.meta['comment-growth'] = len(list(convo.iter_utterances())) >= 15\n", + " \n", " convo_utts = convo.get_chronological_utterance_list()\n", " if len(convo_utts) >= 20:\n", " first_10_spkrs = len(set([utt.speaker.id for utt in convo_utts[:10]]))\n", " first_20_spkrs = len(set([utt.speaker.id for utt in convo_utts[:20]]))\n", - " convo.meta[\"commenter-growth\"] = (first_20_spkrs / first_10_spkrs) >= 2.0\n", + " convo.meta['commenter-growth'] = (first_20_spkrs / first_10_spkrs) >= 2.0\n", " else:\n", - " convo.meta[\"commenter-growth\"] = None" + " convo.meta['commenter-growth'] = None" ] }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 16, 
"metadata": {}, "outputs": [ { "data": { "text/plain": [ - "ConvoKitMeta({'original_convo_meta': {'title': 'Coming Soon: Sibling Rivalry Podcast Season 2 with Bob The Drag Queen & Monét X Change', 'num_comments': 19, 'domain': 'youtube.com', 'timestamp': 1536033837, 'subreddit': 'rupaulsdragrace', 'gilded': 0, 'gildings': {'gid_1': 0, 'gid_2': 0, 'gid_3': 0}, 'stickied': False, 'author_flair_text': 'Miz Cracker'}, 'original_convo_id': '9cs8tg', 'volume': 6, 'comment-growth': False, 'commenter-growth': None})" + "{'original_convo_meta': {'title': 'Daily Discussion, September 15, 2018',\n", + " 'num_comments': 97,\n", + " 'domain': 'self.Bitcoin',\n", + " 'timestamp': 1537002016,\n", + " 'subreddit': 'Bitcoin',\n", + " 'gilded': 0,\n", + " 'gildings': {'gid_1': 0, 'gid_2': 0, 'gid_3': 0},\n", + " 'stickied': False,\n", + " 'author_flair_text': ''},\n", + " 'original_convo_id': '9g03ho',\n", + " 'volume': 4,\n", + " 'comment-growth': True,\n", + " 'commenter-growth': True}" ] }, - "execution_count": 15, + "execution_count": 16, "metadata": {}, "output_type": "execute_result" } @@ -315,7 +318,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 17, "metadata": {}, "outputs": [], "source": [ @@ -324,33 +327,32 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 18, "metadata": {}, "outputs": [], "source": [ - "pairer_1 = Pairer(\n", - " obj_type=\"conversation\",\n", - " pairing_func=lambda convo: convo.meta[\"original_convo_id\"],\n", - " pos_label_func=lambda convo: convo.meta[\"comment-growth\"],\n", - " neg_label_func=lambda convo: not convo.meta[\"comment-growth\"],\n", - " pair_id_attribute_name=\"pair_id_1\",\n", - " label_attribute_name=\"pair_obj_1\",\n", - " pair_orientation_attribute_name=\"pair_orientation_1\",\n", - ")" + "pairer_1 = Pairer(obj_type=\"conversation\", \n", + " pairing_func=lambda convo: convo.meta['original_convo_id'],\n", + " pos_label_func=lambda convo: convo.meta['comment-growth'],\n", + " 
neg_label_func=lambda convo: not convo.meta['comment-growth'],\n", + " pair_id_attribute_name=\"pair_id_1\",\n", + " label_attribute_name=\"pair_obj_1\",\n", + " pair_orientation_attribute_name=\"pair_orientation_1\"\n", + " )" ] }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 19, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 18, + "execution_count": 19, "metadata": {}, "output_type": "execute_result" } @@ -361,33 +363,32 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 20, "metadata": {}, "outputs": [], "source": [ - "pairer_2 = Pairer(\n", - " obj_type=\"conversation\",\n", - " pairing_func=lambda convo: convo.meta[\"original_convo_id\"],\n", - " pos_label_func=lambda convo: convo.meta[\"commenter-growth\"],\n", - " neg_label_func=lambda convo: not convo.meta[\"commenter-growth\"],\n", - " pair_id_attribute_name=\"pair_id_2\",\n", - " label_attribute_name=\"pair_obj_2\",\n", - " pair_orientation_attribute_name=\"pair_orientation_2\",\n", - ")" + "pairer_2 = Pairer(obj_type=\"conversation\", \n", + " pairing_func=lambda convo: convo.meta['original_convo_id'],\n", + " pos_label_func=lambda convo: convo.meta['commenter-growth'],\n", + " neg_label_func=lambda convo: not convo.meta['commenter-growth'],\n", + " pair_id_attribute_name=\"pair_id_2\",\n", + " label_attribute_name=\"pair_obj_2\",\n", + " pair_orientation_attribute_name=\"pair_orientation_2\"\n", + " )" ] }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 21, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 20, + "execution_count": 21, "metadata": {}, "output_type": "execute_result" } @@ -405,7 +406,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 22, "metadata": {}, "outputs": [], "source": [ @@ -414,7 +415,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 23, "metadata": {}, "outputs": [ { @@ 
-427,22 +428,22 @@ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 22, + "execution_count": 23, "metadata": {}, "output_type": "execute_result" } ], "source": [ "bow = BoWTransformer(obj_type=\"conversation\", vector_name=\"bow_1\")\n", - "bow.fit_transform(threads_corpus, selector=lambda convo: convo.meta[\"pair_id_1\"] is not None)" + "bow.fit_transform(threads_corpus, selector=lambda convo: convo.meta['pair_id_1'] is not None)" ] }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 24, "metadata": {}, "outputs": [ { @@ -455,22 +456,22 @@ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 23, + "execution_count": 24, "metadata": {}, "output_type": "execute_result" } ], "source": [ "bow2 = BoWTransformer(obj_type=\"conversation\", vector_name=\"bow_2\")\n", - "bow2.fit_transform(threads_corpus, selector=lambda convo: convo.meta[\"pair_id_2\"] is not None)" + "bow2.fit_transform(threads_corpus, selector=lambda convo: convo.meta['pair_id_2'] is not None)" ] }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 25, "metadata": {}, "outputs": [ { @@ -479,7 +480,7 @@ "{'bow_1', 'bow_2', 'hyperconvo', 'reply-tree'}" ] }, - "execution_count": 24, + "execution_count": 25, "metadata": {}, "output_type": "execute_result" } @@ -497,7 +498,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 26, "metadata": {}, "outputs": [], "source": [ @@ -536,7 +537,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 39, "metadata": {}, "outputs": [ { @@ -549,22 +550,21 @@ { "data": { "text/plain": [ - "0.5774812343619683" + "0.5445037531276065" ] }, - "execution_count": 26, + "execution_count": 39, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "pp = PairedVectorPrediction(\n", - " obj_type=\"conversation\",\n", - " vector_name=\"hyperconvo\",\n", - " pair_id_attribute_name=\"pair_id_1\",\n", - " label_attribute_name=\"pair_obj_1\",\n", - " 
pair_orientation_attribute_name=\"pair_orientation_1\",\n", - ")\n", + "pp = PairedVectorPrediction(obj_type=\"conversation\",\n", + " vector_name=\"hyperconvo\",\n", + " pair_id_attribute_name=\"pair_id_1\",\n", + " label_attribute_name=\"pair_obj_1\",\n", + " pair_orientation_attribute_name=\"pair_orientation_1\"\n", + " )\n", "pp.summarize(threads_corpus)" ] }, @@ -577,7 +577,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 40, "metadata": {}, "outputs": [ { @@ -590,22 +590,20 @@ { "data": { "text/plain": [ - "0.5973144286905754" + "0.5974145120934111" ] }, - "execution_count": 27, + "execution_count": 40, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "pp = PairedVectorPrediction(\n", - " obj_type=\"conversation\",\n", - " vector_name=\"reply-tree\",\n", - " pair_id_attribute_name=\"pair_id_1\",\n", - " label_attribute_name=\"pair_obj_1\",\n", - " pair_orientation_attribute_name=\"pair_orientation_1\",\n", - ")\n", + "pp = PairedVectorPrediction(obj_type=\"conversation\",\n", + " vector_name=\"reply-tree\",\n", + " pair_id_attribute_name=\"pair_id_1\", \n", + " label_attribute_name=\"pair_obj_1\",\n", + " pair_orientation_attribute_name=\"pair_orientation_1\")\n", "pp.summarize(threads_corpus)" ] }, @@ -618,7 +616,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 41, "metadata": {}, "outputs": [ { @@ -631,22 +629,21 @@ { "data": { "text/plain": [ - "0.5993327773144287" + "0.5955796497080901" ] }, - "execution_count": 28, + "execution_count": 41, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "pp = PairedPrediction(\n", - " obj_type=\"conversation\",\n", - " pred_feats=[\"volume\"],\n", - " pair_id_attribute_name=\"pair_id_1\",\n", - " label_attribute_name=\"pair_obj_1\",\n", - " pair_orientation_attribute_name=\"pair_orientation_1\",\n", - ")\n", + "pp = PairedPrediction(obj_type=\"conversation\",\n", + " pred_feats=[\"volume\"],\n", + " 
pair_id_attribute_name=\"pair_id_1\",\n", + " label_attribute_name=\"pair_obj_1\",\n", + " pair_orientation_attribute_name=\"pair_orientation_1\"\n", + " )\n", "pp.summarize(threads_corpus)" ] }, @@ -659,7 +656,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 42, "metadata": {}, "outputs": [ { @@ -672,28 +669,27 @@ { "data": { "text/plain": [ - "0.8160633861551293" + "0.8342285237698082" ] }, - "execution_count": 29, + "execution_count": 42, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "pp = PairedVectorPrediction(\n", - " obj_type=\"conversation\",\n", - " vector_name=\"bow_1\",\n", - " pair_id_attribute_name=\"pair_id_1\",\n", - " label_attribute_name=\"pair_obj_1\",\n", - " pair_orientation_attribute_name=\"pair_orientation_1\",\n", - ")\n", + "pp = PairedVectorPrediction(obj_type=\"conversation\",\n", + " vector_name=\"bow_1\",\n", + " pair_id_attribute_name=\"pair_id_1\",\n", + " label_attribute_name=\"pair_obj_1\",\n", + " pair_orientation_attribute_name=\"pair_orientation_1\"\n", + " )\n", "pp.summarize(threads_corpus)" ] }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 38, "metadata": {}, "outputs": [ { @@ -706,10 +702,10 @@ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 30, + "execution_count": 38, "metadata": {}, "output_type": "execute_result" } @@ -734,7 +730,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 43, "metadata": {}, "outputs": [ { @@ -747,22 +743,21 @@ { "data": { "text/plain": [ - "0.5655737704918031" + "0.5815970386039133" ] }, - "execution_count": 31, + "execution_count": 43, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "pp = PairedVectorPrediction(\n", - " obj_type=\"conversation\",\n", - " vector_name=\"hyperconvo\",\n", - " pair_id_attribute_name=\"pair_id_2\",\n", - " label_attribute_name=\"pair_obj_2\",\n", - " pair_orientation_attribute_name=\"pair_orientation_2\",\n", - ")\n", + "pp = 
PairedVectorPrediction(obj_type=\"conversation\",\n", + " vector_name=\"hyperconvo\",\n", + " pair_id_attribute_name=\"pair_id_2\",\n", + " label_attribute_name=\"pair_obj_2\",\n", + " pair_orientation_attribute_name=\"pair_orientation_2\"\n", + " )\n", "pp.summarize(threads_corpus)" ] }, @@ -775,7 +770,7 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 44, "metadata": {}, "outputs": [ { @@ -788,22 +783,20 @@ { "data": { "text/plain": [ - "0.5296668429402432" + "0.509941829719725" ] }, - "execution_count": 32, + "execution_count": 44, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "pp = PairedVectorPrediction(\n", - " obj_type=\"conversation\",\n", - " vector_name=\"reply-tree\",\n", - " pair_id_attribute_name=\"pair_id_2\",\n", - " label_attribute_name=\"pair_obj_2\",\n", - " pair_orientation_attribute_name=\"pair_orientation_2\",\n", - ")\n", + "pp = PairedVectorPrediction(obj_type=\"conversation\",\n", + " vector_name=\"reply-tree\",\n", + " pair_id_attribute_name=\"pair_id_2\", \n", + " label_attribute_name=\"pair_obj_2\",\n", + " pair_orientation_attribute_name=\"pair_orientation_2\")\n", "pp.summarize(threads_corpus)" ] }, @@ -816,7 +809,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 45, "metadata": {}, "outputs": [ { @@ -829,22 +822,21 @@ { "data": { "text/plain": [ - "0.5849814912744579" + "0.5653622421998942" ] }, - "execution_count": 33, + "execution_count": 45, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "pp = PairedPrediction(\n", - " obj_type=\"conversation\",\n", - " pred_feats=[\"volume\"],\n", - " pair_id_attribute_name=\"pair_id_2\",\n", - " label_attribute_name=\"pair_obj_2\",\n", - " pair_orientation_attribute_name=\"pair_orientation_2\",\n", - ")\n", + "pp = PairedPrediction(obj_type=\"conversation\",\n", + " pred_feats=[\"volume\"],\n", + " pair_id_attribute_name=\"pair_id_2\",\n", + " label_attribute_name=\"pair_obj_2\",\n", + " 
pair_orientation_attribute_name=\"pair_orientation_2\"\n", + " )\n", "pp.summarize(threads_corpus)" ] }, @@ -857,7 +849,7 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 46, "metadata": {}, "outputs": [ { @@ -870,22 +862,21 @@ { "data": { "text/plain": [ - "0.7386567953463776" + "0.7154415653093601" ] }, - "execution_count": 34, + "execution_count": 46, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "pp = PairedVectorPrediction(\n", - " obj_type=\"conversation\",\n", - " vector_name=\"bow_2\",\n", - " pair_id_attribute_name=\"pair_id_2\",\n", - " label_attribute_name=\"pair_obj_2\",\n", - " pair_orientation_attribute_name=\"pair_orientation_2\",\n", - ")\n", + "pp = PairedVectorPrediction(obj_type=\"conversation\",\n", + " vector_name=\"bow_2\",\n", + " pair_id_attribute_name=\"pair_id_2\",\n", + " label_attribute_name=\"pair_obj_2\",\n", + " pair_orientation_attribute_name=\"pair_orientation_2\"\n", + " )\n", "pp.summarize(threads_corpus)" ] } @@ -906,7 +897,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.0" + "version": "3.7.6" } }, "nbformat": 4, diff --git a/examples/merging/corpus_merge_demo.ipynb b/examples/merging/corpus_merge_demo.ipynb index 1ebcc136..25f212f1 100644 --- a/examples/merging/corpus_merge_demo.ipynb +++ b/examples/merging/corpus_merge_demo.ipynb @@ -38,24 +38,11 @@ "metadata": {}, "outputs": [], "source": [ - "corpus1 = Corpus(\n", - " utterances=[\n", - " Utterance(id=\"0\", conversation_id=\"0\", text=\"hello world\", speaker=Speaker(id=\"alice\")),\n", - " Utterance(\n", - " id=\"1\",\n", - " conversation_id=\"0\",\n", - " reply_to=0,\n", - " text=\"my name is bob\",\n", - " speaker=Speaker(id=\"bob\"),\n", - " ),\n", - " Utterance(\n", - " id=\"2\",\n", - " conversation_id=\"2\",\n", - " text=\"this is a sentence\",\n", - " speaker=Speaker(id=\"foxtrot\", meta={\"yellow\": \"food\"}),\n", - " ),\n", - " ]\n", - ")" + "corpus1 = 
Corpus(utterances = [\n", + " Utterance(id=\"0\", conversation_id=\"0\", text=\"hello world\", speaker=Speaker(id=\"alice\")),\n", + " Utterance(id=\"1\", conversation_id=\"0\", reply_to=0, text=\"my name is bob\", speaker=Speaker(id=\"bob\")),\n", + " Utterance(id=\"2\", conversation_id=\"2\", text=\"this is a sentence\", speaker=Speaker(id=\"foxtrot\", meta={\"yellow\": \"food\"})),\n", + " ])" ] }, { @@ -90,29 +77,11 @@ "metadata": {}, "outputs": [], "source": [ - "corpus2 = Corpus(\n", - " utterances=[\n", - " Utterance(\n", - " id=\"3\",\n", - " conversation_id=\"3\",\n", - " text=\"i like pie\",\n", - " speaker=Speaker(id=\"charlie\", meta={\"what\": \"a mood\", \"hey\": \"food\"}),\n", - " ),\n", - " Utterance(\n", - " id=\"4\",\n", - " conversation_id=\"3\",\n", - " reply_to=3,\n", - " text=\"sentence galore\",\n", - " speaker=Speaker(id=\"echo\"),\n", - " ),\n", - " Utterance(\n", - " id=\"2\",\n", - " conversation_id=\"2\",\n", - " text=\"this is a sentence\",\n", - " speaker=Speaker(id=\"foxtrot\", meta={\"yellow\": \"mood\", \"hello\": \"world\"}),\n", - " ),\n", - " ]\n", - ")" + "corpus2 = Corpus(utterances = [\n", + " Utterance(id=\"3\", conversation_id=\"3\", text=\"i like pie\", speaker=Speaker(id=\"charlie\", meta={\"what\": \"a mood\", \"hey\": \"food\"})),\n", + " Utterance(id='4', conversation_id='3', reply_to=3, text=\"sentence galore\", speaker=Speaker(id=\"echo\")),\n", + " Utterance(id='2', conversation_id='2', text=\"this is a sentence\", speaker=Speaker(id=\"foxtrot\", meta={\"yellow\": \"mood\", \"hello\": \"world\"})),\n", + " ])" ] }, { @@ -150,12 +119,12 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001b[91mWARNING: \u001b[0mMultiple values found for Speaker(id: 'foxtrot', vectors: [], meta: ConvoKitMeta({'yellow': 'food'})) for metadata key: 'yellow'. 
Taking the latest one found\n" + "\u001b[91mWARNING: \u001b[0mMultiple values found for Speaker(id: foxtrot, vectors: [], meta: {'yellow': 'food'}) for metadata key: 'yellow'. Taking the latest one found\n" ] } ], "source": [ - "corpus3 = Corpus.merge(corpus1, corpus2)" + "corpus3 = corpus1.merge(corpus2)" ] }, { @@ -199,7 +168,7 @@ { "data": { "text/plain": [ - "ConvoKitMeta({'yellow': 'mood', 'hello': 'world'})" + "{'yellow': 'mood', 'hello': 'world'}" ] }, "execution_count": 8, @@ -208,7 +177,7 @@ } ], "source": [ - "corpus3.get_speaker(\"foxtrot\").meta" + "corpus3.get_speaker('foxtrot').meta" ] }, { @@ -227,7 +196,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "[Speaker({'obj_type': 'speaker', 'vectors': [], 'owner': , 'id': 'alice', 'meta': ConvoKitMeta({})}), Speaker({'obj_type': 'speaker', 'vectors': [], 'owner': , 'id': 'bob', 'meta': ConvoKitMeta({})}), Speaker({'obj_type': 'speaker', 'vectors': [], 'owner': , 'id': 'foxtrot', 'meta': ConvoKitMeta({'yellow': 'mood', 'hello': 'world'})}), Speaker({'obj_type': 'speaker', 'vectors': [], 'owner': , 'id': 'charlie', 'meta': ConvoKitMeta({'what': 'a mood', 'hey': 'food'})}), Speaker({'obj_type': 'speaker', 'vectors': [], 'owner': , 'id': 'echo', 'meta': ConvoKitMeta({})})]\n", + "[Speaker({'obj_type': 'speaker', 'meta': {}, 'vectors': [], 'owner': , 'id': 'alice'}), Speaker({'obj_type': 'speaker', 'meta': {}, 'vectors': [], 'owner': , 'id': 'bob'}), Speaker({'obj_type': 'speaker', 'meta': {'yellow': 'mood', 'hello': 'world'}, 'vectors': [], 'owner': , 'id': 'foxtrot'}), Speaker({'obj_type': 'speaker', 'meta': {'what': 'a mood', 'hey': 'food'}, 'vectors': [], 'owner': , 'id': 'charlie'}), Speaker({'obj_type': 'speaker', 'meta': {}, 'vectors': [], 'owner': , 'id': 'echo'})]\n", "\n", "Number of Utterances: 1\n", "Number of Conversations: 1\n" @@ -236,7 +205,7 @@ ], "source": [ "print(list(corpus3.iter_speakers()))\n", - "speaker_echo = corpus3.get_speaker(\"echo\")\n", + "speaker_echo = 
corpus3.get_speaker('echo')\n", "print()\n", "speaker_echo.print_speaker_stats()" ] @@ -270,27 +239,12 @@ "metadata": {}, "outputs": [], "source": [ - "corpus4 = Corpus(\n", - " utterances=[\n", - " Utterance(\n", - " id=\"0\",\n", - " conversation_id=\"0\",\n", - " text=\"hello world\",\n", - " speaker=Speaker(id=\"alice\"),\n", - " meta={\"in\": \"wonderland\"},\n", - " ),\n", - " Utterance(\n", - " id=\"1\",\n", - " conversation_id=\"0\",\n", - " reply_to=\"0\",\n", - " text=\"my name is bob\",\n", - " speaker=Speaker(id=\"bob\"),\n", - " meta={\"fu\": \"bu\"},\n", - " ),\n", - " ]\n", - ")\n", - "corpus4.add_meta(\"AB\", 1)\n", - "corpus4.add_meta(\"CD\", 2)" + "corpus4 = Corpus(utterances = [\n", + " Utterance(id='0', conversation_id='0', text=\"hello world\", speaker=Speaker(id=\"alice\"), meta={'in': 'wonderland'}),\n", + " Utterance(id='1', conversation_id='0', reply_to='0', text=\"my name is bob\", speaker=Speaker(id=\"bob\"), meta={'fu': 'bu'})\n", + " ])\n", + "corpus4.add_meta('AB', 1)\n", + "corpus4.add_meta('CD', 2)\n" ] }, { @@ -325,27 +279,12 @@ "metadata": {}, "outputs": [], "source": [ - "corpus5 = Corpus(\n", - " utterances=[\n", - " Utterance(\n", - " id=\"0\",\n", - " conversation_id=\"0\",\n", - " text=\"hello world\",\n", - " speaker=Speaker(id=\"alice\"),\n", - " meta={\"in\": \"the hat\"},\n", - " ),\n", - " Utterance(\n", - " id=\"1\",\n", - " conversation_id=\"0\",\n", - " reply_to=\"0\",\n", - " text=\"my name is bobbb\",\n", - " speaker=Speaker(id=\"bob\"),\n", - " meta={\"barrel\": \"roll\"},\n", - " ),\n", - " ]\n", - ")\n", - "corpus5.add_meta(\"AB\", 3)\n", - "corpus5.add_meta(\"EF\", 3)" + "corpus5 = Corpus(utterances = [\n", + " Utterance(id='0', conversation_id='0', text=\"hello world\", speaker=Speaker(id=\"alice\"), meta={'in': 'the hat'}),\n", + " Utterance(id='1', conversation_id='0', reply_to='0', text=\"my name is bobbb\", speaker=Speaker(id=\"bob\"), meta={'barrel': 'roll'})\n", + " ])\n", + "corpus5.add_meta('AB', 3)\n", 
+ "corpus5.add_meta('EF', 3)" ] }, { @@ -378,15 +317,15 @@ "text": [ "\u001b[91mWARNING: \u001b[0mFound conflicting values for Utterance '0' for metadata key: 'in'. Overwriting with other corpus's Utterance metadata.\n", "\u001b[91mWARNING: \u001b[0mUtterances with same id do not share the same data:\n", - "Utterance(id: '1', conversation_id: 0, reply-to: 0, speaker: Speaker(id: 'bob', vectors: [], meta: ConvoKitMeta({})), timestamp: None, text: 'my name is bob', vectors: [], meta: ConvoKitMeta({'fu': 'bu'}))\n", - "Utterance(id: '1', conversation_id: 0, reply-to: 0, speaker: Speaker(id: 'bob', vectors: [], meta: ConvoKitMeta({})), timestamp: None, text: 'my name is bobbb', vectors: [], meta: ConvoKitMeta({'barrel': 'roll'}))\n", + "Utterance(id: '1', conversation_id: 0, reply-to: 0, speaker: Speaker(id: bob, vectors: [], meta: {}), timestamp: None, text: 'my name is bob', vectors: [], meta: {'fu': 'bu'})\n", + "Utterance(id: '1', conversation_id: 0, reply-to: 0, speaker: Speaker(id: bob, vectors: [], meta: {}), timestamp: None, text: 'my name is bobbb', vectors: [], meta: {'barrel': 'roll'})\n", "Ignoring second corpus's utterance.\n", - "\u001b[91mWARNING: \u001b[0mFound conflicting values for primary Corpus metadata key: 'AB'. Overwriting with secondary Corpus's metadata.\n" + "\u001b[91mWARNING: \u001b[0mFound conflicting values for Corpus metadata key: 'AB'. 
Overwriting with other Corpus's metadata.\n" ] } ], "source": [ - "corpus6 = Corpus.merge(corpus4, corpus5)" + "corpus6 = corpus4.merge(corpus5)" ] }, { @@ -416,7 +355,7 @@ { "data": { "text/plain": [ - "ConvoKitMeta({'AB': 3, 'CD': 2, 'EF': 3})" + "{'AB': 3, 'CD': 2, 'EF': 3}" ] }, "execution_count": 16, @@ -436,7 +375,7 @@ { "data": { "text/plain": [ - "Utterance({'obj_type': 'utterance', 'vectors': [], 'speaker_': Speaker({'obj_type': 'speaker', 'vectors': [], 'owner': , 'id': 'bob', 'meta': ConvoKitMeta({})}), 'owner': , 'id': '1', 'meta': ConvoKitMeta({'fu': 'bu'})})" + "Utterance({'obj_type': 'utterance', 'meta': {'fu': 'bu'}, 'vectors': [], 'speaker': Speaker({'obj_type': 'speaker', 'meta': {}, 'vectors': [], 'owner': , 'id': 'bob'}), 'conversation_id': '0', 'reply_to': '0', 'timestamp': None, 'text': 'my name is bob', 'owner': , 'id': '1'})" ] }, "execution_count": 17, @@ -445,7 +384,7 @@ } ], "source": [ - "corpus6.get_utterance(\"1\")" + "corpus6.get_utterance('1')" ] }, { @@ -456,7 +395,7 @@ { "data": { "text/plain": [ - "Utterance({'obj_type': 'utterance', 'vectors': [], 'speaker_': Speaker({'obj_type': 'speaker', 'vectors': [], 'owner': , 'id': 'alice', 'meta': ConvoKitMeta({})}), 'owner': , 'id': '0', 'meta': ConvoKitMeta({'in': 'the hat'})})" + "Utterance({'obj_type': 'utterance', 'meta': {'in': 'the hat'}, 'vectors': [], 'speaker': Speaker({'obj_type': 'speaker', 'meta': {}, 'vectors': [], 'owner': , 'id': 'alice'}), 'conversation_id': '0', 'reply_to': None, 'timestamp': None, 'text': 'hello world', 'owner': , 'id': '0'})" ] }, "execution_count": 18, @@ -465,7 +404,7 @@ } ], "source": [ - "corpus6.get_utterance(\"0\")" + "corpus6.get_utterance('0')" ] }, { @@ -483,8 +422,8 @@ { "data": { "text/plain": [ - "[Utterance({'obj_type': 'utterance', 'vectors': [], 'speaker_': Speaker({'obj_type': 'speaker', 'vectors': [], 'owner': , 'id': 'alice', 'meta': ConvoKitMeta({})}), 'owner': , 'id': '0', 'meta': ConvoKitMeta({'in': 'the hat'})}),\n", - " 
Utterance({'obj_type': 'utterance', 'vectors': [], 'speaker_': Speaker({'obj_type': 'speaker', 'vectors': [], 'owner': , 'id': 'bob', 'meta': ConvoKitMeta({})}), 'owner': , 'id': '1', 'meta': ConvoKitMeta({'fu': 'bu'})})]" + "[Utterance({'obj_type': 'utterance', 'meta': {'in': 'the hat'}, 'vectors': [], 'speaker': Speaker({'obj_type': 'speaker', 'meta': {}, 'vectors': [], 'owner': , 'id': 'alice'}), 'conversation_id': '0', 'reply_to': None, 'timestamp': None, 'text': 'hello world', 'owner': , 'id': '0'}),\n", + " Utterance({'obj_type': 'utterance', 'meta': {'fu': 'bu'}, 'vectors': [], 'speaker': Speaker({'obj_type': 'speaker', 'meta': {}, 'vectors': [], 'owner': , 'id': 'bob'}), 'conversation_id': '0', 'reply_to': '0', 'timestamp': None, 'text': 'my name is bob', 'owner': , 'id': '1'})]" ] }, "execution_count": 19, @@ -498,11 +437,11 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "corpus6.dump(\"temp-corpus\", \"./\")" + "corpus6.dump('temp-corpus', './')" ] }, { @@ -515,9 +454,9 @@ ], "metadata": { "kernelspec": { - "display_name": "convokit_git", + "display_name": "temp-venv", "language": "python", - "name": "python3" + "name": "temp-venv" }, "language_info": { "codemirror_mode": { @@ -529,7 +468,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.0" + "version": "3.7.6" } }, "nbformat": 4, diff --git a/examples/politeness-strategies/Politeness_Marker_and_Summarize_Demo.ipynb b/examples/politeness-strategies/Politeness_Marker_and_Summarize_Demo.ipynb index 9bca5746..a5bfb482 100644 --- a/examples/politeness-strategies/Politeness_Marker_and_Summarize_Demo.ipynb +++ b/examples/politeness-strategies/Politeness_Marker_and_Summarize_Demo.ipynb @@ -172,7 +172,7 @@ } ], "source": [ - "utt = corpus.get_utterance(\"480633\")\n", + "utt = corpus.get_utterance('480633')\n", "print(\"RAW TEXT: \" + utt.text + \"\\n\")\n", "\n", "print(\"Sentences: 
\")\n", @@ -180,15 +180,13 @@ " stra = \"\"\n", " for y in x[\"toks\"]:\n", " stra += \" \" + y[\"tok\"]\n", - "\n", - " print(str(i) + \" \" + stra[:50] + \"...\")\n", - "\n", + " \n", + " print(str(i) + \" \" + stra[:50] + \"...\") \n", + " \n", "print()\n", - "for (k, v), (k1, v2) in zip(\n", - " utt.meta[\"politeness_strategies\"].items(), utt.meta[\"politeness_markers\"].items()\n", - "):\n", + "for ((k,v),(k1,v2)) in zip(utt.meta[\"politeness_strategies\"].items(),utt.meta[\"politeness_markers\"].items()):\n", " if v != 0:\n", - " print(k[21 : len(k) - 2] + \" results:\")\n", + " print(k[21:len(k)-2] + \" results:\")\n", " print(\"Markers: \" + str(v2) + \"\\n\")" ] }, @@ -243,12 +241,10 @@ } ], "source": [ - "spacy_nlp = spacy.load(\"en_core_web_sm\", disable=[\"ner\"])\n", + "spacy_nlp = spacy.load('en_core_web_sm', disable=['ner'])\n", "\n", - "utt = ps.transform_utterance(\n", - " \"hello, could you please help me proofread this article?\", spacy_nlp=spacy_nlp\n", - ")\n", - "utt.meta[\"politeness_strategies\"]" + "utt = ps.transform_utterance(\"hello, could you please help me proofread this article?\", spacy_nlp=spacy_nlp)\n", + "utt.meta['politeness_strategies']" ] }, { @@ -298,7 +294,7 @@ } ], "source": [ - "data = ps.summarize(corpus, plot=True, y_lim=1.6)" + "data = ps.summarize(corpus, plot=True, y_lim = 1.6)" ] }, { @@ -331,8 +327,8 @@ } ], "source": [ - "query = lambda x: not x.meta[\"is-admin\"]\n", - "non_admin_data = ps.summarize(corpus, query, plot=True, y_lim=1.6)" + "query = lambda x : not x.meta[\"is-admin\"]\n", + "non_admin_data = ps.summarize(corpus, query, plot = True, y_lim = 1.6)" ] }, { @@ -361,8 +357,8 @@ } ], "source": [ - "query = lambda x: x.meta[\"is-admin\"]\n", - "admin_data = ps.summarize(corpus, query, plot=True, y_lim=1.6)" + "query = lambda x : x.meta[\"is-admin\"]\n", + "admin_data = ps.summarize(corpus, query, plot = True, y_lim = 1.6)" ] }, { diff --git 
a/examples/politeness-strategies/Politeness_Strategies_in_MT-mediated_Communication.ipynb b/examples/politeness-strategies/Politeness_Strategies_in_MT-mediated_Communication.ipynb index 17610c17..eed740de 100644 --- a/examples/politeness-strategies/Politeness_Strategies_in_MT-mediated_Communication.ipynb +++ b/examples/politeness-strategies/Politeness_Strategies_in_MT-mediated_Communication.ipynb @@ -35,7 +35,6 @@ "\n", "import seaborn as sns\n", "from matplotlib import pyplot as plt\n", - "\n", "%matplotlib inline" ] }, @@ -68,7 +67,7 @@ "metadata": {}, "outputs": [], "source": [ - "# OPTION 1: DOWNLOAD CORPUS\n", + "# OPTION 1: DOWNLOAD CORPUS \n", "# UNCOMMENT THESE LINES TO DOWNLOAD CORPUS\n", "# DATA_DIR = ''\n", "# WIKI_ROOT_DIR = download('wiki-corpus', data_dir=DATA_DIR)\n", @@ -80,7 +79,7 @@ "corpus = Corpus(filename=WIKI_ROOT_DIR)\n", "\n", "# load parses\n", - "corpus.load_info(\"utterance\", [\"parsed\"])" + "corpus.load_info('utterance',['parsed'])" ] }, { @@ -99,7 +98,7 @@ } ], "source": [ - "# Overall stats of the dataset\n", + "# Overall stats of the dataset \n", "corpus.print_summary_stats()" ] }, @@ -168,7 +167,7 @@ "source": [ "ps_local = PolitenessStrategies(strategy_collection=\"politeness_local\", verbose=10000)\n", "\n", - "# By default, strategy extraction results are saved under \"politeness_strategies\".\n", + "# By default, strategy extraction results are saved under \"politeness_strategies\". 
\n", "corpus = ps_local.transform(corpus, markers=True)" ] }, @@ -236,9 +235,8 @@ "outputs": [], "source": [ "# utterance-level strategy uses\n", - "df_feat = pd.DataFrame.from_dict(\n", - " {utt.id: utt.meta[\"politeness_strategies\"] for utt in corpus.iter_utterances()}, orient=\"index\"\n", - ")" + "df_feat = pd.DataFrame.from_dict({utt.id: utt.meta['politeness_strategies'] \\\n", + " for utt in corpus.iter_utterances()}, orient='index')" ] }, { @@ -252,13 +250,11 @@ "sampled_ids, samples = set(), []\n", "\n", "for k in sorted_strategies:\n", - " df_sample = df_feat[(~df_feat.index.isin(sampled_ids)) & (df_feat[k] == 1)].sample(\n", - " 1000, random_state=42\n", - " )\n", - " df_sample[\"strategy\"] = k\n", - " samples.append(df_sample[[\"strategy\"]])\n", + " df_sample = df_feat[(~df_feat.index.isin(sampled_ids)) & (df_feat[k]==1)].sample(1000, random_state=42)\n", + " df_sample['strategy'] = k\n", + " samples.append(df_sample[['strategy']])\n", " sampled_ids.update(df_sample.index)\n", - "\n", + " \n", "df_en_sample = pd.concat(samples)" ] }, @@ -271,14 +267,12 @@ "# saving as a convokit corpus\n", "for i, info in df_en_sample.itertuples():\n", " utt = corpus.get_utterance(i)\n", - " utt.add_meta(\"selected\", True)\n", - " utt.add_meta(\"strategy\", info)\n", + " utt.add_meta('selected', True)\n", + " utt.add_meta('strategy', info)\n", "\n", - "# filter only selected utterances\n", + "# filter only selected utterances \n", "# (not that this does not maintain conversation structure)\n", - "wiki_sampled_en = corpus.filter_utterances_by(\n", - " lambda utt: \"selected\" in utt.meta and utt.meta[\"selected\"]\n", - ")" + "wiki_sampled_en = corpus.filter_utterances_by(lambda utt:'selected' in utt.meta and utt.meta['selected'])" ] }, { @@ -311,7 +305,7 @@ "df_utts = wiki_sampled_en.get_utterances_dataframe(exclude_meta=True)\n", "\n", "# translation model\n", - "model = EasyNMT(\"opus-mt\", cache_folder=\"/belafonte_sauna/liye_translations/easynmt/\")" + "model = 
EasyNMT('opus-mt', cache_folder=\"/belafonte_sauna/liye_translations/easynmt/\")" ] }, { @@ -328,14 +322,12 @@ } ], "source": [ - "df_utts[\"en-zh\"] = model.translate(\n", - " list(df_utts[\"text\"]),\n", - " target_lang=\"zh\",\n", - " source_lang=\"en\",\n", - " show_progress_bar=True,\n", - " batch_size=8,\n", - " perform_sentence_splitting=False,\n", - ")" + "df_utts['en-zh'] = model.translate(list(df_utts['text']), \\\n", + " target_lang='zh', \\\n", + " source_lang='en', \\\n", + " show_progress_bar=True,\n", + " batch_size=8, \\\n", + " perform_sentence_splitting=False)" ] }, { @@ -352,14 +344,12 @@ } ], "source": [ - "df_utts[\"en-back\"] = model.translate(\n", - " list(df_utts[\"en-zh\"]),\n", - " target_lang=\"en\",\n", - " source_lang=\"zh\",\n", - " show_progress_bar=True,\n", - " batch_size=8,\n", - " perform_sentence_splitting=False,\n", - ")" + "df_utts['en-back'] = model.translate(list(df_utts['en-zh']), \\\n", + " target_lang='en', \\\n", + " source_lang='zh', \\\n", + " show_progress_bar=True,\n", + " batch_size=8, \\\n", + " perform_sentence_splitting=False)" ] }, { @@ -384,11 +374,11 @@ "metadata": {}, "outputs": [], "source": [ - "for row in df_utts[[\"text\", \"en-zh\", \"en-back\"]].itertuples():\n", + "for row in df_utts[['text', 'en-zh', 'en-back']].itertuples():\n", " idx, trans, backtrans = row[0], row[2], row[3]\n", " utt = wiki_sampled_en.get_utterance(idx)\n", - " utt.add_meta(\"en-zh\", trans)\n", - " utt.add_meta(\"en-back\", backtrans)" + " utt.add_meta('en-zh', trans)\n", + " utt.add_meta('en-back', backtrans)" ] }, { @@ -397,14 +387,14 @@ "metadata": {}, "outputs": [], "source": [ - "# parser to parse back-translated English texts\n", - "en_parser = TextParser(output_field=\"en_parsed\", input_field=\"en-back\", verbosity=5000)\n", + "# parser to parse back-translated English texts \n", + "en_parser = TextParser(output_field='en_parsed', input_field='en-back', \\\n", + " verbosity=5000)\n", "\n", "# parer to parse translated 
texts in Chinese\n", - "spacy_zh = spacy.load(\"zh_core_web_sm\", disable=[\"ner\"])\n", - "zh_parser = TextParser(\n", - " output_field=\"zh_parsed\", input_field=\"en-zh\", spacy_nlp=spacy_zh, verbosity=5000\n", - ")" + "spacy_zh = spacy.load('zh_core_web_sm', disable=['ner'])\n", + "zh_parser = TextParser(output_field='zh_parsed', input_field='en-zh', \\\n", + " spacy_nlp=spacy_zh, verbosity=5000)" ] }, { @@ -469,7 +459,7 @@ } ], "source": [ - "wiki_sampled_zh = Corpus(download(\"wiki-sampled-zh-corpus\"))" + "wiki_sampled_zh = Corpus(download('wiki-sampled-zh-corpus'))" ] }, { @@ -490,8 +480,8 @@ ], "source": [ "# Inspect the meta data avaible, should have the following:\n", - "# 'parsed' contains the dependency parses for the utterance text\n", - "# 'zh-en' and 'zh-back' contains the translations and back translations for utterance texts respectively\n", + "# 'parsed' contains the dependency parses for the utterance text \n", + "# 'zh-en' and 'zh-back' contains the translations and back translations for utterance texts respectively \n", "# 'en_parsed' and 'zh_parsed' contain the respective parses, which we will use for strategy extractions\n", "wiki_sampled_zh.meta_index" ] @@ -538,13 +528,13 @@ } ], "source": [ - "# Download the data if Part 1 of the notebook is skipped\n", + "# Download the data if Part 1 of the notebook is skipped \n", "\n", - "# replace with where you'd like the corpora to be saved\n", - "DATA_DIR = \"/belafonte_sauna/liye_translations/convokit_mt/test/\"\n", + "# replace with where you'd like the corpora to be saved \n", + "DATA_DIR = '/belafonte_sauna/liye_translations/convokit_mt/test/'\n", "\n", - "wiki_sampled_en = Corpus(download(\"wiki-sampled-en-corpus\", data_dir=DATA_DIR))\n", - "wiki_sampled_zh = Corpus(download(\"wiki-sampled-zh-corpus\", data_dir=DATA_DIR))" + "wiki_sampled_en = Corpus(download('wiki-sampled-en-corpus', data_dir=DATA_DIR))\n", + "wiki_sampled_zh = Corpus(download('wiki-sampled-zh-corpus', data_dir=DATA_DIR))" 
] }, { @@ -605,19 +595,15 @@ "metadata": {}, "outputs": [], "source": [ - "ps_zh = PolitenessStrategies(\n", - " parse_attribute_name=\"zh_parsed\",\n", - " strategy_attribute_name=\"zh_strategies\",\n", - " strategy_collection=\"politeness_cscw_zh\",\n", - " verbose=5000,\n", - ")\n", + "ps_zh = PolitenessStrategies(parse_attribute_name='zh_parsed', \\\n", + " strategy_attribute_name=\"zh_strategies\", \\\n", + " strategy_collection=\"politeness_cscw_zh\", \n", + " verbose=5000)\n", "\n", - "ps_en = PolitenessStrategies(\n", - " parse_attribute_name=\"en_parsed\",\n", - " strategy_attribute_name=\"en_strategies\",\n", - " strategy_collection=\"politeness_local\",\n", - " verbose=5000,\n", - ")" + "ps_en = PolitenessStrategies(parse_attribute_name='en_parsed', \\\n", + " strategy_attribute_name=\"en_strategies\", \\\n", + " strategy_collection=\"politeness_local\",\n", + " verbose=5000)" ] }, { @@ -695,32 +681,31 @@ "metadata": {}, "outputs": [], "source": [ - "# Mapping between strategy names in different collections\n", - "# Note that the collections are not exactly equivalent,\n", + "# Mapping between strategy names in different collections \n", + "# Note that the collections are not exactly equivalent, \n", "# i.e., there are strategies we can't find a close match between the two collections\n", "\n", - "en2zh = {\n", - " \"Actually\": \"factuality\",\n", - " \"Adverb.Just\": None,\n", - " \"Affirmation\": \"praise\",\n", - " \"Apology\": \"apologetic\",\n", - " \"By.The.Way\": \"indirect_btw\",\n", - " \"Conj.Start\": \"start_so\",\n", - " \"Filler\": None,\n", - " \"For.Me\": None,\n", - " \"For.You\": None,\n", - " \"Gratitude\": \"gratitude\",\n", - " \"Greeting\": \"greeting\",\n", - " \"Hedges\": \"hedge\",\n", - " \"Indicative\": \"can_you\",\n", - " \"Please\": \"please\",\n", - " \"Please.Start\": \"start_please\",\n", - " \"Reassurance\": None,\n", - " \"Subjunctive\": \"could_you\",\n", - " \"Swearing\": \"taboo\",\n", - "}\n", + "en2zh = 
{'Actually': 'factuality',\n", + " 'Adverb.Just': None,\n", + " 'Affirmation': 'praise',\n", + " 'Apology': 'apologetic',\n", + " 'By.The.Way': 'indirect_btw',\n", + " 'Conj.Start': 'start_so',\n", + " 'Filler': None,\n", + " 'For.Me': None,\n", + " 'For.You': None,\n", + " 'Gratitude': 'gratitude',\n", + " 'Greeting':'greeting',\n", + " 'Hedges':'hedge',\n", + " 'Indicative':'can_you',\n", + " 'Please': 'please',\n", + " 'Please.Start': 'start_please',\n", + " 'Reassurance': None,\n", + " 'Subjunctive': 'could_you',\n", + " 'Swearing': 'taboo'\n", + " }\n", "\n", - "zh2en = {v: k for k, v in en2zh.items() if v}" + "zh2en = {v:k for k,v in en2zh.items() if v}" ] }, { @@ -731,21 +716,22 @@ "source": [ "# add utterance-level assessing result to utterance metadata for the English corpus\n", "for utt in wiki_sampled_en.iter_utterances():\n", - " # strategy names in English and Chinese\n", - " en_name = utt.retrieve_meta(\"strategy\")\n", + " \n", + " # strategy names in English and Chinese \n", + " en_name = utt.retrieve_meta('strategy')\n", " zh_name = en2zh[en_name]\n", - "\n", + " \n", " # translations\n", " if zh_name:\n", - " trans_status = utt.retrieve_meta(\"zh_strategies\")[zh_name]\n", - " utt.add_meta(\"translation_result\", trans_status)\n", + " trans_status = utt.retrieve_meta('zh_strategies')[zh_name]\n", + " utt.add_meta('translation_result', trans_status)\n", " else:\n", - " # when a comparison isn't applicable, we use the value -1\n", - " utt.add_meta(\"translation_result\", -1)\n", - "\n", - " # back translations\n", - " backtrans_status = utt.retrieve_meta(\"en_strategies\")[en_name]\n", - " utt.add_meta(\"backtranslation_result\", backtrans_status)" + " # when a comparison isn't applicable, we use the value -1 \n", + " utt.add_meta('translation_result', -1)\n", + " \n", + " # back translations \n", + " backtrans_status = utt.retrieve_meta('en_strategies')[en_name]\n", + " utt.add_meta('backtranslation_result', backtrans_status)" ] }, { @@ -756,18 
+742,19 @@ "source": [ "# add utterance-level assessing result to utterance metadata for the Chinese corpus\n", "for utt in wiki_sampled_zh.iter_utterances():\n", - " # strategy names in English and Chinese\n", - " zh_name = utt.retrieve_meta(\"strategy\")\n", + " \n", + " # strategy names in English and Chinese \n", + " zh_name = utt.retrieve_meta('strategy')\n", " en_name = zh2en[zh_name]\n", - "\n", + " \n", " # translations\n", " if en_name:\n", - " trans_status = utt.retrieve_meta(\"en_strategies\")[en_name]\n", - " utt.add_meta(\"translation_result\", trans_status)\n", - "\n", - " # back translations\n", - " backtrans_status = utt.retrieve_meta(\"zh_strategies\")[zh_name]\n", - " utt.add_meta(\"backtranslation_result\", backtrans_status)" + " trans_status = utt.retrieve_meta('en_strategies')[en_name]\n", + " utt.add_meta('translation_result', trans_status)\n", + " \n", + " # back translations \n", + " backtrans_status = utt.retrieve_meta('zh_strategies')[zh_name]\n", + " utt.add_meta('backtranslation_result', backtrans_status)" ] }, { @@ -801,15 +788,16 @@ "metadata": {}, "outputs": [], "source": [ - "# results for the English corpus\n", - "res_df_en = wiki_sampled_en.get_attribute_table(\n", - " obj_type=\"utterance\", attrs=[\"strategy\", \"translation_result\", \"backtranslation_result\"]\n", - ")\n", + "# results for the English corpus \n", + "res_df_en = wiki_sampled_en.get_attribute_table(obj_type='utterance', \\\n", + " attrs=['strategy', \\\n", + " 'translation_result', \\\n", + " 'backtranslation_result'])\n", "\n", - "res_df_en.columns = [\"strategy\", \"en->zh\", \"en->zh->en\"]\n", + "res_df_en.columns = ['strategy', 'en->zh', 'en->zh->en']\n", "\n", - "# strategy-level permeability, -1 means the strategy is not applicable\n", - "permeability_df_en = res_df_en.groupby(\"strategy\").sum() / 1000" + "# strategy-level permeability, -1 means the strategy is not applicable \n", + "permeability_df_en = res_df_en.groupby('strategy').sum() / 1000" ] }, { 
@@ -818,30 +806,14 @@ "metadata": {}, "outputs": [], "source": [ - "# As a reference, we include permeability computed through an informal small-scale human annotations\n", + "# As a reference, we include permeability computed through an informal small-scale human annotations \n", "# (50 instances, one annotator)\n", - "reference = {\n", - " \"Actually\": 0.7,\n", - " \"Adverb.Just\": 0.62,\n", - " \"Affirmation\": 0.8,\n", - " \"Apology\": 0.94,\n", - " \"By.The.Way\": 0.42,\n", - " \"Conj.Start\": 0.66,\n", - " \"Filler\": 0.58,\n", - " \"For.Me\": 0.62,\n", - " \"For.You\": 0.52,\n", - " \"Gratitude\": 0.86,\n", - " \"Greeting\": 0.52,\n", - " \"Hedges\": 0.68,\n", - " \"Indicative\": 0.64,\n", - " \"Please\": 0.72,\n", - " \"Please.Start\": 0.82,\n", - " \"Reassurance\": 0.88,\n", - " \"Subjunctive\": 0.0,\n", - " \"Swearing\": 0.3,\n", - "}\n", - "\n", - "permeability_df_en[\"reference\"] = [reference[name] for name in permeability_df_en.index]" + "reference = {'Actually': 0.7, 'Adverb.Just': 0.62, 'Affirmation': 0.8, 'Apology': 0.94, 'By.The.Way': 0.42,\n", + " 'Conj.Start': 0.66, 'Filler': 0.58, 'For.Me': 0.62, 'For.You': 0.52, 'Gratitude': 0.86,\n", + " 'Greeting': 0.52, 'Hedges': 0.68, 'Indicative': 0.64, 'Please': 0.72, 'Please.Start': 0.82,\n", + " 'Reassurance': 0.88, 'Subjunctive': 0.0, 'Swearing': 0.3}\n", + "\n", + "permeability_df_en['reference'] = [reference[name] for name in permeability_df_en.index]" ] }, { @@ -851,10 +823,8 @@ "outputs": [], "source": [ "# As further context, we can inlcude information about strategy prevalence on our plot\n", - "prevalence_en = dict(df_prevalence * 100)\n", - "permeability_df_en.index = [\n", - " f\"{name} ({prevalence_en[name]:.1f}%)\" for name in permeability_df_en.index\n", - "]" + "prevalence_en = dict(df_prevalence*100)\n", + "permeability_df_en.index = [f\"{name} ({prevalence_en[name]:.1f}%)\" for name in permeability_df_en.index]" ] }, { @@ -879,9 +849,7 @@ "\n", "# cells that are not applicable are 
masked in white\n", "with sns.axes_style(\"white\"):\n", - " sns.heatmap(\n", - " permeability_df_en, annot=True, cmap=\"Greens\", fmt=\".1%\", mask=permeability_df_en == -1\n", - " )" + " sns.heatmap(permeability_df_en, annot=True, cmap=\"Greens\", fmt=\".1%\", mask=permeability_df_en==-1)" ] }, { @@ -897,16 +865,17 @@ "metadata": {}, "outputs": [], "source": [ - "# results for the English corpus\n", - "res_df_zh = wiki_sampled_zh.get_attribute_table(\n", - " obj_type=\"utterance\", attrs=[\"strategy\", \"translation_result\", \"backtranslation_result\"]\n", - ")\n", + "# results for the English corpus \n", + "res_df_zh = wiki_sampled_zh.get_attribute_table(obj_type='utterance', \\\n", + " attrs=['strategy', \\\n", + " 'translation_result', \\\n", + " 'backtranslation_result'])\n", "\n", - "# convert names to make it easier to compare between directions\n", - "res_df_zh[\"strategy\"] = res_df_zh[\"strategy\"].apply(lambda name: zh2en[name])\n", + "# convert names to make it easier to compare between directions \n", + "res_df_zh['strategy'] = res_df_zh['strategy'].apply(lambda name:zh2en[name])\n", "\n", - "res_df_zh.columns = [\"strategy\", \"zh->en\", \"zh->en->zh\"]\n", - "permeability_df_zh = res_df_zh.groupby(\"strategy\").sum() / 1000" + "res_df_zh.columns = ['strategy', 'zh->en', 'zh->en->zh'] \n", + "permeability_df_zh = res_df_zh.groupby('strategy').sum() / 1000" ] }, { @@ -918,25 +887,13 @@ "# as the original dataset for the Chinese corpus is quite large\n", "# we present strategy prevalence results directly\n", "\n", - "prevalence_zh = {\n", - " \"apologetic\": 0.6,\n", - " \"can_you\": 0.3,\n", - " \"could_you\": 0.0,\n", - " \"factuality\": 0.4,\n", - " \"gratitude\": 3.1,\n", - " \"greeting\": 0.0,\n", - " \"hedge\": 42.8,\n", - " \"indirect_btw\": 0.1,\n", - " \"praise\": 0.4,\n", - " \"please\": 25.4,\n", - " \"start_please\": 17.7,\n", - " \"start_so\": 0.7,\n", - " \"taboo\": 0.4,\n", - "}\n", - "\n", - "permeability_df_zh.index = [\n", - " 
f\"{name} ({prevalence_zh[en2zh[name]]:.1f}%)\" for name in permeability_df_zh.index\n", - "]" + "prevalence_zh = {'apologetic': 0.6, 'can_you': 0.3, 'could_you': 0.0, \n", + " 'factuality': 0.4,'gratitude': 3.1, 'greeting': 0.0, \n", + " 'hedge': 42.8, 'indirect_btw': 0.1,\n", + " 'praise': 0.4, 'please': 25.4, \n", + " 'start_please': 17.7, 'start_so': 0.7, 'taboo': 0.4}\n", + "\n", + "permeability_df_zh.index = [f\"{name} ({prevalence_zh[en2zh[name]]:.1f}%)\" for name in permeability_df_zh.index]" ] }, { diff --git a/examples/politeness-strategies/politeness_demo.ipynb b/examples/politeness-strategies/politeness_demo.ipynb index 2af08c30..6003920a 100755 --- a/examples/politeness-strategies/politeness_demo.ipynb +++ b/examples/politeness-strategies/politeness_demo.ipynb @@ -111,7 +111,6 @@ "outputs": [], "source": [ "from convokit import TextParser\n", - "\n", "parser = TextParser(verbosity=1000)" ] }, @@ -150,7 +149,6 @@ "outputs": [], "source": [ "from convokit import PolitenessStrategies\n", - "\n", "ps = PolitenessStrategies()" ] }, @@ -438,7 +436,7 @@ } ], "source": [ - "wiki_corpus.get_utterance(\"434044\").meta" + "wiki_corpus.get_utterance('434044').meta" ] }, { @@ -493,13 +491,11 @@ } ], "source": [ - "utt = wiki_corpus.get_utterance(\"434044\")\n", + "utt = wiki_corpus.get_utterance('434044')\n", "print(\"RAW TEXT: \" + utt.text + \"\\n\")\n", - "for (k, v), (k1, v2) in zip(\n", - " utt.meta[\"politeness_strategies\"].items(), utt.meta[\"politeness_markers\"].items()\n", - "):\n", + "for ((k,v),(k1,v2)) in zip(utt.meta[\"politeness_strategies\"].items(),utt.meta[\"politeness_markers\"].items()):\n", " if v != 0:\n", - " print(k[21 : len(k) - 2] + \" results:\")\n", + " print(k[21:len(k)-2] + \" results:\")\n", " print(\"Markers: \" + str(v2) + \"\\n\")" ] }, @@ -565,8 +561,8 @@ } ], "source": [ - "query = lambda x: x.meta[\"politeness_strategies\"][\"feature_politeness_==HASPOSITIVE==\"] == 1\n", - "positive_data = ps.summarize(wiki_corpus, query, 
plot=True)" + "query = lambda x : x.meta[\"politeness_strategies\"][\"feature_politeness_==HASPOSITIVE==\"] == 1\n", + "positive_data = ps.summarize(wiki_corpus, query, plot = True)" ] }, { @@ -612,9 +608,7 @@ "metadata": {}, "outputs": [], "source": [ - "binary_corpus = Corpus(\n", - " utterances=[utt for utt in wiki_corpus.iter_utterances() if utt.meta[\"Binary\"] != 0]\n", - ")" + "binary_corpus = Corpus(utterances=[utt for utt in wiki_corpus.iter_utterances() if utt.meta[\"Binary\"] != 0])" ] }, { @@ -660,11 +654,9 @@ } ], "source": [ - "clf_cv = Classifier(\n", - " obj_type=\"utterance\",\n", - " pred_feats=[\"politeness_strategies\"],\n", - " labeller=lambda utt: utt.meta[\"Binary\"] == 1,\n", - ")\n", + "clf_cv = Classifier(obj_type=\"utterance\", \n", + " pred_feats=[\"politeness_strategies\"], \n", + " labeller=lambda utt: utt.meta['Binary'] == 1)\n", "\n", "clf_cv.evaluate_with_cv(binary_corpus)" ] @@ -705,11 +697,9 @@ } ], "source": [ - "clf_split = Classifier(\n", - " obj_type=\"utterance\",\n", - " pred_feats=[\"politeness_strategies\"],\n", - " labeller=lambda utt: utt.meta[\"Binary\"] == 1,\n", - ")\n", + "clf_split = Classifier(obj_type=\"utterance\", \n", + " pred_feats=[\"politeness_strategies\"], \n", + " labeller=lambda utt: utt.meta['Binary'] == 1)\n", "\n", "clf_split.evaluate_with_train_test_split(binary_corpus)" ] @@ -736,17 +726,10 @@ ], "source": [ "test_ids = binary_corpus.get_utterance_ids()[-100:]\n", - "train_corpus = Corpus(\n", - " utterances=[utt for utt in binary_corpus.iter_utterances() if utt.id not in test_ids]\n", - ")\n", - "test_corpus = Corpus(\n", - " utterances=[utt for utt in binary_corpus.iter_utterances() if utt.id in test_ids]\n", - ")\n", - "print(\n", - " \"train size = {}, test size = {}\".format(\n", - " len(train_corpus.get_utterance_ids()), len(test_corpus.get_utterance_ids())\n", - " )\n", - ")" + "train_corpus = Corpus(utterances=[utt for utt in binary_corpus.iter_utterances() if utt.id not in test_ids])\n", + 
"test_corpus = Corpus(utterances=[utt for utt in binary_corpus.iter_utterances() if utt.id in test_ids])\n", + "print(\"train size = {}, test size = {}\".format(len(train_corpus.get_utterance_ids()),\n", + " len(test_corpus.get_utterance_ids())))" ] }, { @@ -780,11 +763,9 @@ } ], "source": [ - "clf = Classifier(\n", - " obj_type=\"utterance\",\n", - " pred_feats=[\"politeness_strategies\"],\n", - " labeller=lambda utt: utt.meta[\"Binary\"] == 1,\n", - ")\n", + "clf = Classifier(obj_type=\"utterance\", \n", + " pred_feats=[\"politeness_strategies\"], \n", + " labeller=lambda utt: utt.meta['Binary'] == 1)\n", "clf.fit(train_corpus)" ] }, @@ -982,7 +963,7 @@ "for i, idx in enumerate(test_ids[0:5]):\n", " print(i)\n", " test_utt = test_corpus.get_utterance(idx)\n", - " ypred, yprob = test_utt.meta[\"prediction\"], test_utt.meta[\"pred_score\"]\n", + " ypred, yprob = test_utt.meta['prediction'], test_utt.meta['pred_score']\n", " print(\"test utterance:\\n{}\".format(test_utt.text))\n", " print(\"------------------------\")\n", " print(\"Result: {}, probability estimates = {}\\n\".format(pred2label[ypred], yprob))" diff --git a/examples/politeness-strategies/short-politeness-example.ipynb b/examples/politeness-strategies/short-politeness-example.ipynb index ba6b5594..0749087a 100644 --- a/examples/politeness-strategies/short-politeness-example.ipynb +++ b/examples/politeness-strategies/short-politeness-example.ipynb @@ -41,7 +41,7 @@ } ], "source": [ - "train_corpus = Corpus(filename=download(\"wiki-politeness-annotated\"))" + "train_corpus = Corpus(filename=download('wiki-politeness-annotated'))" ] }, { @@ -58,7 +58,7 @@ } ], "source": [ - "test_corpus = Corpus(filename=download(\"reddit-corpus-small\"))" + "test_corpus = Corpus(filename=download('reddit-corpus-small'))" ] }, { @@ -86,7 +86,7 @@ ], "source": [ "parser = TextParser()\n", - "parser.transform(train_corpus)" + "parser.transform(train_corpus)\n" ] }, { @@ -151,11 +151,8 @@ "metadata": {}, "outputs": [], 
"source": [ - "clf = Classifier(\n", - " obj_type=\"utterance\",\n", - " pred_feats=[\"politeness_strategies\"],\n", - " labeller=lambda utt: utt.meta[\"Binary\"] == 1,\n", - ")" + "clf = Classifier(obj_type='utterance', pred_feats=['politeness_strategies'], \n", + " labeller=lambda utt: utt.meta['Binary']==1)" ] }, { @@ -204,8 +201,8 @@ "metadata": {}, "outputs": [], "source": [ - "aww_vals = clf.summarize(test_corpus, selector=lambda utt: utt.meta[\"subreddit\"] == \"aww\")\n", - "politics_vals = clf.summarize(test_corpus, selector=lambda utt: utt.meta[\"subreddit\"] == \"politics\")" + "aww_vals = clf.summarize(test_corpus, selector=lambda utt: utt.meta['subreddit']=='aww')\n", + "politics_vals = clf.summarize(test_corpus, selector=lambda utt: utt.meta['subreddit']=='politics')" ] }, { @@ -214,8 +211,8 @@ "metadata": {}, "outputs": [], "source": [ - "print(aww_vals[\"pred_score\"].mean())\n", - "print(politics_vals[\"pred_score\"].mean())" + "print(aww_vals['pred_score'].mean())\n", + "print(politics_vals['pred_score'].mean())" ] }, { diff --git a/examples/prompt-types/prompt-type-demo.ipynb b/examples/prompt-types/prompt-type-demo.ipynb index 333e7213..bdad1062 100644 --- a/examples/prompt-types/prompt-type-demo.ipynb +++ b/examples/prompt-types/prompt-type-demo.ipynb @@ -84,7 +84,7 @@ }, "outputs": [], "source": [ - "# OPTION 1: DOWNLOAD CORPUS\n", + "# OPTION 1: DOWNLOAD CORPUS \n", "# UNCOMMENT THESE LINES TO DOWNLOAD CORPUS\n", "# DATA_DIR = ''\n", "# ROOT_DIR = download('parliament-corpus', data_dir=DATA_DIR)\n", @@ -94,7 +94,7 @@ "# ROOT_DIR = ''\n", "\n", "corpus = convokit.Corpus(ROOT_DIR)\n", - "corpus.load_info(\"utterance\", [\"parsed\"])" + "corpus.load_info('utterance',['parsed'])" ] }, { @@ -109,8 +109,7 @@ "outputs": [], "source": [ "import warnings\n", - "\n", - "warnings.filterwarnings(\"ignore\")" + "warnings.filterwarnings('ignore')" ] }, { @@ -152,7 +151,7 @@ }, "outputs": [], "source": [ - "test_utt_id = \"1997-01-27a.4.0\"\n", + 
"test_utt_id = '1997-01-27a.4.0'\n", "utt = corpus.get_utterance(test_utt_id)" ] }, @@ -295,7 +294,7 @@ } ], "source": [ - "get_arcs = TextToArcs(\"arcs\", verbosity=VERBOSITY)\n", + "get_arcs = TextToArcs('arcs', verbosity=VERBOSITY)\n", "corpus = get_arcs.transform(corpus)" ] }, @@ -341,7 +340,7 @@ } ], "source": [ - "utt.retrieve_meta(\"arcs\")" + "utt.retrieve_meta('arcs')" ] }, { @@ -416,7 +415,7 @@ "outputs": [], "source": [ "def question_filter(utt):\n", - " return utt.retrieve_meta(\"is_question\")" + " return utt.retrieve_meta('is_question')" ] }, { @@ -430,26 +429,13 @@ }, "outputs": [], "source": [ - "q_arc_pipe = ConvokitPipeline(\n", - " [\n", - " (\"censor_nouns\", CensorNouns(\"parsed_censored\", verbosity=VERBOSITY)),\n", - " (\n", - " \"shallow_arcs\",\n", - " TextToArcs(\n", - " \"arcs_censored\", input_field=\"parsed_censored\", root_only=True, verbosity=VERBOSITY\n", - " ),\n", - " ),\n", - " (\n", - " \"question_sentence_filter\",\n", - " QuestionSentences(\n", - " \"question_arcs\",\n", - " input_field=\"arcs_censored\",\n", - " input_filter=question_filter,\n", - " verbosity=VERBOSITY,\n", - " ),\n", - " ),\n", - " ]\n", - ")" + "q_arc_pipe = ConvokitPipeline([\n", + " ('censor_nouns', CensorNouns('parsed_censored', verbosity=VERBOSITY)),\n", + " ('shallow_arcs', TextToArcs('arcs_censored', input_field='parsed_censored', \n", + " root_only=True, verbosity=VERBOSITY)),\n", + " ('question_sentence_filter', QuestionSentences('question_arcs', input_field='arcs_censored',\n", + " input_filter=question_filter, verbosity=VERBOSITY))\n", + "])" ] }, { @@ -641,7 +627,7 @@ } ], "source": [ - "utt.retrieve_meta(\"question_arcs\")" + "utt.retrieve_meta('question_arcs')" ] }, { @@ -662,7 +648,7 @@ }, "outputs": [], "source": [ - "test_utt_id_1 = \"2015-06-09c.1041.5\"\n", + "test_utt_id_1 = '2015-06-09c.1041.5'\n", "utt1 = corpus.get_utterance(test_utt_id_1)" ] }, @@ -703,7 +689,7 @@ } ], "source": [ - "utt1.retrieve_meta(\"question_arcs\")" + 
"utt1.retrieve_meta('question_arcs')" ] }, { @@ -747,9 +733,8 @@ }, "outputs": [], "source": [ - "pm_model = PhrasingMotifs(\n", - " \"motifs\", \"question_arcs\", min_support=100, fit_filter=question_filter, verbosity=VERBOSITY\n", - ")" + "pm_model = PhrasingMotifs('motifs','question_arcs',min_support=100,fit_filter=question_filter,\n", + " verbosity=VERBOSITY)" ] }, { @@ -1031,7 +1016,7 @@ } ], "source": [ - "utt.retrieve_meta(\"motifs\")" + "utt.retrieve_meta('motifs')" ] }, { @@ -1058,7 +1043,7 @@ } ], "source": [ - "utt.retrieve_meta(\"motifs__sink\")" + "utt.retrieve_meta('motifs__sink')" ] }, { @@ -1130,7 +1115,7 @@ } ], "source": [ - "pm_model.dump_model(os.path.join(ROOT_DIR, \"pm_model\"))" + "pm_model.dump_model(os.path.join(ROOT_DIR, 'pm_model'))" ] }, { @@ -1154,7 +1139,7 @@ } ], "source": [ - "pm_model_dir = os.path.join(ROOT_DIR, \"pm_model\")\n", + "pm_model_dir = os.path.join(ROOT_DIR, 'pm_model')\n", "!ls $pm_model_dir" ] }, @@ -1176,9 +1161,8 @@ }, "outputs": [], "source": [ - "new_pm_model = PhrasingMotifs(\n", - " \"motifs_new\", \"question_arcs\", min_support=100, fit_filter=question_filter, verbosity=VERBOSITY\n", - ")" + "new_pm_model = PhrasingMotifs('motifs_new','question_arcs',min_support=100,fit_filter=question_filter,\n", + " verbosity=VERBOSITY)" ] }, { @@ -1205,7 +1189,7 @@ } ], "source": [ - "new_pm_model.load_model(os.path.join(ROOT_DIR, \"pm_model\"))" + "new_pm_model.load_model(os.path.join(ROOT_DIR, 'pm_model'))" ] }, { @@ -1253,7 +1237,7 @@ } ], "source": [ - "utt.retrieve_meta(\"motifs__sink\")" + "utt.retrieve_meta('motifs__sink')" ] }, { @@ -1280,7 +1264,7 @@ } ], "source": [ - "utt.retrieve_meta(\"motifs_new__sink\")" + "utt.retrieve_meta('motifs_new__sink')" ] }, { @@ -1422,24 +1406,14 @@ } ], "source": [ - "q_arc_pipe_full = ConvokitPipeline(\n", - " [\n", - " (\n", - " \"shallow_arcs_full\",\n", - " TextToArcs(\"root_arcs\", input_field=\"parsed\", root_only=True, verbosity=VERBOSITY),\n", - " ),\n", - " (\n", - " 
\"question_sentence_filter\",\n", - " QuestionSentences(\n", - " \"question_arcs_full\",\n", - " input_field=\"root_arcs\",\n", - " input_filter=question_filter,\n", - " verbosity=VERBOSITY,\n", - " ),\n", - " ),\n", - " ]\n", - ")\n", - "corpus = q_arc_pipe_full.transform(corpus)" + "q_arc_pipe_full = ConvokitPipeline([\n", + " ('shallow_arcs_full', TextToArcs('root_arcs', input_field='parsed', \n", + " root_only=True, verbosity=VERBOSITY)),\n", + " ('question_sentence_filter', QuestionSentences('question_arcs_full', input_field='root_arcs',\n", + " input_filter=question_filter, verbosity=VERBOSITY)),\n", + "\n", + "])\n", + "corpus = q_arc_pipe_full.transform(corpus)\n" ] }, { @@ -1563,13 +1537,9 @@ } ], "source": [ - "noun_pm_model = PhrasingMotifs(\n", - " \"motifs_full\",\n", - " \"question_arcs_full\",\n", - " min_support=100,\n", - " fit_filter=question_filter,\n", - " verbosity=VERBOSITY,\n", - ")\n", + "noun_pm_model = PhrasingMotifs('motifs_full','question_arcs_full',min_support=100,\n", + " fit_filter=question_filter, \n", + " verbosity=VERBOSITY)\n", "noun_pm_model.fit(corpus)" ] }, @@ -1666,7 +1636,7 @@ } ], "source": [ - "utt.retrieve_meta(\"motifs__sink\")" + "utt.retrieve_meta('motifs__sink')" ] }, { @@ -1688,7 +1658,7 @@ } ], "source": [ - "utt.retrieve_meta(\"motifs_full__sink\")" + "utt.retrieve_meta('motifs_full__sink')" ] }, { @@ -1763,11 +1733,9 @@ "outputs": [], "source": [ "def question_filter(utt):\n", - " return utt.retrieve_meta(\"is_question\")\n", - "\n", - "\n", + " return utt.retrieve_meta('is_question')\n", "def response_filter(utt):\n", - " return (not utt.retrieve_meta(\"is_question\")) and (utt.reply_to is not None)" + " return (not utt.retrieve_meta('is_question')) and (utt.reply_to is not None)" ] }, { @@ -1796,15 +1764,10 @@ }, "outputs": [], "source": [ - "pt = PromptTypes(\n", - " n_types=8,\n", - " prompt_field=\"motifs\",\n", - " reference_field=\"arcs_censored\",\n", - " prompt_transform_field=\"motifs__sink\",\n", - " 
output_field=\"prompt_types\",\n", - " random_state=1000,\n", - " verbosity=1,\n", - ")" + "pt = PromptTypes(n_types=8, prompt_field='motifs', reference_field='arcs_censored', \n", + " prompt_transform_field='motifs__sink',\n", + " output_field='prompt_types',\n", + " random_state=1000, verbosity=1)" ] }, { @@ -3325,7 +3288,7 @@ } ], "source": [ - "utt.retrieve_meta(\"prompt_types__prompt_repr\")" + "utt.retrieve_meta('prompt_types__prompt_repr')" ] }, { @@ -3359,7 +3322,7 @@ } ], "source": [ - "utt.retrieve_meta(\"prompt_types__prompt_dists__8\")" + "utt.retrieve_meta('prompt_types__prompt_dists__8')" ] }, { @@ -3386,7 +3349,7 @@ } ], "source": [ - "utt.retrieve_meta(\"prompt_types__prompt_type_dist__8\")" + "utt.retrieve_meta('prompt_types__prompt_type_dist__8')" ] }, { @@ -3595,7 +3558,7 @@ } ], "source": [ - "pt.summarize(corpus, type_ids=utt.retrieve_meta(\"prompt_types__prompt_type__8\"), k=15)" + "pt.summarize(corpus,type_ids=utt.retrieve_meta('prompt_types__prompt_type__8'), k=15)" ] }, { @@ -3677,7 +3640,7 @@ } ], "source": [ - "utt1.retrieve_meta(\"motifs__sink\")" + "utt1.retrieve_meta('motifs__sink')" ] }, { @@ -3697,7 +3660,7 @@ } ], "source": [ - "utt1.retrieve_meta(\"prompt_types__prompt_type__8\")" + "utt1.retrieve_meta('prompt_types__prompt_type__8')" ] }, { @@ -3882,7 +3845,7 @@ } ], "source": [ - "pt.summarize(corpus, type_ids=utt1.retrieve_meta(\"prompt_types__prompt_type__8\"), k=15)" + "pt.summarize(corpus,type_ids=utt1.retrieve_meta('prompt_types__prompt_type__8'), k=15)" ] }, { @@ -3910,7 +3873,7 @@ }, "outputs": [], "source": [ - "utt2 = corpus.get_utterance(\"1987-03-04a.857.5\")" + "utt2 = corpus.get_utterance('1987-03-04a.857.5')" ] }, { @@ -3952,7 +3915,7 @@ } ], "source": [ - "utt2.retrieve_meta(\"motifs__sink\")" + "utt2.retrieve_meta('motifs__sink')" ] }, { @@ -3972,7 +3935,7 @@ } ], "source": [ - "utt2.retrieve_meta(\"prompt_types__prompt_type__8\")" + "utt2.retrieve_meta('prompt_types__prompt_type__8')" ] }, { @@ -4176,7 +4139,7 @@ 
} ], "source": [ - "pt.summarize(corpus, type_ids=utt2.retrieve_meta(\"prompt_types__prompt_type__8\"), k=15)" + "pt.summarize(corpus,type_ids=utt2.retrieve_meta('prompt_types__prompt_type__8'), k=15)" ] }, { @@ -4211,7 +4174,7 @@ }, "outputs": [], "source": [ - "response_utt = corpus.get_utterance(\"1997-01-27a.4.1\")" + "response_utt = corpus.get_utterance('1997-01-27a.4.1')" ] }, { @@ -4278,7 +4241,7 @@ } ], "source": [ - "response_utt.retrieve_meta(\"prompt_types__reference_type__8\")" + "response_utt.retrieve_meta('prompt_types__reference_type__8')" ] }, { @@ -4471,9 +4434,8 @@ } ], "source": [ - "corpus.get_vectors(\n", - " \"prompt_types__prompt_repr\", ids=[utt.id, utt1.id, utt2.id], as_dataframe=True\n", - ").head()" + "corpus.get_vectors('prompt_types__prompt_repr', ids=[utt.id, utt1.id, utt2.id], \n", + " as_dataframe=True).head()" ] }, { @@ -4575,9 +4537,8 @@ } ], "source": [ - "corpus.get_vectors(\n", - " \"prompt_types__prompt_dists__8\", ids=[utt.id, utt1.id, utt2.id], as_dataframe=True\n", - ")" + "corpus.get_vectors('prompt_types__prompt_dists__8', ids=[utt.id, utt1.id, utt2.id], \n", + " as_dataframe=True)" ] }, { @@ -4606,7 +4567,7 @@ }, "outputs": [], "source": [ - "corpus.dump_vectors(\"prompt_types__prompt_repr\")" + "corpus.dump_vectors('prompt_types__prompt_repr')\n" ] }, { @@ -4620,7 +4581,7 @@ }, "outputs": [], "source": [ - "corpus.dump_vectors(\"prompt_types__prompt_dists__8\")" + "corpus.dump_vectors('prompt_types__prompt_dists__8')\n" ] }, { @@ -4636,9 +4597,8 @@ "metadata": {}, "outputs": [], "source": [ - "new_corpus = convokit.Corpus(\n", - " ROOT_DIR, preload_vectors=[\"prompt_types__prompt_repr\", \"prompt_types__prompt_dists__8\"]\n", - ")" + "new_corpus = convokit.Corpus(ROOT_DIR, preload_vectors=['prompt_types__prompt_repr',\n", + " 'prompt_types__prompt_dists__8'])\n" ] }, { @@ -4798,9 +4758,8 @@ } ], "source": [ - "new_corpus.get_vectors(\n", - " \"prompt_types__prompt_repr\", ids=[utt.id, utt1.id, utt2.id], 
as_dataframe=True\n", - ")" + "new_corpus.get_vectors('prompt_types__prompt_repr', ids=[utt.id, utt1.id, utt2.id],\n", + " as_dataframe=True)" ] }, { @@ -4895,9 +4854,8 @@ } ], "source": [ - "new_corpus.get_vectors(\n", - " \"prompt_types__prompt_dists__8\", ids=[utt.id, utt1.id, utt2.id], as_dataframe=True\n", - ")" + "new_corpus.get_vectors('prompt_types__prompt_dists__8', ids=[utt.id, utt1.id, utt2.id],\n", + " as_dataframe=True)" ] }, { @@ -4967,7 +4925,7 @@ } ], "source": [ - "pt.dump_model(os.path.join(ROOT_DIR, \"pt_model\"))" + "pt.dump_model(os.path.join(ROOT_DIR, 'pt_model'))" ] }, { @@ -4999,7 +4957,7 @@ } ], "source": [ - "pt_model_dir = os.path.join(ROOT_DIR, \"pt_model\")\n", + "pt_model_dir = os.path.join(ROOT_DIR, 'pt_model')\n", "!ls $pt_model_dir" ] }, @@ -5021,16 +4979,11 @@ }, "outputs": [], "source": [ - "new_pt = PromptTypes(\n", - " prompt_field=\"motifs\",\n", - " reference_field=\"arcs_censored\",\n", - " prompt_transform_field=\"motifs__sink\",\n", - " output_field=\"prompt_types_new\",\n", - " prompt__tfidf_min_df=100,\n", - " reference__tfidf_min_df=100,\n", - " random_state=1000,\n", - " verbosity=1,\n", - ")" + "new_pt = PromptTypes(prompt_field='motifs', reference_field='arcs_censored', \n", + " prompt_transform_field='motifs__sink',\n", + " output_field='prompt_types_new', prompt__tfidf_min_df=100,\n", + " reference__tfidf_min_df=100, \n", + " random_state=1000, verbosity=1)" ] }, { @@ -5083,7 +5036,7 @@ } ], "source": [ - "utt.retrieve_meta(\"prompt_types_new__prompt_type__8\")" + "utt.retrieve_meta('prompt_types_new__prompt_type__8')" ] }, { @@ -5739,17 +5692,11 @@ }, "outputs": [], "source": [ - "pt_arcs = PromptTypes(\n", - " prompt_field=\"arcs_censored\",\n", - " reference_field=\"arcs_censored\",\n", - " prompt_transform_field=\"arcs_censored\",\n", - " output_field=\"prompt_types_arcs\",\n", - " prompt__tfidf_min_df=100,\n", - " reference__tfidf_min_df=100,\n", - " n_types=8,\n", - " random_state=1000,\n", - " 
verbosity=1,\n", - ")" + "pt_arcs = PromptTypes(prompt_field='arcs_censored', reference_field='arcs_censored', \n", + " prompt_transform_field='arcs_censored',\n", + " output_field='prompt_types_arcs', prompt__tfidf_min_df=100,\n", + " reference__tfidf_min_df=100, n_types=8,\n", + " random_state=1000, verbosity=1)" ] }, { diff --git a/examples/prompt-types/prompt-type-wrapper-demo.ipynb b/examples/prompt-types/prompt-type-wrapper-demo.ipynb index b5f367f7..e6307f0e 100644 --- a/examples/prompt-types/prompt-type-wrapper-demo.ipynb +++ b/examples/prompt-types/prompt-type-wrapper-demo.ipynb @@ -57,8 +57,7 @@ "outputs": [], "source": [ "import warnings\n", - "\n", - "warnings.filterwarnings(\"ignore\")" + "warnings.filterwarnings('ignore')" ] }, { @@ -79,7 +78,7 @@ }, "outputs": [], "source": [ - "# OPTION 1: DOWNLOAD CORPUS\n", + "# OPTION 1: DOWNLOAD CORPUS \n", "# UNCOMMENT THESE LINES TO DOWNLOAD CORPUS\n", "# DATA_DIR = ''\n", "# ROOT_DIR = download('parliament-corpus', data_dir=DATA_DIR)\n", @@ -89,7 +88,7 @@ "# ROOT_DIR = ''\n", "\n", "corpus = Corpus(ROOT_DIR)\n", - "corpus.load_info(\"utterance\", [\"parsed\"])" + "corpus.load_info('utterance',['parsed'])" ] }, { @@ -124,7 +123,7 @@ }, "outputs": [], "source": [ - "test_utt_id = \"1997-01-27a.4.0\"\n", + "test_utt_id = '1997-01-27a.4.0'\n", "utt = corpus.get_utterance(test_utt_id)" ] }, @@ -1933,7 +1932,7 @@ } ], "source": [ - "utt.retrieve_meta(\"motifs\")" + "utt.retrieve_meta('motifs')" ] }, { @@ -1983,7 +1982,7 @@ } ], "source": [ - "utt.retrieve_meta(\"prompt_types__prompt_repr\")" + "utt.retrieve_meta('prompt_types__prompt_repr')" ] }, { @@ -2017,7 +2016,7 @@ } ], "source": [ - "utt.retrieve_meta(\"prompt_types__prompt_dists__8\")" + "utt.retrieve_meta('prompt_types__prompt_dists__8')" ] }, { @@ -2044,7 +2043,7 @@ } ], "source": [ - "utt.retrieve_meta(\"prompt_types__prompt_type__8\")" + "utt.retrieve_meta('prompt_types__prompt_type__8')" ] }, { @@ -2064,7 +2063,7 @@ } ], "source": [ - 
"utt.retrieve_meta(\"prompt_types__prompt_type_dist__8\")" + "utt.retrieve_meta('prompt_types__prompt_type_dist__8')" ] }, { @@ -2340,7 +2339,7 @@ }, "outputs": [], "source": [ - "utt1 = corpus.get_utterance(\"1987-03-04a.857.5\")" + "utt1 = corpus.get_utterance('1987-03-04a.857.5')" ] }, { @@ -2362,7 +2361,7 @@ } ], "source": [ - "utt1.retrieve_meta(\"motifs\")" + "utt1.retrieve_meta('motifs')" ] }, { @@ -2402,7 +2401,7 @@ } ], "source": [ - "utt1.retrieve_meta(\"prompt_types__prompt_type__8\")" + "utt1.retrieve_meta('prompt_types__prompt_type__8')" ] }, { @@ -2435,7 +2434,7 @@ }, "outputs": [], "source": [ - "str_utt = pt.transform_utterance(\"Do you share my distaste for cockroaches?\")" + "str_utt = pt.transform_utterance('Do you share my distaste for cockroaches?')" ] }, { @@ -2455,7 +2454,7 @@ } ], "source": [ - "str_utt.retrieve_meta(\"motifs\")" + "str_utt.retrieve_meta('motifs')" ] }, { @@ -2475,7 +2474,7 @@ } ], "source": [ - "str_utt.retrieve_meta(\"prompt_types__prompt_type__8\")" + "str_utt.retrieve_meta('prompt_types__prompt_type__8')" ] }, { @@ -2543,7 +2542,7 @@ } ], "source": [ - "pt.dump_model(os.path.join(ROOT_DIR, \"full_pipe_models\"))" + "pt.dump_model(os.path.join(ROOT_DIR, 'full_pipe_models'))" ] }, { @@ -2564,9 +2563,8 @@ }, "outputs": [], "source": [ - "new_pt = PromptTypeWrapper(\n", - " output_field=\"prompt_types_new\", min_support=100, svd__n_components=25, random_state=1000\n", - ")" + "new_pt = PromptTypeWrapper(output_field='prompt_types_new',\n", + " min_support=100, svd__n_components=25, random_state=1000)" ] }, { @@ -2589,7 +2587,7 @@ } ], "source": [ - "new_pt.load_model(os.path.join(ROOT_DIR, \"full_pipe_models\"))" + "new_pt.load_model(os.path.join(ROOT_DIR, 'full_pipe_models'))" ] }, { @@ -2606,7 +2604,7 @@ } ], "source": [ - "pt_model_dir = os.path.join(ROOT_DIR, \"full_pipe_models\")\n", + "pt_model_dir = os.path.join(ROOT_DIR, 'full_pipe_models')\n", "!ls $pt_model_dir" ] }, @@ -2621,7 +2619,7 @@ }, "outputs": [], 
"source": [ - "new_str_utt = new_pt.transform_utterance(\"Do you share my distaste for cockroaches?\")" + "new_str_utt = new_pt.transform_utterance('Do you share my distaste for cockroaches?')" ] }, { @@ -2641,7 +2639,7 @@ } ], "source": [ - "new_str_utt.retrieve_meta(\"motifs\")" + "new_str_utt.retrieve_meta('motifs')" ] }, { @@ -2661,7 +2659,7 @@ } ], "source": [ - "new_str_utt.retrieve_meta(\"prompt_types_new__prompt_type__8\")" + "new_str_utt.retrieve_meta('prompt_types_new__prompt_type__8')" ] }, { diff --git a/examples/sigdial-demo.ipynb b/examples/sigdial-demo.ipynb index 31ff4225..cba90dc8 100644 --- a/examples/sigdial-demo.ipynb +++ b/examples/sigdial-demo.ipynb @@ -47,7 +47,7 @@ } ], "source": [ - "movie_corpus = Corpus(download(\"movie-corpus\"))" + "movie_corpus = Corpus(download('movie-corpus'))" ] }, { @@ -106,8 +106,8 @@ "outputs": [], "source": [ "for convo in movie_corpus.iter_conversations():\n", - " genders = set([speaker.meta[\"gender\"] for speaker in convo.iter_speakers()])\n", - " convo.meta[\"mixed\"] = \"M\" in genders and \"F\" in genders" + " genders = set([speaker.meta['gender'] for speaker in convo.iter_speakers()])\n", + " convo.meta['mixed'] = 'M' in genders and 'F' in genders" ] }, { @@ -570,13 +570,10 @@ "source": [ "fw = FightingWords()\n", "\n", - "fw.fit(\n", - " movie_corpus,\n", - " class1_func=lambda utt: utt.get_conversation().meta[\"mixed\"],\n", - " class2_func=lambda utt: not utt.get_conversation().meta[\"mixed\"],\n", - ")\n", + "fw.fit(movie_corpus, class1_func=lambda utt: utt.get_conversation().meta['mixed'],\n", + " class2_func=lambda utt: not utt.get_conversation().meta['mixed'])\n", "\n", - "fw.summarize(movie_corpus, plot=True, class1_name=\"mixed\", class2_name=\"single\")" + "fw.summarize(movie_corpus, plot=True, class1_name='mixed', class2_name='single')" ] }, { @@ -610,4 +607,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git 
a/examples/speaker-convo-attributes/speaker-convo-diversity-demo.ipynb b/examples/speaker-convo-attributes/speaker-convo-diversity-demo.ipynb index 205e99a5..3fd29723 100644 --- a/examples/speaker-convo-attributes/speaker-convo-diversity-demo.ipynb +++ b/examples/speaker-convo-attributes/speaker-convo-diversity-demo.ipynb @@ -93,13 +93,9 @@ "metadata": {}, "outputs": [], "source": [ - "SPEAKER_BLACKLIST = [\"[deleted]\", \"DeltaBot\", \"AutoModerator\"]\n", - "\n", - "\n", + "SPEAKER_BLACKLIST = ['[deleted]', 'DeltaBot','AutoModerator']\n", "def utterance_is_valid(utterance):\n", - " return (utterance.id != utterance.conversation_id) and (\n", - " utterance.speaker.id not in SPEAKER_BLACKLIST\n", - " )" + " return (utterance.id != utterance.conversation_id) and (utterance.speaker.id not in SPEAKER_BLACKLIST)" ] }, { @@ -135,7 +131,7 @@ } ], "source": [ - "corpus.get_speaker(\"ThatBelligerentSloth\").meta[\"n_convos\"]" + "corpus.get_speaker('ThatBelligerentSloth').meta['n_convos']" ] }, { @@ -155,7 +151,7 @@ } ], "source": [ - "corpus.get_speaker(\"ThatBelligerentSloth\").meta[\"start_time\"]" + "corpus.get_speaker('ThatBelligerentSloth').meta['start_time']" ] }, { @@ -185,7 +181,7 @@ } ], "source": [ - "corpus.get_speaker(\"ThatBelligerentSloth\").meta[\"conversations\"][\"2wm22t\"]" + "corpus.get_speaker('ThatBelligerentSloth').meta['conversations']['2wm22t']" ] }, { @@ -210,7 +206,7 @@ "metadata": {}, "outputs": [], "source": [ - "speaker_activities = corpus.get_attribute_table(\"speaker\", [\"n_convos\"])" + "speaker_activities = corpus.get_attribute_table('speaker',['n_convos'])" ] }, { @@ -312,7 +308,7 @@ } ], "source": [ - "speaker_activities.sort_values(\"n_convos\", ascending=False).head(10)" + "speaker_activities.sort_values('n_convos', ascending=False).head(10)" ] }, { @@ -321,7 +317,7 @@ "metadata": {}, "outputs": [], "source": [ - "top_speakers = speaker_activities.sort_values(\"n_convos\", ascending=False).head(100).index" + "top_speakers = 
speaker_activities.sort_values('n_convos', ascending=False).head(100).index" ] }, { @@ -933,7 +929,7 @@ } ], "source": [ - "tokenizer = TextParser(mode=\"tokenize\", output_field=\"tokens\", verbosity=1000)\n", + "tokenizer = TextParser(mode='tokenize', output_field='tokens', verbosity=1000)\n", "subset_corpus = tokenizer.transform(subset_corpus)" ] }, @@ -1057,7 +1053,7 @@ } ], "source": [ - "subset_corpus.get_utterance(\"cos7k4p\").retrieve_meta(\"tokens\")" + "subset_corpus.get_utterance('cos7k4p').retrieve_meta('tokens')" ] }, { @@ -1110,13 +1106,9 @@ } ], "source": [ - "wordcounter = TextProcessor(\n", - " input_field=\"tokens\",\n", - " output_field=\"wordcount\",\n", - " proc_fn=lambda sents: sum(len(sent[\"toks\"]) for sent in sents),\n", - " verbosity=25000,\n", - ")\n", - "subset_corpus = wordcounter.transform(subset_corpus)" + "wordcounter = TextProcessor(input_field='tokens', output_field='wordcount', \n", + " proc_fn=lambda sents: sum(len(sent['toks']) for sent in sents), verbosity=25000)\n", + "subset_corpus = wordcounter.transform(subset_corpus) " ] }, { @@ -1136,7 +1128,7 @@ } ], "source": [ - "subset_corpus.get_utterance(\"cos7k4p\").retrieve_meta(\"wordcount\")" + "subset_corpus.get_utterance('cos7k4p').retrieve_meta('wordcount')" ] }, { @@ -1156,7 +1148,7 @@ } ], "source": [ - "subset_corpus.get_utterance(\"cos8ffz\").retrieve_meta(\"wordcount\")" + "subset_corpus.get_utterance('cos8ffz').retrieve_meta('wordcount')" ] }, { @@ -1190,9 +1182,7 @@ "metadata": {}, "outputs": [], "source": [ - "sc_wordcount = convokit.speaker_convo_helpers.speaker_convo_attrs.SpeakerConvoAttrs(\n", - " \"wordcount\", agg_fn=np.mean\n", - ")\n", + "sc_wordcount = convokit.speaker_convo_helpers.speaker_convo_attrs.SpeakerConvoAttrs('wordcount', agg_fn=np.mean)\n", "subset_corpus = sc_wordcount.transform(subset_corpus)" ] }, @@ -1356,7 +1346,7 @@ } ], "source": [ - "subset_corpus.get_speaker(\"ThatBelligerentSloth\").meta[\"conversations\"][\"2wm22t\"]" + 
"subset_corpus.get_speaker('ThatBelligerentSloth').meta['conversations']['2wm22t']" ] }, { @@ -1374,9 +1364,8 @@ "metadata": {}, "outputs": [], "source": [ - "speaker_convo_len_df = subset_corpus.get_full_attribute_table(\n", - " speaker_convo_attrs=[\"wordcount\", \"n_utterances\"], speaker_attrs=[\"n_convos\"]\n", - ")" + "speaker_convo_len_df = subset_corpus.get_full_attribute_table(speaker_convo_attrs=['wordcount','n_utterances'],\n", + " speaker_attrs=['n_convos'])" ] }, { @@ -1516,8 +1505,10 @@ "outputs": [], "source": [ "def get_lifestage_attributes(attr_df, attr, lifestage_size, agg_fn=np.mean):\n", - " aggs = attr_df.groupby([\"speaker\", attr_df.convo_idx // lifestage_size])[attr].agg(agg_fn)\n", - " aggs = aggs.reset_index().pivot(index=\"speaker\", columns=\"convo_idx\", values=attr)\n", + " aggs = attr_df.groupby(['speaker', attr_df.convo_idx // lifestage_size])\\\n", + " [attr].agg(agg_fn)\n", + " aggs = aggs.reset_index().pivot(index='speaker', columns='convo_idx',\n", + " values=attr)\n", " return aggs" ] }, @@ -1534,9 +1525,8 @@ "metadata": {}, "outputs": [], "source": [ - "subset = speaker_convo_len_df[\n", - " (speaker_convo_len_df.n_convos__speaker >= 20) & (speaker_convo_len_df.convo_idx < 20)\n", - "]" + "subset = speaker_convo_len_df[(speaker_convo_len_df.n_convos__speaker >= 20)\n", + " & (speaker_convo_len_df.convo_idx < 20)]" ] }, { @@ -1545,7 +1535,7 @@ "metadata": {}, "outputs": [], "source": [ - "stage_wc_df = get_lifestage_attributes(subset, \"wordcount\", 10)" + "stage_wc_df = get_lifestage_attributes(subset, 'wordcount', 10)" ] }, { @@ -1672,14 +1662,15 @@ "source": [ "def print_lifestage_comparisons(stage_df):\n", " for i in range(stage_df.columns.max()):\n", - " mask = stage_df[i + 1].notnull() & stage_df[i].notnull()\n", - " c1 = stage_df[i + 1][mask]\n", + " \n", + " mask = stage_df[i+1].notnull() & stage_df[i].notnull()\n", + " c1 = stage_df[i+1][mask]\n", " c0 = stage_df[i][mask]\n", - "\n", - " print(\"stages %d vs %d (%d 
speakers)\" % (i + 1, i, sum(mask)))\n", + " \n", + " print('stages %d vs %d (%d speakers)' % (i + 1, i, sum(mask)))\n", " n_more = sum(c1 > c0)\n", " n = sum(c1 != c0)\n", - " print(\"\\tprop more: %.3f, binom_p=%.2f\" % (n_more / n, stats.binom_test(n_more, n)))" + " print('\\tprop more: %.3f, binom_p=%.2f' % (n_more/n, stats.binom_test(n_more,n)))" ] }, { @@ -1706,7 +1697,7 @@ "metadata": {}, "outputs": [], "source": [ - "stage_convo_len_df = get_lifestage_attributes(subset, \"n_utterances\", 10)" + "stage_convo_len_df = get_lifestage_attributes(subset, 'n_utterances', 10)" ] }, { @@ -1796,15 +1787,8 @@ "metadata": {}, "outputs": [], "source": [ - "scd = convokit.SpeakerConvoDiversityWrapper(\n", - " lifestage_size=10,\n", - " max_exp=20,\n", - " sample_size=300,\n", - " min_n_utterances=1,\n", - " n_iters=50,\n", - " cohort_delta=60 * 60 * 24 * 30 * 2,\n", - " verbosity=100,\n", - ")" + "scd = convokit.SpeakerConvoDiversityWrapper(lifestage_size=10, max_exp=20,\n", + " sample_size=300, min_n_utterances=1, n_iters=50, cohort_delta=60*60*24*30*2, verbosity=100)" ] }, { @@ -1862,9 +1846,7 @@ "metadata": {}, "outputs": [], "source": [ - "div_df = subset_corpus.get_full_attribute_table(\n", - " [\"div__self\", \"div__other\", \"div__adj\", \"tokens\", \"n_utterances\"], [\"n_convos\"]\n", - ")" + "div_df = subset_corpus.get_full_attribute_table(['div__self','div__other','div__adj', 'tokens', 'n_utterances'], ['n_convos'])" ] }, { @@ -1941,11 +1923,11 @@ } ], "source": [ - "for attr in [\"div__self\", \"div__other\", \"div__adj\"]:\n", + "for attr in ['div__self','div__other','div__adj']:\n", " print(attr)\n", " stage_df = get_lifestage_attributes(div_df, attr, 10)\n", " print_lifestage_comparisons(stage_df)\n", - " print(\"\\n\\n===\")" + " print('\\n\\n===')" ] }, { diff --git a/examples/text-processing/text_preprocessing_demo.ipynb b/examples/text-processing/text_preprocessing_demo.ipynb index 8cb053c7..cdcbb5b8 100644 --- 
a/examples/text-processing/text_preprocessing_demo.ipynb +++ b/examples/text-processing/text_preprocessing_demo.ipynb @@ -38,18 +38,9 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 67, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/seanzhangkx/opt/anaconda3/envs/convokit_git/lib/python3.9/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", - " from .autonotebook import tqdm as notebook_tqdm\n" - ] - } - ], + "outputs": [], "source": [ "import convokit\n", "from convokit import download, Speaker" @@ -57,11 +48,11 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ - "# OPTION 1: DOWNLOAD CORPUS\n", + "# OPTION 1: DOWNLOAD CORPUS \n", "# UNCOMMENT THESE LINES TO DOWNLOAD CORPUS\n", "# DATA_DIR = ''\n", "# ROOT_DIR = download('tennis-corpus')\n", @@ -75,7 +66,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -94,11 +85,11 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ - "# SET YOUR OWN OUTPUT DIRECTORY HERE.\n", + "# SET YOUR OWN OUTPUT DIRECTORY HERE. \n", "# OUT_DIR = ''" ] }, @@ -111,17 +102,17 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ - "test_utt_id = \"1681_14.a\"\n", + "test_utt_id = '1681_14.a'\n", "utt = corpus.get_utterance(test_utt_id)" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 8, "metadata": {}, "outputs": [ { @@ -130,7 +121,7 @@ "\"Yeah, but many friends went with me, Japanese guy. So I wasn't -- I wasn't like homesick. 
But now sometimes I get homesick.\"" ] }, - "execution_count": 9, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } @@ -148,16 +139,16 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 9, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "ConvoKitMeta({'is_answer': True, 'is_question': False, 'pair_idx': '1681_14'})" + "{'is_answer': True, 'is_question': False, 'pair_idx': '1681_14'}" ] }, - "execution_count": 10, + "execution_count": 9, "metadata": {}, "output_type": "execute_result" } @@ -206,7 +197,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 10, "metadata": {}, "outputs": [], "source": [ @@ -229,12 +220,12 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 11, "metadata": {}, "outputs": [], "source": [ "def preprocess_text(text):\n", - " text = text.replace(\" -- \", \" \")\n", + " text = text.replace(' -- ', ' ')\n", " return text" ] }, @@ -252,11 +243,11 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 12, "metadata": {}, "outputs": [], "source": [ - "prep = TextProcessor(proc_fn=preprocess_text, output_field=\"clean_text\")\n", + "prep = TextProcessor(proc_fn=preprocess_text, output_field='clean_text')\n", "corpus = prep.transform(corpus)" ] }, @@ -269,7 +260,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 33, "metadata": {}, "outputs": [ { @@ -278,13 +269,13 @@ "\"Yeah, but many friends went with me, Japanese guy. So I wasn't I wasn't like homesick. 
But now sometimes I get homesick.\"" ] }, - "execution_count": 14, + "execution_count": 33, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "utt.retrieve_meta(\"clean_text\")" + "utt.retrieve_meta('clean_text')" ] }, { @@ -330,7 +321,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 34, "metadata": {}, "outputs": [], "source": [ @@ -339,16 +330,16 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 35, "metadata": {}, "outputs": [], "source": [ - "parser = TextParser(input_field=\"clean_text\", verbosity=50)" + "parser = TextParser(input_field='clean_text', verbosity=50)" ] }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 36, "metadata": {}, "outputs": [ { @@ -395,16 +386,16 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 37, "metadata": {}, "outputs": [], "source": [ - "test_parse = utt.retrieve_meta(\"parsed\")" + "test_parse = utt.retrieve_meta('parsed')" ] }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 38, "metadata": {}, "outputs": [ { @@ -415,20 +406,17 @@ " {'tok': ',', 'tag': ',', 'dep': 'punct', 'up': 5, 'dn': []},\n", " {'tok': 'but', 'tag': 'CC', 'dep': 'cc', 'up': 5, 'dn': []},\n", " {'tok': 'many', 'tag': 'JJ', 'dep': 'amod', 'up': 4, 'dn': []},\n", - " {'tok': 'friends', 'tag': 'NNS', 'dep': 'nsubj', 'up': 5, 'dn': [3]},\n", - " {'tok': 'went',\n", - " 'tag': 'VBD',\n", - " 'dep': 'ROOT',\n", - " 'dn': [0, 1, 2, 4, 6, 8, 10, 11]},\n", + " {'tok': 'friends', 'tag': 'NNS', 'dep': 'nsubj', 'up': 5, 'dn': [3, 10]},\n", + " {'tok': 'went', 'tag': 'VBD', 'dep': 'ROOT', 'dn': [0, 1, 2, 4, 6, 8, 11]},\n", " {'tok': 'with', 'tag': 'IN', 'dep': 'prep', 'up': 5, 'dn': [7]},\n", " {'tok': 'me', 'tag': 'PRP', 'dep': 'pobj', 'up': 6, 'dn': []},\n", " {'tok': ',', 'tag': ',', 'dep': 'punct', 'up': 5, 'dn': []},\n", " {'tok': 'Japanese', 'tag': 'JJ', 'dep': 'amod', 'up': 10, 'dn': []},\n", - " {'tok': 'guy', 'tag': 'NN', 
'dep': 'npadvmod', 'up': 5, 'dn': [9]},\n", + " {'tok': 'guy', 'tag': 'NN', 'dep': 'appos', 'up': 4, 'dn': [9]},\n", " {'tok': '.', 'tag': '.', 'dep': 'punct', 'up': 5, 'dn': []}]}" ] }, - "execution_count": 19, + "execution_count": 38, "metadata": {}, "output_type": "execute_result" } @@ -446,17 +434,17 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 39, "metadata": {}, "outputs": [], "source": [ - "texttagger = TextParser(output_field=\"tagged\", input_field=\"clean_text\", mode=\"tag\")\n", + "texttagger = TextParser(output_field='tagged', input_field='clean_text', mode='tag')\n", "corpus = texttagger.transform(corpus)" ] }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 41, "metadata": {}, "outputs": [ { @@ -476,13 +464,13 @@ " {'tok': '.', 'tag': '.'}]}" ] }, - "execution_count": 21, + "execution_count": 41, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "utt.retrieve_meta(\"tagged\")[0]" + "utt.retrieve_meta('tagged')[0]" ] }, { @@ -508,7 +496,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 42, "metadata": {}, "outputs": [ { @@ -517,7 +505,7 @@ "['is_answer', 'is_question', 'pair_idx', 'clean_text', 'parsed', 'tagged']" ] }, - "execution_count": 22, + "execution_count": 42, "metadata": {}, "output_type": "execute_result" } @@ -544,15 +532,12 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 55, "metadata": {}, "outputs": [], "source": [ - "corpus.dump(\n", - " os.path.basename(OUT_DIR),\n", - " base_path=os.path.dirname(OUT_DIR),\n", - " fields_to_skip={\"utterance\": [\"parsed\", \"tagged\", \"clean_text\"]},\n", - ")" + "corpus.dump(os.path.basename(OUT_DIR), base_path=os.path.dirname(OUT_DIR), \n", + " fields_to_skip={'utterance': ['parsed','tagged','clean_text']})" ] }, { @@ -570,11 +555,11 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 56, "metadata": {}, "outputs": [], "source": [ - 
"corpus.dump_info(\"utterance\", [\"parsed\", \"tagged\"], dir_name=OUT_DIR)" + "corpus.dump_info('utterance',['parsed','tagged'], dir_name = OUT_DIR)" ] }, { @@ -586,16 +571,15 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 59, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "conversations.json info.parsed.jsonl \u001b[34msupreme-corpus\u001b[m\u001b[m/ \u001b[34mwiki-corpus\u001b[m\u001b[m/\n", - "corpus.json info.tagged.jsonl supreme-corpus.zip wiki-corpus.zip\n", - "index.json speakers.json utterances.jsonl\n" + "conversations.json index.json info.tagged.jsonl users.json\n", + "corpus.json info.parsed.jsonl speakers.json utterances.jsonl\n" ] } ], @@ -605,9 +589,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 58, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "'/kitchen/convokit_corpora_lf/tennis-corpus/'" + ] + }, + "execution_count": 58, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [] }, { @@ -619,7 +614,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 60, "metadata": {}, "outputs": [], "source": [ @@ -628,7 +623,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 61, "metadata": {}, "outputs": [], "source": [ @@ -644,16 +639,16 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 62, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "KeysView(ConvoKitMeta({'is_answer': True, 'is_question': False, 'pair_idx': '1681_14'}))" + "KeysView({'is_answer': True, 'is_question': False, 'pair_idx': '1681_14'})" ] }, - "execution_count": 28, + "execution_count": 62, "metadata": {}, "output_type": "execute_result" } @@ -671,36 +666,33 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "new_corpus.load_info(\"utterance\", [\"parsed\"])" + 
"new_corpus.load_info('utterance',['parsed'])" ] }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 68, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "({'rt': 5,\n", + "[{'rt': 5,\n", " 'toks': [{'tok': 'Yeah', 'tag': 'UH', 'dep': 'intj', 'up': 5, 'dn': []},\n", " {'tok': ',', 'tag': ',', 'dep': 'punct', 'up': 5, 'dn': []},\n", " {'tok': 'but', 'tag': 'CC', 'dep': 'cc', 'up': 5, 'dn': []},\n", " {'tok': 'many', 'tag': 'JJ', 'dep': 'amod', 'up': 4, 'dn': []},\n", - " {'tok': 'friends', 'tag': 'NNS', 'dep': 'nsubj', 'up': 5, 'dn': [3]},\n", - " {'tok': 'went',\n", - " 'tag': 'VBD',\n", - " 'dep': 'ROOT',\n", - " 'dn': [0, 1, 2, 4, 6, 8, 10, 11]},\n", + " {'tok': 'friends', 'tag': 'NNS', 'dep': 'nsubj', 'up': 5, 'dn': [3, 10]},\n", + " {'tok': 'went', 'tag': 'VBD', 'dep': 'ROOT', 'dn': [0, 1, 2, 4, 6, 8, 11]},\n", " {'tok': 'with', 'tag': 'IN', 'dep': 'prep', 'up': 5, 'dn': [7]},\n", " {'tok': 'me', 'tag': 'PRP', 'dep': 'pobj', 'up': 6, 'dn': []},\n", " {'tok': ',', 'tag': ',', 'dep': 'punct', 'up': 5, 'dn': []},\n", " {'tok': 'Japanese', 'tag': 'JJ', 'dep': 'amod', 'up': 10, 'dn': []},\n", - " {'tok': 'guy', 'tag': 'NN', 'dep': 'npadvmod', 'up': 5, 'dn': [9]},\n", + " {'tok': 'guy', 'tag': 'NN', 'dep': 'appos', 'up': 4, 'dn': [9]},\n", " {'tok': '.', 'tag': '.', 'dep': 'punct', 'up': 5, 'dn': []}]},\n", " {'rt': 2,\n", " 'toks': [{'tok': 'So', 'tag': 'RB', 'dep': 'advmod', 'up': 2, 'dn': []},\n", @@ -708,10 +700,10 @@ " {'tok': 'was', 'tag': 'VBD', 'dep': 'ROOT', 'dn': [0, 1, 3, 5, 9]},\n", " {'tok': \"n't\", 'tag': 'RB', 'dep': 'neg', 'up': 2, 'dn': []},\n", " {'tok': 'I', 'tag': 'PRP', 'dep': 'nsubj', 'up': 5, 'dn': []},\n", - " {'tok': 'was', 'tag': 'VBD', 'dep': 'ccomp', 'up': 2, 'dn': [4, 6, 7]},\n", + " {'tok': 'was', 'tag': 'VBD', 'dep': 'ccomp', 'up': 2, 'dn': [4, 6, 8]},\n", " {'tok': \"n't\", 'tag': 'RB', 'dep': 'neg', 'up': 5, 'dn': []},\n", - " {'tok': 'like', 'tag': 'IN', 'dep': 'prep', 'up': 5, 'dn': [8]},\n", - " 
{'tok': 'homesick', 'tag': 'NN', 'dep': 'pobj', 'up': 7, 'dn': []},\n", + " {'tok': 'like', 'tag': 'UH', 'dep': 'intj', 'up': 8, 'dn': []},\n", + " {'tok': 'homesick', 'tag': 'JJ', 'dep': 'acomp', 'up': 5, 'dn': [7]},\n", " {'tok': '.', 'tag': '.', 'dep': 'punct', 'up': 2, 'dn': []}]},\n", " {'rt': 4,\n", " 'toks': [{'tok': 'But', 'tag': 'CC', 'dep': 'cc', 'up': 4, 'dn': []},\n", @@ -720,16 +712,16 @@ " {'tok': 'I', 'tag': 'PRP', 'dep': 'nsubj', 'up': 4, 'dn': []},\n", " {'tok': 'get', 'tag': 'VBP', 'dep': 'ROOT', 'dn': [0, 1, 2, 3, 5, 6]},\n", " {'tok': 'homesick', 'tag': 'JJ', 'dep': 'acomp', 'up': 4, 'dn': []},\n", - " {'tok': '.', 'tag': '.', 'dep': 'punct', 'up': 4, 'dn': []}]})" + " {'tok': '.', 'tag': '.', 'dep': 'punct', 'up': 4, 'dn': []}]}]" ] }, - "execution_count": 30, + "execution_count": 68, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "new_utt.retrieve_meta(\"parsed\")" + "new_utt.retrieve_meta('parsed')" ] }, { @@ -761,7 +753,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 69, "metadata": {}, "outputs": [], "source": [ @@ -770,16 +762,16 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 70, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Utterance({'obj_type': 'utterance', 'vectors': [], 'speaker_': Speaker({'obj_type': 'speaker', 'vectors': [], 'owner': None, 'id': 'speaker', 'temp_storage': {}, 'meta': {}}), 'owner': None, 'id': None, 'temp_storage': {'speaker_id': 'speaker', 'conversation_id': None, 'reply_to': None, 'timestamp': None, 'text': 'I played -- a tennis match.'}, 'meta': {'clean_text': 'I played a tennis match.'}})" + "Utterance({'obj_type': 'utterance', 'meta': {'clean_text': 'I played a tennis match.'}, 'vectors': [], 'speaker': Speaker({'obj_type': 'speaker', 'meta': {}, 'vectors': [], 'owner': None, 'id': 'speaker'}), 'conversation_id': None, 'reply_to': None, 'timestamp': None, 'text': 'I played -- a tennis match.', 'owner': None, 'id': 
None})" ] }, - "execution_count": 32, + "execution_count": 70, "metadata": {}, "output_type": "execute_result" } @@ -790,16 +782,16 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 72, "metadata": {}, "outputs": [], "source": [ - "adhoc_utt = prep.transform_utterance(test_str)" + "adhoc_utt = prep.transform_utterance(adhoc_utt)" ] }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 74, "metadata": {}, "outputs": [ { @@ -808,13 +800,13 @@ "'I played a tennis match.'" ] }, - "execution_count": 34, + "execution_count": 74, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "adhoc_utt.retrieve_meta(\"clean_text\")" + "adhoc_utt.retrieve_meta('clean_text')" ] }, { @@ -840,7 +832,7 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 75, "metadata": {}, "outputs": [], "source": [ @@ -859,21 +851,18 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 76, "metadata": {}, "outputs": [], "source": [ - "parse_pipe = ConvokitPipeline(\n", - " [\n", - " (\"prep\", TextProcessor(preprocess_text, \"clean_text_pipe\")),\n", - " (\"parse\", TextParser(\"parsed_pipe\", input_field=\"clean_text_pipe\", verbosity=50)),\n", - " ]\n", - ")" + "parse_pipe = ConvokitPipeline([('prep', TextProcessor(preprocess_text, 'clean_text_pipe')),\n", + " ('parse', TextParser('parsed_pipe', input_field='clean_text_pipe',\n", + " verbosity=50))])" ] }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 77, "metadata": {}, "outputs": [ { @@ -893,27 +882,24 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 78, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "({'rt': 5,\n", + "[{'rt': 5,\n", " 'toks': [{'tok': 'Yeah', 'tag': 'UH', 'dep': 'intj', 'up': 5, 'dn': []},\n", " {'tok': ',', 'tag': ',', 'dep': 'punct', 'up': 5, 'dn': []},\n", " {'tok': 'but', 'tag': 'CC', 'dep': 'cc', 'up': 5, 'dn': []},\n", " {'tok': 'many', 'tag': 'JJ', 'dep': 'amod', 
'up': 4, 'dn': []},\n", - " {'tok': 'friends', 'tag': 'NNS', 'dep': 'nsubj', 'up': 5, 'dn': [3]},\n", - " {'tok': 'went',\n", - " 'tag': 'VBD',\n", - " 'dep': 'ROOT',\n", - " 'dn': [0, 1, 2, 4, 6, 8, 10, 11]},\n", + " {'tok': 'friends', 'tag': 'NNS', 'dep': 'nsubj', 'up': 5, 'dn': [3, 10]},\n", + " {'tok': 'went', 'tag': 'VBD', 'dep': 'ROOT', 'dn': [0, 1, 2, 4, 6, 8, 11]},\n", " {'tok': 'with', 'tag': 'IN', 'dep': 'prep', 'up': 5, 'dn': [7]},\n", " {'tok': 'me', 'tag': 'PRP', 'dep': 'pobj', 'up': 6, 'dn': []},\n", " {'tok': ',', 'tag': ',', 'dep': 'punct', 'up': 5, 'dn': []},\n", " {'tok': 'Japanese', 'tag': 'JJ', 'dep': 'amod', 'up': 10, 'dn': []},\n", - " {'tok': 'guy', 'tag': 'NN', 'dep': 'npadvmod', 'up': 5, 'dn': [9]},\n", + " {'tok': 'guy', 'tag': 'NN', 'dep': 'appos', 'up': 4, 'dn': [9]},\n", " {'tok': '.', 'tag': '.', 'dep': 'punct', 'up': 5, 'dn': []}]},\n", " {'rt': 2,\n", " 'toks': [{'tok': 'So', 'tag': 'RB', 'dep': 'advmod', 'up': 2, 'dn': []},\n", @@ -921,10 +907,10 @@ " {'tok': 'was', 'tag': 'VBD', 'dep': 'ROOT', 'dn': [0, 1, 3, 5, 9]},\n", " {'tok': \"n't\", 'tag': 'RB', 'dep': 'neg', 'up': 2, 'dn': []},\n", " {'tok': 'I', 'tag': 'PRP', 'dep': 'nsubj', 'up': 5, 'dn': []},\n", - " {'tok': 'was', 'tag': 'VBD', 'dep': 'ccomp', 'up': 2, 'dn': [4, 6, 7]},\n", + " {'tok': 'was', 'tag': 'VBD', 'dep': 'ccomp', 'up': 2, 'dn': [4, 6, 8]},\n", " {'tok': \"n't\", 'tag': 'RB', 'dep': 'neg', 'up': 5, 'dn': []},\n", - " {'tok': 'like', 'tag': 'IN', 'dep': 'prep', 'up': 5, 'dn': [8]},\n", - " {'tok': 'homesick', 'tag': 'NN', 'dep': 'pobj', 'up': 7, 'dn': []},\n", + " {'tok': 'like', 'tag': 'UH', 'dep': 'intj', 'up': 8, 'dn': []},\n", + " {'tok': 'homesick', 'tag': 'JJ', 'dep': 'acomp', 'up': 5, 'dn': [7]},\n", " {'tok': '.', 'tag': '.', 'dep': 'punct', 'up': 2, 'dn': []}]},\n", " {'rt': 4,\n", " 'toks': [{'tok': 'But', 'tag': 'CC', 'dep': 'cc', 'up': 4, 'dn': []},\n", @@ -933,16 +919,16 @@ " {'tok': 'I', 'tag': 'PRP', 'dep': 'nsubj', 'up': 4, 'dn': []},\n", " 
{'tok': 'get', 'tag': 'VBP', 'dep': 'ROOT', 'dn': [0, 1, 2, 3, 5, 6]},\n", " {'tok': 'homesick', 'tag': 'JJ', 'dep': 'acomp', 'up': 4, 'dn': []},\n", - " {'tok': '.', 'tag': '.', 'dep': 'punct', 'up': 4, 'dn': []}]})" + " {'tok': '.', 'tag': '.', 'dep': 'punct', 'up': 4, 'dn': []}]}]" ] }, - "execution_count": 38, + "execution_count": 78, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "utt.retrieve_meta(\"parsed_pipe\")" + "utt.retrieve_meta('parsed_pipe')" ] }, { @@ -954,7 +940,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 79, "metadata": {}, "outputs": [], "source": [ @@ -963,7 +949,7 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 80, "metadata": {}, "outputs": [ { @@ -978,13 +964,13 @@ " {'tok': '.', 'tag': '.', 'dep': 'punct', 'up': 1, 'dn': []}]}]" ] }, - "execution_count": 40, + "execution_count": 80, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "test_utt.retrieve_meta(\"parsed_pipe\")" + "test_utt.retrieve_meta('parsed_pipe')" ] }, { @@ -1012,17 +998,17 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 81, "metadata": {}, "outputs": [], "source": [ - "wc_raw = TextProcessor(proc_fn=lambda x: len(x.split()), output_field=\"wc_raw\")\n", + "wc_raw = TextProcessor(proc_fn=lambda x: len(x.split()), output_field='wc_raw')\n", "corpus = wc_raw.transform(corpus)" ] }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 83, "metadata": {}, "outputs": [ { @@ -1031,13 +1017,13 @@ "23" ] }, - "execution_count": 42, + "execution_count": 83, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "utt.retrieve_meta(\"wc_raw\")" + "utt.retrieve_meta('wc_raw')" ] }, { @@ -1049,11 +1035,11 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": 84, "metadata": {}, "outputs": [], "source": [ - "wc = TextProcessor(proc_fn=lambda x: len(x.split()), output_field=\"wc\", input_field=\"clean_text\")\n", + "wc 
= TextProcessor(proc_fn=lambda x: len(x.split()), output_field='wc', input_field='clean_text')\n", "corpus = wc.transform(corpus)" ] }, @@ -1066,7 +1052,7 @@ }, { "cell_type": "code", - "execution_count": 44, + "execution_count": 86, "metadata": {}, "outputs": [ { @@ -1075,13 +1061,13 @@ "22" ] }, - "execution_count": 44, + "execution_count": 86, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "utt.retrieve_meta(\"wc\")" + "utt.retrieve_meta('wc')" ] }, { @@ -1093,17 +1079,17 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": 87, "metadata": {}, "outputs": [], "source": [ - "chars = TextProcessor(proc_fn=lambda x: len(x), output_field=\"ch\", input_field=\"clean_text\")\n", + "chars = TextProcessor(proc_fn=lambda x: len(x), output_field='ch', input_field='clean_text')\n", "corpus = chars.transform(corpus)" ] }, { "cell_type": "code", - "execution_count": 46, + "execution_count": 88, "metadata": {}, "outputs": [ { @@ -1112,13 +1098,13 @@ "120" ] }, - "execution_count": 46, + "execution_count": 88, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "utt.retrieve_meta(\"ch\")" + "utt.retrieve_meta('ch')" ] }, { @@ -1147,21 +1133,18 @@ }, { "cell_type": "code", - "execution_count": 47, + "execution_count": 89, "metadata": {}, "outputs": [], "source": [ - "char_per_word = TextProcessor(\n", - " proc_fn=lambda x: (x[\"ch\"] / x[\"wc\"], x[\"wc\"] / x[\"ch\"]),\n", - " output_field=[\"char_per_word\", \"word_per_char\"],\n", - " input_field=[\"ch\", \"wc\"],\n", - ")\n", + "char_per_word = TextProcessor(proc_fn=lambda x: (x['ch']/x['wc'], x['wc']/x['ch']), \n", + " output_field=['char_per_word', 'word_per_char'], input_field=['ch','wc'])\n", "corpus = char_per_word.transform(corpus)" ] }, { "cell_type": "code", - "execution_count": 48, + "execution_count": 90, "metadata": {}, "outputs": [ { @@ -1170,18 +1153,18 @@ "5.454545454545454" ] }, - "execution_count": 48, + "execution_count": 90, "metadata": {}, 
"output_type": "execute_result" } ], "source": [ - "utt.retrieve_meta(\"char_per_word\")" + "utt.retrieve_meta('char_per_word')" ] }, { "cell_type": "code", - "execution_count": 49, + "execution_count": 91, "metadata": {}, "outputs": [ { @@ -1190,13 +1173,13 @@ "0.18333333333333332" ] }, - "execution_count": 49, + "execution_count": 91, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "utt.retrieve_meta(\"word_per_char\")" + "utt.retrieve_meta('word_per_char')" ] }, { @@ -1222,28 +1205,26 @@ }, { "cell_type": "code", - "execution_count": 50, + "execution_count": 92, "metadata": {}, "outputs": [], "source": [ "def is_question(utt, aux={}):\n", - " return utt.meta[\"is_question\"]" + " return utt.meta['is_question']" ] }, { "cell_type": "code", - "execution_count": 51, + "execution_count": 93, "metadata": {}, "outputs": [], "source": [ - "qparser = TextParser(\n", - " output_field=\"qparsed\", input_field=\"clean_text\", input_filter=is_question, verbosity=50\n", - ")" + "qparser = TextParser(output_field='qparsed', input_field='clean_text', input_filter=is_question, verbosity=50)" ] }, { "cell_type": "code", - "execution_count": 52, + "execution_count": 94, "metadata": {}, "outputs": [ { @@ -1270,11 +1251,11 @@ }, { "cell_type": "code", - "execution_count": 53, + "execution_count": 95, "metadata": {}, "outputs": [], "source": [ - "utt.retrieve_meta(\"qparsed\")" + "utt.retrieve_meta('qparsed')" ] }, { @@ -1286,7 +1267,7 @@ }, { "cell_type": "code", - "execution_count": 54, + "execution_count": 96, "metadata": {}, "outputs": [ { @@ -1295,20 +1276,20 @@ "'How hard was it for you when, 13 years, left your parents, left Japan to go to the States. 
Was it a big step for you?'" ] }, - "execution_count": 54, + "execution_count": 96, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "q_utt_id = \"1681_14.q\"\n", + "q_utt_id = '1681_14.q'\n", "q_utt = corpus.get_utterance(q_utt_id)\n", "q_utt.text" ] }, { "cell_type": "code", - "execution_count": 55, + "execution_count": 97, "metadata": { "scrolled": true }, @@ -1316,30 +1297,30 @@ { "data": { "text/plain": [ - "({'rt': 15,\n", + "[{'rt': 11,\n", " 'toks': [{'tok': 'How', 'tag': 'WRB', 'dep': 'advmod', 'up': 1, 'dn': []},\n", " {'tok': 'hard', 'tag': 'RB', 'dep': 'acomp', 'up': 2, 'dn': [0]},\n", - " {'tok': 'was', 'tag': 'VBD', 'dep': 'advcl', 'up': 15, 'dn': [1, 3, 4, 11]},\n", + " {'tok': 'was', 'tag': 'VBD', 'dep': 'advcl', 'up': 11, 'dn': [1, 3, 4, 9]},\n", " {'tok': 'it', 'tag': 'PRP', 'dep': 'nsubj', 'up': 2, 'dn': []},\n", " {'tok': 'for', 'tag': 'IN', 'dep': 'prep', 'up': 2, 'dn': [5]},\n", " {'tok': 'you', 'tag': 'PRP', 'dep': 'pobj', 'up': 4, 'dn': []},\n", - " {'tok': 'when', 'tag': 'WRB', 'dep': 'advmod', 'up': 11, 'dn': []},\n", - " {'tok': ',', 'tag': ',', 'dep': 'punct', 'up': 11, 'dn': []},\n", + " {'tok': 'when', 'tag': 'WRB', 'dep': 'advmod', 'up': 9, 'dn': [7]},\n", + " {'tok': ',', 'tag': ',', 'dep': 'punct', 'up': 6, 'dn': []},\n", " {'tok': '13', 'tag': 'CD', 'dep': 'nummod', 'up': 9, 'dn': []},\n", - " {'tok': 'years', 'tag': 'NNS', 'dep': 'nsubj', 'up': 11, 'dn': [8, 10]},\n", - " {'tok': ',', 'tag': ',', 'dep': 'punct', 'up': 9, 'dn': []},\n", - " {'tok': 'left', 'tag': 'VBD', 'dep': 'advcl', 'up': 2, 'dn': [6, 7, 9, 13]},\n", + " {'tok': 'years', 'tag': 'NNS', 'dep': 'npadvmod', 'up': 2, 'dn': [6, 8]},\n", + " {'tok': ',', 'tag': ',', 'dep': 'punct', 'up': 11, 'dn': []},\n", + " {'tok': 'left', 'tag': 'VBD', 'dep': 'ROOT', 'dn': [2, 10, 13, 14, 15, 22]},\n", " {'tok': 'your', 'tag': 'PRP$', 'dep': 'poss', 'up': 13, 'dn': []},\n", " {'tok': 'parents', 'tag': 'NNS', 'dep': 'dobj', 'up': 11, 'dn': [12]},\n", - " {'tok': 
',', 'tag': ',', 'dep': 'punct', 'up': 15, 'dn': []},\n", - " {'tok': 'left', 'tag': 'VBD', 'dep': 'ROOT', 'dn': [2, 14, 16, 18, 22]},\n", + " {'tok': ',', 'tag': ',', 'dep': 'punct', 'up': 11, 'dn': []},\n", + " {'tok': 'left', 'tag': 'VBD', 'dep': 'conj', 'up': 11, 'dn': [16, 18]},\n", " {'tok': 'Japan', 'tag': 'NNP', 'dep': 'dobj', 'up': 15, 'dn': []},\n", " {'tok': 'to', 'tag': 'TO', 'dep': 'aux', 'up': 18, 'dn': []},\n", " {'tok': 'go', 'tag': 'VB', 'dep': 'xcomp', 'up': 15, 'dn': [17, 19]},\n", " {'tok': 'to', 'tag': 'IN', 'dep': 'prep', 'up': 18, 'dn': [21]},\n", " {'tok': 'the', 'tag': 'DT', 'dep': 'det', 'up': 21, 'dn': []},\n", - " {'tok': 'States', 'tag': 'NNPS', 'dep': 'pobj', 'up': 19, 'dn': [20]},\n", - " {'tok': '.', 'tag': '.', 'dep': 'punct', 'up': 15, 'dn': []}]},\n", + " {'tok': 'States', 'tag': 'NNP', 'dep': 'pobj', 'up': 19, 'dn': [20]},\n", + " {'tok': '.', 'tag': '.', 'dep': 'punct', 'up': 11, 'dn': []}]},\n", " {'rt': 0,\n", " 'toks': [{'tok': 'Was', 'tag': 'VBD', 'dep': 'ROOT', 'dn': [1, 4, 7]},\n", " {'tok': 'it', 'tag': 'PRP', 'dep': 'nsubj', 'up': 0, 'dn': []},\n", @@ -1348,16 +1329,16 @@ " {'tok': 'step', 'tag': 'NN', 'dep': 'attr', 'up': 0, 'dn': [2, 3, 5]},\n", " {'tok': 'for', 'tag': 'IN', 'dep': 'prep', 'up': 4, 'dn': [6]},\n", " {'tok': 'you', 'tag': 'PRP', 'dep': 'pobj', 'up': 5, 'dn': []},\n", - " {'tok': '?', 'tag': '.', 'dep': 'punct', 'up': 0, 'dn': []}]})" + " {'tok': '?', 'tag': '.', 'dep': 'punct', 'up': 0, 'dn': []}]}]" ] }, - "execution_count": 55, + "execution_count": 97, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "q_utt.retrieve_meta(\"qparsed\")" + "q_utt.retrieve_meta('qparsed')" ] }, { @@ -1391,7 +1372,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.0" + "version": "3.7.4" } }, "nbformat": 4, diff --git a/examples/vectors/bag-of-words-demo.ipynb b/examples/vectors/bag-of-words-demo.ipynb index 368a02e0..fbfb60ce 100644 --- 
a/examples/vectors/bag-of-words-demo.ipynb +++ b/examples/vectors/bag-of-words-demo.ipynb @@ -22,7 +22,9 @@ "execution_count": 1, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "import convokit" + ] }, { "cell_type": "code", @@ -47,7 +49,7 @@ } ], "source": [ - "corpus = Corpus(filename=download(\"subreddit-Cornell\"))" + "corpus = Corpus(filename=download('subreddit-Cornell'))" ] }, { @@ -143,7 +145,7 @@ ], "source": [ "# before transformation\n", - "corpus.get_utterance(\"dsbgljl\").vectors" + "corpus.get_utterance('dsbgljl').vectors" ] }, { @@ -184,7 +186,7 @@ ], "source": [ "# after transformation\n", - "corpus.get_utterance(\"dsbgljl\").vectors" + "corpus.get_utterance('dsbgljl').vectors" ] }, { @@ -232,7 +234,7 @@ } ], "source": [ - "corpus.get_vector_matrix(\"bow_vector\")" + "corpus.get_vector_matrix('bow_vector')" ] }, { @@ -309,9 +311,9 @@ } ], "source": [ - "bow_classifier = VectorClassifier(\n", - " obj_type=\"utterance\", vector_name=\"bow_vector\", labeller=lambda utt: utt.meta[\"score\"] > 0\n", - ")" + "bow_classifier = VectorClassifier(obj_type=\"utterance\", \n", + " vector_name='bow_vector',\n", + " labeller=lambda utt: utt.meta['score'] > 0)" ] }, { @@ -331,7 +333,7 @@ } ], "source": [ - "# This fit_transform() step fits the classifier and then uses it to compute predictions for all the\n", + "# This fit_transform() step fits the classifier and then uses it to compute predictions for all the \n", "# utterances in the Corpus\n", "bow_classifier.fit_transform(corpus)" ] @@ -533,7 +535,7 @@ ], "source": [ "# The ngrams weighted most positively (i.e. 
utterances with these ngrams are more likely to have positive scores)\n", - "bow_classifier.get_coefs(feature_names=corpus.get_vector_matrix(\"bow_vector\").columns).head()" + "bow_classifier.get_coefs(feature_names=corpus.get_vector_matrix('bow_vector').columns).head()" ] }, { @@ -732,9 +734,7 @@ "metadata": {}, "outputs": [], "source": [ - "top_level_comment_ids = [\n", - " utt.id for utt in corpus.iter_utterances() if utt.id == utt.meta[\"top_level_comment\"]\n", - "]" + "top_level_comment_ids = [utt.id for utt in corpus.iter_utterances() if utt.id == utt.meta['top_level_comment']]" ] }, { @@ -832,11 +832,11 @@ "for thread in threads_corpus.iter_conversations():\n", " thread_len = len(list(thread.iter_utterances()))\n", " if thread_len == 5:\n", - " thread.meta[\"thread_doubles\"] = False\n", + " thread.meta['thread_doubles'] = False\n", " elif thread_len >= 10:\n", - " thread.meta[\"thread_doubles\"] = True\n", + " thread.meta['thread_doubles'] = True\n", " else:\n", - " thread.meta[\"thread_doubles\"] = None" + " thread.meta['thread_doubles'] = None" ] }, { @@ -861,13 +861,9 @@ ], "source": [ "# We set our BoWTransformer to use only the first 5 utterances in the Conversation by configuring 'text_func'\n", - "bow_transformer2 = BoWTransformer(\n", - " obj_type=\"conversation\",\n", - " vector_name=\"bow_vector_2\",\n", - " text_func=lambda convo: \" \".join(\n", - " [utt.text for utt in convo.get_chronological_utterance_list()[:5]]\n", - " ),\n", - ")" + "bow_transformer2 = BoWTransformer(obj_type=\"conversation\", vector_name='bow_vector_2',\n", + " text_func=lambda convo: ' '.join([utt.text for utt in convo.get_chronological_utterance_list()[:5]])\n", + " )" ] }, { @@ -887,9 +883,7 @@ } ], "source": [ - "bow_transformer2.fit_transform(\n", - " threads_corpus, selector=lambda convo: convo.meta[\"thread_doubles\"] is not None\n", - ")" + "bow_transformer2.fit_transform(threads_corpus, selector=lambda convo: convo.meta['thread_doubles'] is not None)" ] }, { @@ 
-933,11 +927,8 @@ } ], "source": [ - "bow_classifier2 = VectorClassifier(\n", - " obj_type=\"conversation\",\n", - " vector_name=\"bow_vector_2\",\n", - " labeller=lambda convo: convo.meta[\"thread_doubles\"],\n", - ")" + "bow_classifier2 = VectorClassifier(obj_type=\"conversation\", vector_name='bow_vector_2',\n", + " labeller=lambda convo: convo.meta['thread_doubles'])" ] }, { @@ -957,9 +948,7 @@ } ], "source": [ - "bow_classifier2.fit_transform(\n", - " threads_corpus, selector=lambda convo: convo.meta[\"thread_doubles\"] is not None\n", - ")" + "bow_classifier2.fit_transform(threads_corpus, selector=lambda convo: convo.meta['thread_doubles'] is not None)" ] }, { @@ -968,9 +957,7 @@ "metadata": {}, "outputs": [], "source": [ - "summary = bow_classifier2.summarize(\n", - " threads_corpus, selector=lambda convo: convo.meta[\"thread_doubles\"] is not None\n", - ")" + "summary = bow_classifier2.summarize(threads_corpus, selector=lambda convo: convo.meta['thread_doubles'] is not None)" ] }, { @@ -1158,9 +1145,7 @@ } ], "source": [ - "bow_classifier2.base_accuracy(\n", - " threads_corpus, selector=lambda convo: convo.meta[\"thread_doubles\"] is not None\n", - ")" + "bow_classifier2.base_accuracy(threads_corpus, selector=lambda convo: convo.meta['thread_doubles'] is not None)" ] }, { @@ -1180,9 +1165,7 @@ } ], "source": [ - "bow_classifier2.accuracy(\n", - " threads_corpus, selector=lambda convo: convo.meta[\"thread_doubles\"] is not None\n", - ")" + "bow_classifier2.accuracy(threads_corpus, selector=lambda convo: convo.meta['thread_doubles'] is not None)" ] }, { @@ -1207,11 +1190,7 @@ } ], "source": [ - "print(\n", - " bow_classifier2.classification_report(\n", - " threads_corpus, selector=lambda convo: convo.meta[\"thread_doubles\"] is not None\n", - " )\n", - ")" + "print(bow_classifier2.classification_report(threads_corpus, selector=lambda convo: convo.meta['thread_doubles'] is not None))" ] }, { @@ -1239,9 +1218,7 @@ "source": [ "# consider only conversations that 
have at least 5 utterances, i.e. from earlier,\n", "# this is any conversation that has thread_doubles with a value that is not None.\n", - "valid_convos = list(\n", - " threads_corpus.iter_conversations(lambda convo: convo.meta[\"thread_doubles\"] is not None)\n", - ")" + "valid_convos = list(threads_corpus.iter_conversations(lambda convo: convo.meta['thread_doubles'] is not None))" ] }, { @@ -1316,15 +1293,15 @@ "outputs": [], "source": [ "for convo in train_convos:\n", - " convo.meta[\"train_test_type\"] = \"train\"\n", - "\n", + " convo.meta['train_test_type'] = 'train'\n", + " \n", "for convo in test_convos:\n", - " convo.meta[\"train_test_type\"] = \"test\"\n", + " convo.meta['train_test_type'] = 'test'\n", "\n", "# any other convo not part of the train/test split should have the metadata attribute value set to None\n", "for convo in threads_corpus.iter_conversations():\n", - " if \"train_test_type\" not in convo.meta:\n", - " convo.meta[\"train_test_type\"] = None" + " if 'train_test_type' not in convo.meta:\n", + " convo.meta['train_test_type'] = None" ] }, { @@ -1345,7 +1322,7 @@ ], "source": [ "# Fit the classifier only on train data\n", - "bow_classifier2.fit(threads_corpus, selector=lambda convo: convo.meta[\"train_test_type\"] == \"train\")" + "bow_classifier2.fit(threads_corpus, selector=lambda convo: convo.meta['train_test_type'] == 'train')" ] }, { @@ -1471,14 +1448,10 @@ "# Evaluating the classifier on test data\n", "\n", "# First annotate the conversation with the prediction\n", - "bow_classifier2.transform(\n", - " threads_corpus, selector=lambda convo: convo.meta[\"train_test_type\"] == \"test\"\n", - ")\n", + "bow_classifier2.transform(threads_corpus, selector=lambda convo: convo.meta['train_test_type'] == 'test')\n", "\n", "# Then evaluate the accuracy of this prediction\n", - "bow_classifier2.summarize(\n", - " threads_corpus, selector=lambda convo: convo.meta[\"train_test_type\"] == \"test\"\n", - ")" + 
"bow_classifier2.summarize(threads_corpus, selector=lambda convo: convo.meta['train_test_type'] == 'test')" ] }, { @@ -1503,11 +1476,8 @@ } ], "source": [ - "print(\n", - " bow_classifier2.classification_report(\n", - " threads_corpus, selector=lambda convo: convo.meta[\"train_test_type\"] == \"test\"\n", - " )\n", - ")" + "print(bow_classifier2.classification_report(threads_corpus, \n", + " selector=lambda convo: convo.meta['train_test_type'] == 'test'))" ] }, { @@ -1541,9 +1511,7 @@ } ], "source": [ - "bow_classifier2.evaluate_with_cv(\n", - " threads_corpus, selector=lambda convo: convo.meta[\"thread_doubles\"] is not None\n", - ")" + "bow_classifier2.evaluate_with_cv(threads_corpus, selector=lambda convo: convo.meta['thread_doubles'] is not None)" ] }, { @@ -1573,9 +1541,9 @@ } ], "source": [ - "bow_classifier2.evaluate_with_train_test_split(\n", - " threads_corpus, selector=lambda convo: convo.meta[\"thread_doubles\"] is not None, test_size=0.2\n", - ")" + "bow_classifier2.evaluate_with_train_test_split(threads_corpus, \n", + " selector=lambda convo: convo.meta['thread_doubles'] is not None,\n", + " test_size=0.2)" ] }, { diff --git a/examples/vectors/vector_demo.ipynb b/examples/vectors/vector_demo.ipynb index 167427bc..3a8bc33d 100644 --- a/examples/vectors/vector_demo.ipynb +++ b/examples/vectors/vector_demo.ipynb @@ -55,7 +55,7 @@ } ], "source": [ - "corpus = Corpus(download(\"subreddit-Cornell\"))" + "corpus = Corpus(download('subreddit-Cornell'))" ] }, { @@ -208,7 +208,7 @@ } ], "source": [ - "bow_transformer = BoWTransformer(obj_type=\"utterance\", vector_name=\"bow\")\n", + "bow_transformer = BoWTransformer(obj_type=\"utterance\", vector_name='bow')\n", "bow_transformer.fit_transform(corpus)" ] }, @@ -284,7 +284,7 @@ } ], "source": [ - "random_utt.get_vector(\"bow\")" + "random_utt.get_vector('bow')" ] }, { @@ -383,7 +383,7 @@ ], "source": [ "# We can get a more interpretable display of the vector as a dataframe\n", - "random_utt.get_vector(\"bow\", 
as_dataframe=True)" + "random_utt.get_vector('bow', as_dataframe=True)" ] }, { @@ -453,7 +453,7 @@ } ], "source": [ - "random_utt.get_vector(\"bow\", as_dataframe=True, columns=[\"youtu\", \"youtube\", \"yr\"])" + "random_utt.get_vector('bow', as_dataframe=True, columns=['youtu', 'youtube', 'yr'])" ] }, { @@ -475,7 +475,7 @@ ], "source": [ "# This works for the non-dataframe format too\n", - "random_utt.get_vector(\"bow\", as_dataframe=False, columns=[\"youtu\", \"youtube\", \"yr\"])" + "random_utt.get_vector('bow', as_dataframe=False, columns=['youtu', 'youtube', 'yr'])" ] }, { @@ -502,7 +502,7 @@ } ], "source": [ - "corpus.vectors # The corpus has a 'bow' vector associated with it" + "corpus.vectors # The corpus has a 'bow' vector associated with it" ] }, { @@ -530,7 +530,7 @@ } ], "source": [ - "corpus.get_vector_matrix(\"bow\")" + "corpus.get_vector_matrix('bow') " ] }, { @@ -539,7 +539,7 @@ "metadata": {}, "outputs": [], "source": [ - "bow_matrix = corpus.get_vector_matrix(\"bow\")" + "bow_matrix = corpus.get_vector_matrix('bow')" ] }, { @@ -838,7 +838,7 @@ } ], "source": [ - "# Accessing the numpy matrix directly; we could use this\n", + "# Accessing the numpy matrix directly; we could use this \n", "bow_matrix.matrix" ] }, @@ -916,7 +916,7 @@ "metadata": {}, "outputs": [], "source": [ - "ck_matrix = ConvoKitMatrix(name=\"bag-of-words\", matrix=matrix_data)" + "ck_matrix = ConvoKitMatrix(name='bag-of-words', matrix=matrix_data)" ] }, { @@ -1171,10 +1171,12 @@ "metadata": {}, "outputs": [], "source": [ - "# We can initialize the ConvoKitMatrix with this information\n", - "ck_matrix = ConvoKitMatrix(\n", - " name=\"bag-of-words\", matrix=matrix_data, columns=column_names, ids=row_ids\n", - ")" + "# We can initialize the ConvoKitMatrix with this information \n", + "ck_matrix = ConvoKitMatrix(name='bag-of-words',\n", + " matrix=matrix_data,\n", + " columns=column_names,\n", + " ids=row_ids\n", + " )" ] }, { @@ -1457,7 +1459,11 @@ } ], "source": [ - 
"corpus.set_vector_matrix(name=\"bag-of-words\", matrix=matrix_data, columns=column_names, ids=row_ids)" + "corpus.set_vector_matrix(name='bag-of-words', \n", + " matrix=matrix_data,\n", + " columns=column_names,\n", + " ids=row_ids\n", + " )" ] }, { @@ -1492,7 +1498,7 @@ ], "source": [ "# It does not have a 'bag-of-words' vector\n", - "utt_example = corpus.get_utterance(\"nyx4d\")\n", + "utt_example = corpus.get_utterance('nyx4d')\n", "utt_example.vectors" ] }, @@ -1516,7 +1522,7 @@ ], "source": [ "# this call will fail since there is no such vector associated with the utterance\n", - "utt_example.get_vector(\"bag-of-words\")" + "utt_example.get_vector('bag-of-words') " ] }, { @@ -1532,7 +1538,7 @@ "metadata": {}, "outputs": [], "source": [ - "utt_example.add_vector(\"bag-of-words\")" + "utt_example.add_vector('bag-of-words')" ] }, { @@ -1560,7 +1566,7 @@ } ], "source": [ - "utt_example.get_vector(\"bag-of-words\")" + "utt_example.get_vector('bag-of-words')" ] }, { @@ -1607,7 +1613,7 @@ } ], "source": [ - "utt_example.delete_vector(\"bag-of-words\")\n", + "utt_example.delete_vector('bag-of-words')\n", "utt_example.vectors" ] }, @@ -1648,7 +1654,7 @@ } ], "source": [ - "corpus.delete_vector_matrix(\"bag-of-words\")\n", + "corpus.delete_vector_matrix('bag-of-words')\n", "corpus.vectors" ] }, @@ -1775,7 +1781,7 @@ ], "source": [ "# horizontal stack\n", - "ConvoKitMatrix.hstack(name=\"hstacked_matrix\", matrices=[matrix_a, matrix_b])" + "ConvoKitMatrix.hstack(name='hstacked_matrix', matrices=[matrix_a, matrix_b])" ] }, { @@ -1797,7 +1803,7 @@ ], "source": [ "# vertical stack\n", - "ConvoKitMatrix.vstack(name=\"vstacked_matrix\", matrices=[matrix_a, matrix_b])" + "ConvoKitMatrix.vstack(name='vstacked_matrix', matrices=[matrix_a, matrix_b])" ] }, { @@ -1843,7 +1849,6 @@ ], "source": [ "import os\n", - "\n", "os.listdir()" ] }, @@ -1854,7 +1859,7 @@ "outputs": [], "source": [ "# dumps all vectors by default\n", - "corpus.dump(\"cornell-with-bow\", base_path=\".\")" + 
"corpus.dump('cornell-with-bow', base_path='.')" ] }, { @@ -1879,7 +1884,7 @@ } ], "source": [ - "os.listdir(\"./cornell-with-bow\")" + "os.listdir('./cornell-with-bow')" ] }, { @@ -1895,7 +1900,7 @@ "metadata": {}, "outputs": [], "source": [ - "corpus.dump(\"cornell-no-bow\", base_path=\".\", exclude_vectors=[\"bow\"])" + "corpus.dump('cornell-no-bow', base_path='.', exclude_vectors=['bow'])" ] }, { @@ -1919,7 +1924,7 @@ } ], "source": [ - "os.listdir(\"./cornell-no-bow\")" + "os.listdir('./cornell-no-bow')" ] }, { @@ -1935,7 +1940,7 @@ "metadata": {}, "outputs": [], "source": [ - "corpus = Corpus(filename=\"./cornell-no-bow\")" + "corpus = Corpus(filename='./cornell-no-bow')" ] }, { @@ -1998,7 +2003,7 @@ "metadata": {}, "outputs": [], "source": [ - "corpus = Corpus(filename=\"./cornell-with-bow\")" + "corpus = Corpus(filename='./cornell-with-bow')" ] }, { @@ -2080,7 +2085,7 @@ ], "source": [ "# fetched normally (the lazy-loading is invisible to the ConvoKit user)\n", - "corpus.get_vector_matrix(\"bow\")" + "corpus.get_vector_matrix('bow')" ] }, { @@ -2096,7 +2101,7 @@ "metadata": {}, "outputs": [], "source": [ - "corpus = Corpus(filename=\"./cornell-with-bow\", preload_vectors=[\"bow\"])" + "corpus = Corpus(filename='./cornell-with-bow', preload_vectors=['bow'])" ] }, { @@ -2137,7 +2142,7 @@ } ], "source": [ - "corpus.get_vector_matrix(\"bow\")" + "corpus.get_vector_matrix('bow')" ] }, {