Merged

Changes from all commits (25 commits)
d978aa2
Address issue #8 - work from home filtering
americalexander Aug 13, 2025
4030e89
Fix filtering in free parking crosstab
americalexander Aug 13, 2025
697fdd1
Fix telecommute frequency xtab
americalexander Aug 13, 2025
58b6a62
Fix missing new tours in per-purpose summaries
americalexander Aug 13, 2025
05b4f50
Fix trip filtering message
americalexander Aug 13, 2025
e07c4fb
Add missing column to long term
americalexander Aug 13, 2025
1f55576
clean up filtering language
americalexander Aug 20, 2025
7213855
Clarify trip summary language
americalexander Aug 20, 2025
e4250a8
Correct default time period bounds
americalexander Aug 20, 2025
9dd88bd
Merge branch 'main' into develop
americalexander Aug 20, 2025
bf1dbfb
typo fix
americalexander Aug 20, 2025
0fc5333
Merge branch 'develop' of https://github.com/ActivitySim/asim_eet_viz…
americalexander Aug 20, 2025
6e9d401
More typo fixes
americalexander Aug 20, 2025
cb89517
More language clarifications
americalexander Aug 20, 2025
682eb4f
Fix long-term summaries filtered by work location
americalexander Aug 20, 2025
da1e736
telecommute summary with only workers not at home
dhensle Aug 25, 2025
bffcaa0
fix filtering in long-term telecommute freq summary
americalexander Aug 25, 2025
994b447
add additions/removals to joint tour scheduling
americalexander Aug 27, 2025
4e95a20
add addition/removal columns to tour summaries
americalexander Aug 27, 2025
2c8d4e4
Fix additions in tour scheduling summaries
americalexander Aug 27, 2025
a00bf3a
Remove redundant filtering / file IO
americalexander Aug 27, 2025
0b4a197
Expand trip scheduling summary
americalexander Sep 10, 2025
3edbe20
Fix at-work subtour filtering summary
americalexander Sep 22, 2025
ea04d85
Fix bug in atwork subtour scheduling summary
americalexander Sep 22, 2025
696ba86
Tour bugfix
americalexander Sep 22, 2025
compile.py: 4 changes (2 additions, 2 deletions)
@@ -67,11 +67,11 @@ def run_notebooks(
nb_in = nbparam.replace_definitions(nb_in, new_params)

# execute it
- ep = ExecutePreprocessor(timeout=600, kernel_name="python3")
+ ep = ExecutePreprocessor(timeout=1000, kernel_name="python3")
ep.preprocess(nb_in)

# strip out all metadata that causes issues for Quarto
- cmp = ClearMetadataPreprocessor(timeout=600, kernel_name="python3")
+ cmp = ClearMetadataPreprocessor(timeout=1000, kernel_name="python3")
# exclude cell tags from removal
cmp.preserve_cell_metadata_mask |= {"tags"}
cmp.preprocess(nb_in, resources={})
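Note: the only change here raises the per-cell execution timeout from 600 s to 1000 s, presumably because the expanded summaries below take longer to run. For context, a minimal sketch of the execution step these lines sit in, assuming the standard nbformat/nbconvert APIs (the notebook path is illustrative, not from the PR):

import nbformat
from nbconvert.preprocessors import ClearMetadataPreprocessor, ExecutePreprocessor

# Load a notebook to execute (illustrative path)
nb_in = nbformat.read("notebooks/long_term.ipynb", as_version=4)

# Execute every cell, allowing up to 1000 s per cell
ep = ExecutePreprocessor(timeout=1000, kernel_name="python3")
ep.preprocess(nb_in)

# Strip metadata that causes issues for Quarto, but keep cell tags
cmp = ClearMetadataPreprocessor(timeout=1000, kernel_name="python3")
cmp.preserve_cell_metadata_mask |= {"tags"}
cmp.preprocess(nb_in, resources={})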
notebooks/daily.ipynb: 12 changes (6 additions, 6 deletions)
@@ -139,10 +139,10 @@
"metadata": {},
"outputs": [],
"source": [
"print(f\"Filtering persons by home MAZ. Original Persons: {len(base_persons_df)}\")\n",
"print(f\"Filtering all persons by home MAZ. Original persons: {len(base_persons_df)}\")\n",
"base_persons_df = base_persons_df[single_filter_mazs(base_persons_df.home_zone_id)]\n",
"build_persons_df = build_persons_df[single_filter_mazs(build_persons_df.home_zone_id)]\n",
"print(f\"Persons after filtering: {len(base_persons_df)}\")"
"print(f\"Total persons after filtering: {len(base_persons_df)}\")"
]
},
{
@@ -218,10 +218,10 @@
"metadata": {},
"outputs": [],
"source": [
"print(f\"Filtering persons by home MAZ. Original Persons: {len(base_persons_df)}\")\n",
"print(f\"Filtering workers and students by home MAZ. Original workers/students: {len(base_persons_df)}\")\n",
"base_persons_df = base_persons_df[single_filter_mazs(base_persons_df.home_zone_id)]\n",
"build_persons_df = build_persons_df[single_filter_mazs(build_persons_df.home_zone_id)]\n",
"print(f\"Persons after filtering: {len(base_persons_df)}\")"
"print(f\"Total workers/students after filtering: {len(base_persons_df)}\")"
]
},
{
@@ -334,10 +334,10 @@
"metadata": {},
"outputs": [],
"source": [
"print(f\"Filtering persons by home MAZ. Original Persons: {len(base_persons_df)}\")\n",
"print(f\"Filtering all persons by home MAZ. Original persons: {len(base_persons_df)}\")\n",
"base_persons_df = base_persons_df[single_filter_mazs(base_persons_df.home_zone_id)]\n",
"build_persons_df = build_persons_df[single_filter_mazs(build_persons_df.home_zone_id)]\n",
"print(f\"Persons after filtering: {len(base_persons_df)}\")"
"print(f\"Total persons after filtering: {len(base_persons_df)}\")"
]
},
{
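Note: all three hunks in this file reword the progress messages so they name the population actually being filtered (all persons vs. workers/students); the filtering itself is unchanged. A self-contained sketch of the pattern, using a hypothetical stand-in for the repo's single_filter_mazs helper (assumed to return a boolean mask marking zone IDs inside the study area):

import pandas as pd

STUDY_AREA_MAZS = {101, 102, 103}  # illustrative MAZ IDs

def single_filter_mazs(zone_ids: pd.Series) -> pd.Series:
    # Hypothetical stand-in: True where the zone lies inside the study area
    return zone_ids.isin(STUDY_AREA_MAZS)

base_persons_df = pd.DataFrame({"home_zone_id": [101, 205, 103, 999]})
print(f"Filtering all persons by home MAZ. Original persons: {len(base_persons_df)}")
base_persons_df = base_persons_df[single_filter_mazs(base_persons_df.home_zone_id)]
print(f"Total persons after filtering: {len(base_persons_df)}")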
notebooks/joint.ipynb: 17 changes (9 additions, 8 deletions)
@@ -124,9 +124,9 @@
"metadata": {},
"outputs": [],
"source": [
"print(f\"\"\"Filtering tours by origin {\n",
"print(f\"\"\"Filtering joint tours by origin {\n",
" 'and' if how_method == 'all' else 'or' if how_method == 'any' else '???'\n",
" } destination MAZ.\\nOriginal tours in base: {len(base_tour)}\\tbuild: {len(build_tour)}\"\"\")\n",
" } destination MAZ.\\nOriginal joint tours in base: {len(base_tour[base_tour.number_of_participants > 1])}\\tbuild: {len(build_tour[build_tour.number_of_participants > 1])}\"\"\")\n",
"\n",
"base_tour = base_tour[multi_filter_mazs([base_tour.origin, base_tour.destination])] # base tour in the filtered set\n",
"\n",
@@ -139,7 +139,7 @@
" & multi_filter_mazs([build_tour.origin, build_tour.destination]) # and it's in this set\n",
" )]\n",
"\n",
"print(f\"After filtering, tours in base: {len(base_tour)}\\tbuild: {len(build_tour)}\")"
"print(f\"After filtering, joint tours in base: {len(base_tour[base_tour.number_of_participants > 1])}\\tbuild: {len(build_tour[build_tour.number_of_participants > 1])}\")"
]
},
{
@@ -389,7 +389,7 @@
" left_index=True,\n",
" right_index=True,\n",
" suffixes=('_base', '_build'),\n",
" how='inner'\n",
" how='outer'\n",
")\n",
"\n",
"# get the difference in start and end times\n",
@@ -448,18 +448,19 @@
" 3: \"PM\",\n",
" 4: \"EV\"\n",
"}\n",
"tp_order = ['EA','AM','MD','PM','EV','Total']\n",
"tp_order_base = ['EA','AM','MD','PM','EV','Newly created','Total']\n",
"tp_order_build = ['EA','AM','MD','PM','EV','Removed','Total']\n",
"\n",
"for metric in ['start', 'end']:\n",
" purpose_df = df\n",
" display(Markdown(f\"### Joint tour {metric} changes\"))\n",
" xtab = pd.crosstab(\n",
" purpose_df[f'{metric}_period_base'].replace(mapper),\n",
" purpose_df[f'{metric}_period_build'].replace(mapper),\n",
" purpose_df[f'{metric}_period_base'].replace(mapper).fillna('Newly created'),\n",
" purpose_df[f'{metric}_period_build'].replace(mapper).fillna('Removed'),\n",
" margins=True,\n",
" margins_name='Total'\n",
" )\n",
" display(xtab.loc[tp_order,tp_order])\n",
" display(xtab.loc[tp_order_base,tp_order_build])\n",
"\n",
" display(Markdown(\" \"))\n",
"\n"
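Note: given the outer join above, a NaN base period means the tour exists only in the build scenario and a NaN build period means it was removed, so fillna('Newly created') / fillna('Removed') turn those tours into explicit crosstab categories instead of silently dropping them. A self-contained sketch with toy data, reusing the period mapper from the cell above:

import pandas as pd

mapper = {0: "EA", 1: "AM", 2: "MD", 3: "PM", 4: "EV"}

# Outer-merged start periods; NaN marks a tour missing on that side
df = pd.DataFrame({
    "start_period_base":  [1, 2, None, 4],
    "start_period_build": [1, 3, 2, None],
})

xtab = pd.crosstab(
    df["start_period_base"].replace(mapper).fillna("Newly created"),
    df["start_period_build"].replace(mapper).fillna("Removed"),
    margins=True,
    margins_name="Total",
)
print(xtab)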
notebooks/long_term.ipynb: 72 changes (43 additions, 29 deletions)
@@ -25,6 +25,7 @@
"import pandas as pd\n",
"import yaml\n",
"import plotly.io as pio\n",
"import numpy as np\n",
"pio.renderers.default = \"plotly_mimetype+notebook_connected\"\n",
"\n",
"from IPython.display import Markdown, display\n",
@@ -185,10 +186,10 @@
"metadata": {},
"outputs": [],
"source": [
"print(f\"Filtering persons by home MAZ. Original Persons: {len(base_per)}\")\n",
"print(f\"Filtering workers by home MAZ. Original workers: {len(base_per[base_per.is_worker])}\")\n",
"base_per = base_per[single_filter_mazs(base_per.home_zone_id)]\n",
"build_per = build_per[single_filter_mazs(build_per.home_zone_id)]\n",
"print(f\"Persons after filtering: {len(base_per)}\")"
"print(f\"Workers after filtering: {len(base_per[base_per.is_worker])}\")"
]
},
{
@@ -238,10 +239,10 @@
"metadata": {},
"outputs": [],
"source": [
"print(f\"Filtering persons by home MAZ. Original Persons: {len(base_per)}\")\n",
"print(f\"Filtering workers by home MAZ. Original workers: {len(base_per[base_per.is_worker])}\")\n",
"base_per = base_per[single_filter_mazs(base_per.home_zone_id)]\n",
"build_per = build_per[single_filter_mazs(build_per.home_zone_id)]\n",
"print(f\"Persons after filtering: {len(base_per)}\")"
"print(f\"Workers after filtering: {len(base_per[base_per.is_worker])}\")"
]
},
{
@@ -303,10 +304,10 @@
"metadata": {},
"outputs": [],
"source": [
"print(f\"Filtering persons by home MAZ. Original Persons: {len(base_per)}\")\n",
"print(f\"Filtering students by home MAZ. Original students: {len(base_per[base_per.is_student])}\")\n",
"base_per = base_per[single_filter_mazs(base_per.home_zone_id)]\n",
"build_per = build_per[single_filter_mazs(build_per.home_zone_id)]\n",
"print(f\"Persons after filtering: {len(base_per)}\")"
"print(f\"Students after filtering: {len(base_per[base_per.is_student])}\")"
]
},
{
@@ -370,10 +371,10 @@
"metadata": {},
"outputs": [],
"source": [
"print(f\"Filtering persons by home MAZ. Original Persons: {len(base_per)}\")\n",
"print(f\"Filtering all persons by home MAZ. Original persons: {len(base_per)}\")\n",
"base_per = base_per[single_filter_mazs(base_per.home_zone_id)]\n",
"build_per = build_per[single_filter_mazs(build_per.home_zone_id)]\n",
"print(f\"Persons after filtering: {len(base_per)}\")"
"print(f\"Total persons after filtering: {len(base_per)}\")"
]
},
{
@@ -531,10 +532,10 @@
"metadata": {},
"outputs": [],
"source": [
"print(f\"Filtering persons by workplace MAZ. Original Persons: {len(base_per)}\")\n",
"print(f\"Filtering workers by workplace MAZ. Original workers in base scenario: {len(base_per[base_per.is_worker])}\")\n",
"base_per = base_per[single_filter_mazs(base_per.workplace_zone_id)]\n",
"build_per = build_per[single_filter_mazs(build_per.workplace_zone_id)]\n",
"print(f\"Persons after filtering: {len(base_per)}\")"
"print(f\"Workers in base scenario after filtering: {len(base_per[base_per.is_worker])}\")"
]
},
{
@@ -545,7 +546,7 @@
"outputs": [],
"source": [
"df = base_per[base_per.is_worker].merge(build_per[build_per.is_worker],\n",
" how='left',\n",
" how='outer',\n",
" left_index=True,\n",
" right_index=True, \n",
" suffixes=('_base','_build'))"
@@ -559,12 +560,13 @@
"outputs": [],
"source": [
"df0 = pd.crosstab(\n",
" base_per[base_per.is_worker].free_parking_at_work,\n",
" build_per[build_per.is_worker].free_parking_at_work,\n",
" df.free_parking_at_work_base.map({False:\"No free parking\",True:\"Has free parking\",np.nan:\"Workplace moved into area\"}),\n",
" df.free_parking_at_work_build.map({False:\"No free parking\",True:\"Has free parking\",np.nan:\"Workplace moved out of area\"}),\n",
" rownames=['base'],\n",
" colnames=['build'],\n",
" margins=True,\n",
" margins_name='Total'\n",
" margins_name='Total',\n",
" dropna=False,\n",
")\n",
"df0"
]
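Note: the committed cell passes np.nan itself as a .map key to label workers whose workplace moves into or out of the filtered area; that relies on NaN lookup behavior, and an equivalent formulation maps the booleans first and then fills the NaNs. A sketch on toy data:

import numpy as np
import pandas as pd

# Outer-merged flags; NaN = no workplace inside the area on that side
df = pd.DataFrame({
    "free_parking_at_work_base":  [True, False, np.nan, True],
    "free_parking_at_work_build": [True, True, False, np.nan],
})

labels = {False: "No free parking", True: "Has free parking"}
df0 = pd.crosstab(
    df.free_parking_at_work_base.map(labels).fillna("Workplace moved into area"),
    df.free_parking_at_work_build.map(labels).fillna("Workplace moved out of area"),
    rownames=["base"],
    colnames=["build"],
    margins=True,
    margins_name="Total",
)
print(df0)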
@@ -576,9 +578,13 @@
"metadata": {},
"outputs": [],
"source": [
"df = (df.free_parking_at_work_base == df.free_parking_at_work_build).value_counts()\n",
"df.index = df.index.map({True:'Unchanged',False:'Changed'})\n",
"df"
"df.loc[df.free_parking_at_work_base == df.free_parking_at_work_build,\"Case\"] = \"Unchanged\"\n",
"df.loc[df.free_parking_at_work_base != df.free_parking_at_work_build,\"Case\"] = \"Changed\"\n",
"\n",
"df.loc[df.free_parking_at_work_base.isna(),\"Case\"] = \"Workplace moved into area\"\n",
"df.loc[df.free_parking_at_work_build.isna(),\"Case\"] = \"Workplace moved out of area\"\n",
"\n",
"df.Case.value_counts().sort_index()"
]
},
{
@@ -588,7 +594,7 @@
"metadata": {},
"outputs": [],
"source": [
"fig = vh.create_pie_chart(df.to_frame().sort_index(),[\"count\"])\n",
"fig = vh.create_pie_chart(df.Case.value_counts().to_frame().sort_index(),[\"count\"])\n",
"fig.show()"
]
},
@@ -607,7 +613,7 @@
"metadata": {},
"outputs": [],
"source": [
"usecols = ['person_id', 'is_worker', 'telecommute_frequency', 'workplace_zone_id']\n",
"usecols = ['person_id', 'is_worker', 'telecommute_frequency', 'workplace_zone_id', 'work_from_home']\n",
"base_per = pd.read_csv(f\"{base_dir}/final_persons.csv\",\n",
" index_col='person_id',\n",
" usecols=usecols)\n",
@@ -635,10 +641,12 @@
"metadata": {},
"outputs": [],
"source": [
"print(f\"Filtering persons by workplace MAZ. Original Persons: {len(base_per)}\")\n",
"base_tele_mask = (base_per.is_worker) & ~(base_per.work_from_home)\n",
"build_tele_mask = (build_per.is_worker) & ~(build_per.work_from_home)\n",
"print(f\"Filtering workers by workplace MAZ who do not work from home. Original workers in base scenario: {len(base_per[base_tele_mask])}\")\n",
"base_per = base_per[single_filter_mazs(base_per.workplace_zone_id)]\n",
"build_per = build_per[single_filter_mazs(build_per.workplace_zone_id)]\n",
"print(f\"Persons after filtering: {len(base_per)}\")"
"print(f\"Workers in base scenario after filtering: {len(base_per[base_tele_mask])}\")"
]
},
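Note: the new masks implement the "workers not at home" restriction from dhensle's commit, so the telecommute summary only counts workers with an actual workplace. A minimal sketch of the predicate, assuming boolean is_worker and work_from_home columns as loaded from final_persons.csv:

import pandas as pd

per = pd.DataFrame({
    "is_worker":      [True, True, False, True],
    "work_from_home": [False, True, False, False],
})

# Workers who commute: employed and not working from home
tele_mask = per.is_worker & ~per.work_from_home
print(f"Workers not at home: {tele_mask.sum()} of {len(per)}")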
{
@@ -648,8 +656,8 @@
"metadata": {},
"outputs": [],
"source": [
"df = base_per[base_per.is_worker].merge(build_per[build_per.is_worker],\n",
" how='left',\n",
"df = base_per[base_tele_mask].merge(build_per[build_tele_mask],\n",
" how='outer',\n",
" left_index=True,\n",
" right_index=True, \n",
" suffixes=('_base','_build'))"
@@ -663,14 +671,18 @@
"outputs": [],
"source": [
"xtab = pd.crosstab(\n",
" df.telecommute_frequency_base,\n",
" df.telecommute_frequency_build,\n",
" df.telecommute_frequency_base.fillna(\"Workplace moved into area\"),\n",
" df.telecommute_frequency_build.fillna(\"Workplace moved out of area\"),\n",
" rownames=['base'],\n",
" colnames=['build'],\n",
" margins=True,\n",
" margins_name='Total'\n",
").sort_index()\n",
"xtab"
" margins_name='Total',\n",
" dropna=False,\n",
")\n",
"xtab.loc[\n",
" filter(lambda x: x in xtab.index, ['0 (No Telecommute)','1 Day per Week','2-3 Days per Week','4 Days per Week','Workplace moved into area','Total']),\n",
" filter(lambda x: x in xtab.columns, ['0 (No Telecommute)','1 Day per Week','2-3 Days per Week','4 Days per Week','Workplace moved out of area','Total'])\n",
" ]"
]
},
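Note: the xtab.loc[...] selection at the end keeps the crosstab in canonical frequency order while tolerating categories absent from a given run; the committed cell builds the row/column lists with inline filter(...) calls. The same selection written with list comprehensions, on toy data:

import pandas as pd

# Toy crosstab missing some telecommute categories
xtab = pd.DataFrame(
    [[5, 2, 7], [1, 3, 4], [6, 5, 11]],
    index=["0 (No Telecommute)", "1 Day per Week", "Total"],
    columns=["0 (No Telecommute)", "Workplace moved out of area", "Total"],
)

row_order = ["0 (No Telecommute)", "1 Day per Week", "2-3 Days per Week",
             "4 Days per Week", "Workplace moved into area", "Total"]
col_order = ["0 (No Telecommute)", "1 Day per Week", "2-3 Days per Week",
             "4 Days per Week", "Workplace moved out of area", "Total"]

# Keep canonical ordering; skip labels missing from this run
print(xtab.loc[[r for r in row_order if r in xtab.index],
               [c for c in col_order if c in xtab.columns]])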
{
@@ -688,6 +700,8 @@
"df1.loc[base_tc_magnitude == build_tc_magnitude, 'case'] = 'Unchanged'\n",
"df1.loc[base_tc_magnitude > build_tc_magnitude, 'case'] = 'Decreased'\n",
"df1.loc[base_tc_magnitude < build_tc_magnitude, 'case'] = 'Increased'\n",
"df1.loc[df1.telecommute_frequency_base.isna() & (~df1.telecommute_frequency_build.isna()),'case'] = \"Workplace moved into area\"\n",
"df1.loc[df1.telecommute_frequency_build.isna() & (~df1.telecommute_frequency_base.isna()),'case'] = \"Workplace moved out of area\"\n",
"\n",
"fig = vh.create_pie_chart(df1.case.value_counts().to_frame().sort_index(), [\"count\"])\n",
"fig.show()"
@@ -764,7 +778,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "asimviz",
"display_name": "asim_eet_viz",
"language": "python",
"name": "python3"
},
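Note: in both "case" classification cells above, assignment order is the point: rows with a NaN on either side would otherwise be mislabeled as changed (NaN compares unequal to everything) or left unlabeled, so the moved-into/out-of-area labels are assigned last to override the earlier comparisons. A compact sketch of the pattern:

import numpy as np
import pandas as pd

df = pd.DataFrame({
    "freq_base":  ["1 Day per Week", "2-3 Days per Week", np.nan, "1 Day per Week"],
    "freq_build": ["1 Day per Week", "1 Day per Week", "1 Day per Week", np.nan],
})

df.loc[df.freq_base == df.freq_build, "case"] = "Unchanged"
df.loc[df.freq_base != df.freq_build, "case"] = "Changed"
# Assigned last so they win over the NaN != x comparisons above
df.loc[df.freq_base.isna(), "case"] = "Workplace moved into area"
df.loc[df.freq_build.isna(), "case"] = "Workplace moved out of area"

print(df.case.value_counts().sort_index())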