Skip to content

Commit

Permalink
CLN: black format
Browse files Browse the repository at this point in the history
  • Loading branch information
hongyeehh committed Apr 4, 2021
1 parent c375d80 commit 49fe10a
Show file tree
Hide file tree
Showing 22 changed files with 251 additions and 243 deletions.
10 changes: 9 additions & 1 deletion docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,15 @@ def setup(app):
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
html_sidebars = {"**": ["about.html", "navigation.html", "relations.html", "searchbox.html", "donate.html",]}
# Sidebar templates applied to every page (the "**" glob), in display order.
# Sphinx default: ['localtoc.html', 'relations.html', 'sourcelink.html', 'searchbox.html'].
_sidebar_templates = [
    "about.html",
    "navigation.html",
    "relations.html",
    "searchbox.html",
    "donate.html",
]
html_sidebars = {"**": _sidebar_templates}

# only defined in 'Alabaster' html_theme
# html_theme_options = {
Expand Down
4 changes: 2 additions & 2 deletions tests/analysis/test_transport_mode_identification.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

class TestTransportModeIdentification:
def test_check_empty_dataframe(self):
"""Assert that the method does not work for empty DataFrames
"""Assert that the method does not work for empty DataFrames
(but that the rest works fine, e.g., method signature).
"""
tpls_file = os.path.join("tests", "data", "triplegs_transport_mode_identification.csv")
Expand All @@ -20,7 +20,7 @@ def test_check_empty_dataframe(self):
empty_frame.as_triplegs.predict_transport_mode(method="simple-coarse")

def test_simple_coarse_identification_no_crs(self):
"""Assert that the simple-coarse transport mode identification throws the correct
"""Assert that the simple-coarse transport mode identification throws the correct
warning and and yields the correct results for WGS84.
"""
tpls_file = os.path.join("tests", "data", "triplegs_transport_mode_identification.csv")
Expand Down
2 changes: 1 addition & 1 deletion tests/visualization/test_locations.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,4 +57,4 @@ def test_parameter(self, test_data):
# plot only location
locs.as_locations.plot(out_filename=tmp_file, plot_osm=True)
assert os.path.exists(tmp_file)
os.remove(tmp_file)
os.remove(tmp_file)
2 changes: 1 addition & 1 deletion tests/visualization/test_positionfixes.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,4 +17,4 @@ def test_positionfixes_plot(self):
pfs = ti.read_positionfixes_csv(pfs_file, sep=";", index_col="id", crs="EPSG:4326")
pfs.as_positionfixes.plot(out_filename=tmp_file, plot_osm=False)
assert os.path.exists(tmp_file)
os.remove(tmp_file)
os.remove(tmp_file)
2 changes: 1 addition & 1 deletion tests/visualization/test_staypoints.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,4 +43,4 @@ def test_parameter(self, test_data):
# with osm
stps.as_staypoints.plot(out_filename="staypoints_plot", plot_osm=True)
assert os.path.exists("staypoints_plot.png")
os.remove("staypoints_plot.png")
os.remove("staypoints_plot.png")
2 changes: 1 addition & 1 deletion tests/visualization/test_triplegs.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,4 +49,4 @@ def test_parameter(self, test_data):
# test plot_osm
tpls.as_triplegs.plot(out_filename=tmp_file, plot_osm=True)
assert os.path.exists(tmp_file)
os.remove(tmp_file)
os.remove(tmp_file)
8 changes: 4 additions & 4 deletions trackintel/analysis/tracking_quality.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def temporal_tracking_quality(source, granularity="all"):
granularity : {"all", "day", "week", "weekday", "hour"}
The level of which the tracking quality is calculated. The default "all" returns
the overall tracking quality; "day" the tracking quality by days; "week" the quality
        by weeks; "weekday" the quality by day of the week (e.g., Mondays, Tuesdays, etc.) and
        by weeks; "weekday" the quality by day of the week (e.g., Mondays, Tuesdays, etc.) and
"hour" the quality by hours.
Returns
Expand All @@ -28,11 +28,11 @@ def temporal_tracking_quality(source, granularity="all"):
-----
Requires at least the following columns:
``['user_id', 'started_at', 'finished_at']``
which means the function supports trackintel ``staypoints``, ``triplegs``, ``trips`` and ``tours``
which means the function supports trackintel ``staypoints``, ``triplegs``, ``trips`` and ``tours``
datamodels and their combinations (e.g., staypoints and triplegs sequence).
The temporal tracking quality is the ratio of tracking time and the total time extent. It is
calculated and returned per-user in the defined ``granularity``. The possible time extents of
calculated and returned per-user in the defined ``granularity``. The possible time extents of
the different granularities are different:
- ``all`` considers the time between the latest "finished_at" and the earliest "started_at";
Expand Down
36 changes: 18 additions & 18 deletions trackintel/geogr/distances.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,46 +15,46 @@
def calculate_distance_matrix(X, Y=None, dist_metric="haversine", n_jobs=0, **kwds):
"""
Calculate a distance matrix based on a specific distance metric.
If only X is given, the pair-wise distances between all elements in X are calculated. If X and Y are given, the
distances between all combinations of X and Y are calculated. Distances between elements of X and X, and distances
between elements of Y and Y are not calculated.
Parameters
----------
X : GeoDataFrame (as trackintel staypoints or triplegs)
Y : GeoDataFrame (as trackintel staypoints or triplegs), optional
dist_metric: {'haversine', 'euclidean', 'dtw', 'frechet'}
The distance metric to be used for calculating the matrix.
For staypoints, common choice is 'haversine' or 'euclidean'. This function wraps around
the ``pairwise_distance`` function from scikit-learn if only `X` is given and wraps around the
``scipy.spatial.distance.cdist`` function if X and Y are given.
The distance metric to be used for calculating the matrix.
For staypoints, common choice is 'haversine' or 'euclidean'. This function wraps around
the ``pairwise_distance`` function from scikit-learn if only `X` is given and wraps around the
``scipy.spatial.distance.cdist`` function if X and Y are given.
Therefore the following metrics are also accepted:
via ``scikit-learn``: `[‘cityblock’, ‘cosine’, ‘euclidean’, ‘l1’, ‘l2’, ‘manhattan’]`
via ``scipy.spatial.distance``: `[‘braycurtis’, ‘canberra’, ‘chebyshev’, ‘correlation’, ‘dice’, ‘hamming’, ‘jaccard’,
‘kulsinski’, ‘mahalanobis’, ‘minkowski’, ‘rogerstanimoto’, ‘russellrao’, ‘seuclidean’, ‘sokalmichener’,
‘sokalsneath’, ‘sqeuclidean’, ‘yule’]`
For triplegs, common choice is 'dtw' or 'frechet'. This function uses the implementation
For triplegs, common choice is 'dtw' or 'frechet'. This function uses the implementation
from similaritymeasures.
n_jobs: int
Number of cores to use: 'dtw', 'frechet' and all distance metrics from `pairwise_distance` (only available
Number of cores to use: 'dtw', 'frechet' and all distance metrics from `pairwise_distance` (only available
if only X is given) are parallelized.
**kwds:
**kwds:
optional keywords passed to the distance functions.
Returns
-------
D: np.array
matrix of shape (len(X), len(X)) or of shape (len(X), len(Y)) if Y is provided.
"""
geom_type = X.geometry.iat[0].geom_type
if Y is None:
Expand Down Expand Up @@ -171,7 +171,7 @@ def meters_to_decimal_degrees(meters, latitude):
The meters to convert to degrees.
latitude : float
        As the conversion is dependent (approximately) on the latitude where
        As the conversion is dependent (approximately) on the latitude where
the conversion happens, this needs to be specified. Use 0 for the equator.
Returns
Expand Down
2 changes: 1 addition & 1 deletion trackintel/geogr/point_distances.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
def haversine_dist(lon_1, lat_1, lon_2, lat_2, r=6371000):
"""
Compute the great circle or haversine distance between two coordinates in WGS84.
Serialized version of the haversine distance.
Parameters
Expand Down
14 changes: 7 additions & 7 deletions trackintel/io/dataset_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def read_geolife(geolife_path):
-------
gdf: GeoDataFrame (as trackintel positionfixes)
Contains all loaded geolife positionfixes
labels: dict
Dictionary with the available (optional) mode labels.
Expand Down Expand Up @@ -171,16 +171,16 @@ def geolife_add_modes_to_triplegs(
----------
tpls_in : GeoDataFrame (as trackintel triplegs)
Geolife triplegs.
labels : dictionary
Geolife labels as provided by the trackintel `read_geolife` function.
ratio_threshold : float, default 0.5
        How much a label needs to overlap a tripleg to assign the label to this tripleg.
max_triplegs : int, default 20
Number of neighbors that are considered in the search for matching triplegs.
max_duration_tripleg : float, default 7 * 24 * 60 * 60 (seconds)
Used for a primary filter. All triplegs that are further away in time than 'max_duration_tripleg' from a
label won't be considered for matching.
Expand Down Expand Up @@ -260,10 +260,10 @@ def _calc_overlap_for_candidates(candidates, tpls_this, labels_this, ratio_thres
tpls_this : GeoDataFrame (as trackintel triplegs)
triplegs of a single user
labels_this : DataFrame
labels of a single user
ratio_threshold : float, optional
        How much a label needs to overlap a tripleg to assign the label to this tripleg.
Expand Down
Loading

0 comments on commit 49fe10a

Please sign in to comment.