New version 1.7.4 #200

Merged 17 commits on Oct 6, 2024

Changes from all commits
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -3,7 +3,7 @@ default_language_version:

repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
- rev: v0.6.5
+ rev: v0.6.9
hooks:
- id: ruff
args: [ --fix, --exit-non-zero-on-fix ]
@@ -24,7 +24,7 @@ repos:
- id: python-check-blanket-noqa
- id: python-check-blanket-type-ignore
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.6.0
+ rev: v5.0.0
hooks:
- id: check-ast
- id: check-added-large-files
10 changes: 9 additions & 1 deletion CHANGELOG.md
@@ -1,5 +1,13 @@
# Changelog

+ ## Version [1.7.4] - 2024-10-06
+
+ - Tightened existing checks to not allow mixed series types as methods input. The coverage level of the checks has not been widened.
+ - Added pandas .ffill() as precursor to pct_change() to suppress pandas FutureWarnings in dependent projects.
+ - Fixed method .resample_to_business_period_ends() so it considers renamed labels.
+ - Corrected warning in this changelog for release of version 1.7.0. Added 'NO'.
+ - Miscellaneous dependency and lockfile updates.
+
## Version [1.7.3] - 2024-09-17

- Consolidated all_properties() method and its string validations.
@@ -30,7 +38,7 @@

## Version [1.7.0] - 2024-07-27

- - Changed code to enforce PEP 604 on typing. This means that the PACKAGE WILL LONGER WORK FOR PYTHON 3.9.
+ - Changed code to enforce PEP 604 on typing. This means that the PACKAGE WILL NO LONGER WORK FOR PYTHON 3.9.
- Limited GitHub workflow build.yaml to no longer run on Python 3.9
- Adjustments to adhere to ruff TCH type checking imports.
- Introduced strict requirement that generate_calendar_date_range argument trading_days must be positive.
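Note (not part of the diff): the 1.7.4 entry about chaining .ffill() before pct_change() refers to the pandas deprecation of the implicit fill_method in pct_change(), which emits a FutureWarning from pandas 2.1 onward. A minimal sketch of the pattern, with made-up frame and column names:

```python
# Minimal sketch, assuming pandas >= 2.1: an explicit .ffill() before
# .pct_change() keeps the old forward-filling behaviour without triggering
# the FutureWarning about the deprecated default fill_method.
import pandas as pd

prices = pd.DataFrame(
    {"fund": [100.0, None, 102.0, 101.0]},  # a gap that needs forward-filling
    index=pd.date_range("2024-10-01", periods=4, freq="D"),
)

returns = prices.ffill().pct_change()  # explicit fill, no FutureWarning
returns.iloc[0] = 0  # the first observation has no prior value to compare with
print(returns)
```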
10 changes: 7 additions & 3 deletions openseries/_common_model.py
@@ -59,6 +59,7 @@
)


+ # noinspection PyTypeChecker
class _CommonModel(BaseModel):
"""Declare _CommonModel."""

@@ -680,7 +681,7 @@ def to_json(
output.append(dict(itemdata))

with dirpath.joinpath(filename).open(mode="w", encoding="utf-8") as jsonfile:
- dump(output, jsonfile, indent=2, sort_keys=False)
+ dump(obj=output, fp=jsonfile, indent=2, sort_keys=False)

return output

@@ -1027,7 +1028,10 @@ def arithmetic_ret_func(
time_factor = how_many / fraction

result = (
- self.tsdf.loc[cast(int, earlier) : cast(int, later)].pct_change().mean()
+ self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+     .ffill()
+     .pct_change()
+     .mean()
* time_factor
)

@@ -1085,7 +1089,7 @@ def vol_func(
time_factor = how_many / fraction

data = self.tsdf.loc[cast(int, earlier) : cast(int, later)]
- result = data.pct_change().std().mul(sqrt(time_factor))
+ result = data.ffill().pct_change().std().mul(sqrt(time_factor))

if self.tsdf.shape[1] == 1:
return float(cast(SupportsFloat, result.iloc[0]))
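Note (not part of the diff): vol_func above applies the same ffill-then-pct_change pattern before annualising the standard deviation. A rough sketch of that calculation, where the 252-day factor is an assumption for daily data (the library derives time_factor from the observation count and the date span):

```python
# Sketch of the annualised-volatility pattern in vol_func; time_factor = 252
# is an assumed trading-day count, not the library's exact derivation.
from math import sqrt

import pandas as pd

prices = pd.DataFrame(
    {"fund": [100.0, 101.0, 100.5, 102.0, 101.5]},
    index=pd.date_range("2024-09-30", periods=5, freq="B"),
)

time_factor = 252  # assumed periods per year for daily observations
vol = prices.ffill().pct_change().std().mul(sqrt(time_factor))
print(float(vol.iloc[0]))  # annualised volatility of the single column
```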
105 changes: 61 additions & 44 deletions openseries/frame.py
@@ -14,7 +14,6 @@
import statsmodels.api as sm # type: ignore[import-untyped,unused-ignore]
from numpy import (
cov,
- cumprod,
divide,
isinf,
log,
@@ -336,11 +335,12 @@ def value_to_ret(self: Self) -> Self:
The returns of the values in the series

"""
- self.tsdf = self.tsdf.pct_change()
- self.tsdf.iloc[0] = 0
+ returns = self.tsdf.ffill().pct_change()
+ returns.iloc[0] = 0
new_labels = [ValueType.RTRN] * self.item_count
arrays = [self.tsdf.columns.get_level_values(0), new_labels]
- self.tsdf.columns = MultiIndex.from_arrays(arrays)
+ returns.columns = MultiIndex.from_arrays(arrays=arrays)
+ self.tsdf = returns.copy()
return self

def value_to_diff(self: Self, periods: int = 1) -> Self:
@@ -374,14 +374,20 @@ def to_cumret(self: Self) -> Self:
An OpenFrame object

"""
- if any(
-     x == ValueType.PRICE
-     for x in self.tsdf.columns.get_level_values(1).to_numpy()
- ):
-     self.value_to_ret()
+ vtypes = [x == ValueType.RTRN for x in self.tsdf.columns.get_level_values(1)]
+ if not any(vtypes):
+     returns = self.tsdf.ffill().pct_change()
+     returns.iloc[0] = 0
+ elif all(vtypes):
+     returns = self.tsdf.copy()
+     returns.iloc[0] = 0
+ else:
+     msg = "Mix of series types will give inconsistent results"
+     raise ValueError(msg)

- self.tsdf = self.tsdf.add(1.0)
- self.tsdf = self.tsdf.apply(cumprod, axis="index") / self.tsdf.iloc[0]
+ returns = returns.add(1.0)
+ self.tsdf = returns.cumprod(axis=0) / returns.iloc[0]
new_labels = [ValueType.PRICE] * self.item_count
arrays = [self.tsdf.columns.get_level_values(0), new_labels]
self.tsdf.columns = MultiIndex.from_arrays(arrays)
@@ -453,8 +459,15 @@
method=method,
)

+ arrays = [
+     self.tsdf.columns.get_level_values(0),
+     self.tsdf.columns.get_level_values(1),
+ ]

self._set_tsdf()

+ self.tsdf.columns = MultiIndex.from_arrays(arrays)

return self

def ewma_risk(
@@ -1284,30 +1297,8 @@ def jensen_alpha( # noqa: C901

"""
full_year = 1.0
- if all(
-     x == ValueType.RTRN
-     for x in self.tsdf.columns.get_level_values(1).to_numpy()
- ):
-     msg = "asset should be a tuple[str, ValueType] or an integer."
-     if isinstance(asset, tuple):
-         asset_log = self.tsdf.loc[:, asset]
-         asset_cagr = asset_log.mean()
-     elif isinstance(asset, int):
-         asset_log = self.tsdf.iloc[:, asset]
-         asset_cagr = asset_log.mean()
-     else:
-         raise TypeError(msg)
-
-     msg = "market should be a tuple[str, ValueType] or an integer."
-     if isinstance(market, tuple):
-         market_log = self.tsdf.loc[:, market]
-         market_cagr = market_log.mean()
-     elif isinstance(market, int):
-         market_log = self.tsdf.iloc[:, market]
-         market_cagr = market_log.mean()
-     else:
-         raise TypeError(msg)
- else:
+ vtypes = [x == ValueType.RTRN for x in self.tsdf.columns.get_level_values(1)]
+ if not any(vtypes):
msg = "asset should be a tuple[str, ValueType] or an integer."
if isinstance(asset, tuple):
asset_log = log(
@@ -1375,6 +1366,29 @@ def jensen_alpha( # noqa: C901
)
else:
raise TypeError(msg)
+ elif all(vtypes):
+     msg = "asset should be a tuple[str, ValueType] or an integer."
+     if isinstance(asset, tuple):
+         asset_log = self.tsdf.loc[:, asset]
+         asset_cagr = asset_log.mean()
+     elif isinstance(asset, int):
+         asset_log = self.tsdf.iloc[:, asset]
+         asset_cagr = asset_log.mean()
+     else:
+         raise TypeError(msg)
+
+     msg = "market should be a tuple[str, ValueType] or an integer."
+     if isinstance(market, tuple):
+         market_log = self.tsdf.loc[:, market]
+         market_cagr = market_log.mean()
+     elif isinstance(market, int):
+         market_log = self.tsdf.iloc[:, market]
+         market_cagr = market_log.mean()
+     else:
+         raise TypeError(msg)
+ else:
+     msg = "Mix of series types will give inconsistent results"
+     raise ValueError(msg)

covariance = cov(asset_log, market_log, ddof=dlta_degr_freedms)
beta = covariance[0, 1] / covariance[1, 1]
@@ -1407,27 +1421,30 @@ def make_portfolio(
"to run the make_portfolio method."
)
raise ValueError(msg)
- dframe = self.tsdf.copy()
- if not any(
-     x == ValueType.RTRN
-     for x in self.tsdf.columns.get_level_values(1).to_numpy()
- ):
-     dframe = dframe.pct_change()
-     dframe.iloc[0] = 0
+ vtypes = [x == ValueType.RTRN for x in self.tsdf.columns.get_level_values(1)]
+ if not any(vtypes):
+     returns = self.tsdf.ffill().pct_change()
+     returns.iloc[0] = 0
+ elif all(vtypes):
+     returns = self.tsdf.copy()
+ else:
+     msg = "Mix of series types will give inconsistent results"
+     raise ValueError(msg)

msg = "Weight strategy not implemented"
if weight_strat:
if weight_strat == "eq_weights":
self.weights = [1.0 / self.item_count] * self.item_count
elif weight_strat == "inv_vol":
- vol = divide(1.0, std(dframe, axis=0, ddof=1))
+ vol = divide(1.0, std(returns, axis=0, ddof=1))
vol[isinf(vol)] = nan
self.weights = list(divide(vol, vol.sum()))
else:
raise NotImplementedError(msg)

return DataFrame(
- data=(dframe @ self.weights).add(1.0).cumprod(),
+ data=(returns @ self.weights).add(1.0).cumprod(),
index=self.tsdf.index,
columns=[[name], [ValueType.PRICE]],
dtype="float64",
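Note (not part of the diff): the repeated vtypes guard in frame.py is what implements the first 1.7.4 changelog entry; frames mixing price and return series are now rejected instead of silently producing inconsistent results. A standalone sketch of the pattern, where ValueType is a simplified stand-in for the library's enum (its member values here are assumptions):

```python
# Simplified stand-in for the guard used in to_cumret / make_portfolio:
# all-price frames are converted to returns, all-return frames pass through,
# and mixed frames raise. The enum values below are assumptions, not the
# library's actual definitions.
from enum import Enum

import pandas as pd


class ValueType(str, Enum):
    PRICE = "Price(Close)"
    RTRN = "Return(Total)"


def to_returns(tsdf: pd.DataFrame) -> pd.DataFrame:
    """Convert a frame with (label, ValueType) columns to returns, refusing mixed types."""
    vtypes = [x == ValueType.RTRN for x in tsdf.columns.get_level_values(1)]
    if not any(vtypes):  # every column holds a price series
        returns = tsdf.ffill().pct_change()
        returns.iloc[0] = 0
    elif all(vtypes):  # every column already holds a return series
        returns = tsdf.copy()
    else:  # prices and returns mixed in one frame
        msg = "Mix of series types will give inconsistent results"
        raise ValueError(msg)
    return returns
```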
20 changes: 12 additions & 8 deletions openseries/portfoliotools.py
@@ -83,13 +83,15 @@ def simulate_portfolios(
"""
copi = simframe.from_deepcopy()

- if any(
-     x == ValueType.PRICE for x in copi.tsdf.columns.get_level_values(1).to_numpy()
- ):
+ vtypes = [x == ValueType.RTRN for x in copi.tsdf.columns.get_level_values(1)]
+ if not any(vtypes):
copi.value_to_ret()
log_ret = copi.tsdf.copy()[1:]
- else:
+ elif all(vtypes):
log_ret = copi.tsdf.copy()
+ else:
+     msg = "Mix of series types will give inconsistent results"
+     raise ValueError(msg)

log_ret.columns = log_ret.columns.droplevel(level=1)

@@ -165,13 +167,15 @@

copi = eframe.from_deepcopy()

- if any(
-     x == ValueType.PRICE for x in copi.tsdf.columns.get_level_values(1).to_numpy()
- ):
+ vtypes = [x == ValueType.RTRN for x in copi.tsdf.columns.get_level_values(1)]
+ if not any(vtypes):
copi.value_to_ret()
log_ret = copi.tsdf.copy()[1:]
- else:
+ elif all(vtypes):
log_ret = copi.tsdf.copy()
+ else:
+     msg = "Mix of series types will give inconsistent results"
+     raise ValueError(msg)

log_ret.columns = log_ret.columns.droplevel(level=1)

28 changes: 11 additions & 17 deletions openseries/series.py
@@ -346,6 +346,7 @@ def from_fixed_rate(
- cast(DatetimeIndex, d_range)[:-1]
],
)
+ # noinspection PyTypeChecker
arr = list(cumprod(insert(1 + deltas * rate / 365, 0, 1.0)))
dates = [d.strftime("%Y-%m-%d") for d in cast(DatetimeIndex, d_range)]

@@ -434,15 +435,12 @@ def value_to_ret(self: Self) -> Self:
The returns of the values in the series

"""
- self.tsdf = self.tsdf.pct_change()
- self.tsdf.iloc[0] = 0
+ returns = self.tsdf.ffill().pct_change()
+ returns.iloc[0] = 0
self.valuetype = ValueType.RTRN
- self.tsdf.columns = MultiIndex.from_arrays(
-     [
-         [self.label],
-         [self.valuetype],
-     ],
- )
+ arrays = [[self.label], [self.valuetype]]
+ returns.columns = MultiIndex.from_arrays(arrays=arrays)
+ self.tsdf = returns.copy()
return self

def value_to_diff(self: Self, periods: int = 1) -> Self:
@@ -480,14 +478,12 @@ def to_cumret(self: Self) -> Self:
An OpenTimeSeries object

"""
- if not any(
-     x == ValueType.RTRN
-     for x in cast(MultiIndex, self.tsdf.columns).get_level_values(1).to_numpy()
- ):
+ if self.valuetype == ValueType.PRICE:
self.value_to_ret()

self.tsdf = self.tsdf.add(1.0)
self.tsdf = self.tsdf.cumprod(axis=0) / self.tsdf.iloc[0]

self.valuetype = ValueType.PRICE
self.tsdf.columns = MultiIndex.from_arrays(
[
@@ -520,6 +516,7 @@ def from_1d_rate_to_cumret(
arr = array(self.values) / divider

deltas = array([i.days for i in self.tsdf.index[1:] - self.tsdf.index[:-1]])
+ # noinspection PyTypeChecker
arr = cumprod(insert(1.0 + deltas * arr[:-1] / days_in_year, 0, 1.0))

self.dates = [d.strftime("%Y-%m-%d") for d in self.tsdf.index]
@@ -684,11 +681,7 @@ def running_adjustment(
An OpenTimeSeries object

"""
- values: list[float]
- if any(
-     x == ValueType.RTRN
-     for x in cast(MultiIndex, self.tsdf.columns).get_level_values(1).to_numpy()
- ):
+ if self.valuetype == ValueType.RTRN:
ra_df = self.tsdf.copy()
values = [1.0]
returns_input = True
@@ -818,6 +811,7 @@ def timeseries_chain(

dates.extend([x.strftime("%Y-%m-%d") for x in new.tsdf.index])

+ # noinspection PyUnresolvedReferences
if back.__class__.__subclasscheck__(
OpenTimeSeries,
):